Merge branch 'master' of https://github.com/MongoEngine/mongoengine
commit 10e0b1daec
@@ -11,7 +11,6 @@ env:
  - PYMONGO=dev DJANGO=1.4.2
  - PYMONGO=2.5 DJANGO=1.5.1
  - PYMONGO=2.5 DJANGO=1.4.2
  - PYMONGO=2.4.2 DJANGO=1.4.2
install:
  - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then cp /usr/lib/*/libz.so $VIRTUAL_ENV/lib/; fi
  - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install pil --use-mirrors ; true; fi
AUTHORS
@@ -157,3 +157,6 @@ that much better:
* Kenneth Falck
* Lukasz Balcerzak
* Nicolas Cortot
* Alex (https://github.com/kelsta)
* Jin Zhang
@@ -20,7 +20,7 @@ post to the `user group <http://groups.google.com/group/mongoengine-users>`
Supported Interpreters
----------------------

PyMongo supports CPython 2.5 and newer. Language
MongoEngine supports CPython 2.6 and newer. Language
features not supported by all interpreters can not be used.
Please also ensure that your code is properly converted by
`2to3 <http://docs.python.org/library/2to3.html>`_ for Python 3 support.
@@ -46,7 +46,7 @@ General Guidelines
- Write tests and make sure they pass (make sure you have a mongod
  running on the default port, then execute ``python setup.py test``
  from the cmd line to run the test suite).
- Add yourself to AUTHORS.rst :)
- Add yourself to AUTHORS :)

Documentation
-------------
@@ -26,7 +26,7 @@ setup.py install``.

Dependencies
============
- pymongo 2.1.1+
- pymongo 2.5+
- sphinx (optional - for documentation generation)

Examples
benchmark.py
@@ -86,17 +86,43 @@ def main():
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, force=True
8.36906409264
0.8.X
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - Pymongo
3.69964408875
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - Pymongo write_concern={"w": 0}
3.5526599884
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine
7.00959801674
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries without continual assign - MongoEngine
5.60943293571
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade=True
6.715102911
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True
5.50644683838
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False
4.69851183891
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False
4.68946313858
----------------------------------------------------------------------------------------------------
"""

setup = """
from pymongo import Connection
connection = Connection()
from pymongo import MongoClient
connection = MongoClient()
connection.drop_database('timeit_test')
"""

stmt = """
from pymongo import Connection
connection = Connection()
from pymongo import MongoClient
connection = MongoClient()

db = connection.timeit_test
noddy = db.noddy
@@ -106,7 +132,7 @@ for i in xrange(10000):
    for j in range(20):
        example['fields']["key"+str(j)] = "value "+str(j)

    noddy.insert(example)
    noddy.save(example)

myNoddys = noddy.find()
[n for n in myNoddys] # iterate
@@ -117,9 +143,32 @@ myNoddys = noddy.find()
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)

stmt = """
from pymongo import MongoClient
connection = MongoClient()

db = connection.timeit_test
noddy = db.noddy

for i in xrange(10000):
    example = {'fields': {}}
    for j in range(20):
        example['fields']["key"+str(j)] = "value "+str(j)

    noddy.save(example, write_concern={"w": 0})

myNoddys = noddy.find()
[n for n in myNoddys] # iterate
"""

print "-" * 100
print """Creating 10000 dictionaries - Pymongo write_concern={"w": 0}"""
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)

setup = """
from pymongo import Connection
connection = Connection()
from pymongo import MongoClient
connection = MongoClient()
connection.drop_database('timeit_test')
connection.disconnect()

@@ -149,33 +198,18 @@ myNoddys = Noddy.objects()
stmt = """
for i in xrange(10000):
    noddy = Noddy()
    fields = {}
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(safe=False, validate=False)
        fields["key"+str(j)] = "value "+str(j)
    noddy.fields = fields
    noddy.save()

myNoddys = Noddy.objects()
[n for n in myNoddys] # iterate
"""

print "-" * 100
print """Creating 10000 dictionaries - MongoEngine, safe=False, validate=False"""
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)


stmt = """
for i in xrange(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(safe=False, validate=False, cascade=False)

myNoddys = Noddy.objects()
[n for n in myNoddys] # iterate
"""

print "-" * 100
print """Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False"""
print """Creating 10000 dictionaries without continual assign - MongoEngine"""
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)

@@ -184,16 +218,65 @@ for i in xrange(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(force_insert=True, safe=False, validate=False, cascade=False)
    noddy.save(write_concern={"w": 0}, cascade=True)

myNoddys = Noddy.objects()
[n for n in myNoddys] # iterate
"""

print "-" * 100
print """Creating 10000 dictionaries - MongoEngine, force=True"""
print """Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade=True"""
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)

stmt = """
for i in xrange(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(write_concern={"w": 0}, validate=False, cascade=True)

myNoddys = Noddy.objects()
[n for n in myNoddys] # iterate
"""

print "-" * 100
print """Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True"""
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)

stmt = """
for i in xrange(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(validate=False, write_concern={"w": 0})

myNoddys = Noddy.objects()
[n for n in myNoddys] # iterate
"""

print "-" * 100
print """Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False"""
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)

stmt = """
for i in xrange(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(force_insert=True, write_concern={"w": 0}, validate=False)

myNoddys = Noddy.objects()
[n for n in myNoddys] # iterate
"""

print "-" * 100
print """Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False"""
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)


if __name__ == "__main__":
    main()
@@ -76,10 +76,13 @@ Fields
.. autoclass:: mongoengine.fields.BinaryField
.. autoclass:: mongoengine.fields.FileField
.. autoclass:: mongoengine.fields.ImageField
.. autoclass:: mongoengine.fields.GeoPointField
.. autoclass:: mongoengine.fields.SequenceField
.. autoclass:: mongoengine.fields.ObjectIdField
.. autoclass:: mongoengine.fields.UUIDField
.. autoclass:: mongoengine.fields.GeoPointField
.. autoclass:: mongoengine.fields.PointField
.. autoclass:: mongoengine.fields.LineStringField
.. autoclass:: mongoengine.fields.PolygonField
.. autoclass:: mongoengine.fields.GridFSError
.. autoclass:: mongoengine.fields.GridFSProxy
.. autoclass:: mongoengine.fields.ImageGridFsProxy
@@ -2,8 +2,23 @@
Changelog
=========

Changes in 0.8.X
Changes in 0.8.0
================
- Added no_sub_classes context manager and queryset helper (#312)
- Querysets now utilise a local cache
- Changed __len__ behaviour in the queryset (#247, #311)
- Fixed querying string versions of ObjectIds issue with ReferenceField (#307)
- Added $setOnInsert support for upserts (#308)
- Upserts now possible with just query parameters (#309)
- Upserting is the only way to ensure docs are saved correctly (#306)
- Fixed register_delete_rule inheritance issue
- Fix cloning of sliced querysets (#303)
- Fixed update_one write concern (#302)
- Updated minimum requirement for pymongo to 2.5
- Add support for new geojson fields, indexes and queries (#299)
- If values can't be compared mark as changed (#287)
- Ensure as_pymongo() and to_json honour only() and exclude() (#293)
- Document serialization uses field order to ensure a strict order is set (#296)
- DecimalField now stores as float not string (#289)
- UUIDField now stores as a binary by default (#292)
- Added Custom User Model for Django 1.5 (#285)
@@ -13,7 +28,6 @@ Changes in 0.8.X
- Added SequenceField.set_next_value(value) helper (#159)
- Updated .only() behaviour - now like exclude it is chainable (#202)
- Added with_limit_and_skip support to count() (#235)
- Removed __len__ from queryset (#247)
- Objects queryset manager now inherited (#256)
- Updated connection to use MongoClient (#262, #274)
- Fixed db_alias and inherited Documents (#143)
@@ -132,7 +132,11 @@ html_theme_path = ['_themes']
html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
html_sidebars = {
    'index': ['globaltoc.html', 'searchbox.html'],
    '**': ['localtoc.html', 'relations.html', 'searchbox.html']
}


# Additional templates that should be rendered to pages, maps page names to
# template names.
@@ -1,8 +1,8 @@
=============================
Using MongoEngine with Django
=============================
==============
Django Support
==============

.. note:: Updated to support Django 1.4
.. note:: Updated to support Django 1.5

Connecting
==========
@@ -98,7 +98,7 @@ Django provides session cookie, which expires after ```SESSION_COOKIE_AGE``` sec

Storage
=======
With MongoEngine's support for GridFS via the :class:`~mongoengine.FileField`,
With MongoEngine's support for GridFS via the :class:`~mongoengine.fields.FileField`,
it is useful to have a Django file storage backend that wraps this. The new
storage module is called :class:`~mongoengine.django.storage.GridFSStorage`.
Using it is very similar to using the default FileSystemStorage.::
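For illustration, a minimal sketch of the idea (these exact calls are an
assumption for this example, not part of this commit)::

    from django.core.files.base import ContentFile
    from mongoengine.django.storage import GridFSStorage

    fs = GridFSStorage()
    # save() writes the content into GridFS and returns the stored name
    name = fs.save('hello.txt', ContentFile('Hello, World!'))
    fs.exists(name)  # True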
@@ -24,6 +24,9 @@ objects** as class attributes to the document class::

    title = StringField(max_length=200, required=True)
    date_modified = DateTimeField(default=datetime.datetime.now)

As BSON (the binary format for storing data in mongodb) is order dependent,
documents are serialized based on their field order.

Dynamic document schemas
========================
One of the benefits of MongoDB is dynamic schemas for a collection, whilst data
@@ -51,6 +54,7 @@ be saved ::

There is one caveat on Dynamic Documents: fields cannot start with `_`

Dynamic fields are stored in alphabetical order *after* any declared fields.
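For example, a short sketch (illustrative only, mirroring the ``Page``
example used elsewhere in this guide)::

    class Page(DynamicDocument):
        title = StringField(max_length=200, required=True)

    page = Page(title='Using MongoEngine')
    page.tags = ['mongoengine']  # undeclared field, stored dynamically
    page.save()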
Fields
======
@@ -62,31 +66,31 @@ not provided. Default values may optionally be a callable, which will be called
to retrieve the value (such as in the above example). The field types available
are as follows:

* :class:`~mongoengine.BinaryField`
* :class:`~mongoengine.BooleanField`
* :class:`~mongoengine.ComplexDateTimeField`
* :class:`~mongoengine.DateTimeField`
* :class:`~mongoengine.DecimalField`
* :class:`~mongoengine.DictField`
* :class:`~mongoengine.DynamicField`
* :class:`~mongoengine.EmailField`
* :class:`~mongoengine.EmbeddedDocumentField`
* :class:`~mongoengine.FileField`
* :class:`~mongoengine.FloatField`
* :class:`~mongoengine.GenericEmbeddedDocumentField`
* :class:`~mongoengine.GenericReferenceField`
* :class:`~mongoengine.GeoPointField`
* :class:`~mongoengine.ImageField`
* :class:`~mongoengine.IntField`
* :class:`~mongoengine.ListField`
* :class:`~mongoengine.MapField`
* :class:`~mongoengine.ObjectIdField`
* :class:`~mongoengine.ReferenceField`
* :class:`~mongoengine.SequenceField`
* :class:`~mongoengine.SortedListField`
* :class:`~mongoengine.StringField`
* :class:`~mongoengine.URLField`
* :class:`~mongoengine.UUIDField`
* :class:`~mongoengine.fields.BinaryField`
* :class:`~mongoengine.fields.BooleanField`
* :class:`~mongoengine.fields.ComplexDateTimeField`
* :class:`~mongoengine.fields.DateTimeField`
* :class:`~mongoengine.fields.DecimalField`
* :class:`~mongoengine.fields.DictField`
* :class:`~mongoengine.fields.DynamicField`
* :class:`~mongoengine.fields.EmailField`
* :class:`~mongoengine.fields.EmbeddedDocumentField`
* :class:`~mongoengine.fields.FileField`
* :class:`~mongoengine.fields.FloatField`
* :class:`~mongoengine.fields.GenericEmbeddedDocumentField`
* :class:`~mongoengine.fields.GenericReferenceField`
* :class:`~mongoengine.fields.GeoPointField`
* :class:`~mongoengine.fields.ImageField`
* :class:`~mongoengine.fields.IntField`
* :class:`~mongoengine.fields.ListField`
* :class:`~mongoengine.fields.MapField`
* :class:`~mongoengine.fields.ObjectIdField`
* :class:`~mongoengine.fields.ReferenceField`
* :class:`~mongoengine.fields.SequenceField`
* :class:`~mongoengine.fields.SortedListField`
* :class:`~mongoengine.fields.StringField`
* :class:`~mongoengine.fields.URLField`
* :class:`~mongoengine.fields.UUIDField`
Field arguments
---------------
@@ -110,7 +114,7 @@ arguments can be set on all fields:
The definition of default parameters follows `the general rules on Python
<http://docs.python.org/reference/compound_stmts.html#function-definitions>`__,
which means that some care should be taken when dealing with default mutable objects
(like in :class:`~mongoengine.ListField` or :class:`~mongoengine.DictField`)::
(like in :class:`~mongoengine.fields.ListField` or :class:`~mongoengine.fields.DictField`)::

    class ExampleFirst(Document):
        # Default an empty list
@@ -172,8 +176,8 @@ arguments can be set on all fields:
List fields
-----------
MongoDB allows the storage of lists of items. To add a list of items to a
:class:`~mongoengine.Document`, use the :class:`~mongoengine.ListField` field
type. :class:`~mongoengine.ListField` takes another field object as its first
:class:`~mongoengine.Document`, use the :class:`~mongoengine.fields.ListField` field
type. :class:`~mongoengine.fields.ListField` takes another field object as its first
argument, which specifies which type elements may be stored within the list::

    class Page(Document):
@@ -191,7 +195,7 @@ inherit from :class:`~mongoengine.EmbeddedDocument` rather than

        content = StringField()

To embed the document within another document, use the
:class:`~mongoengine.EmbeddedDocumentField` field type, providing the embedded
:class:`~mongoengine.fields.EmbeddedDocumentField` field type, providing the embedded
document class as the first argument::

    class Page(Document):
@@ -206,7 +210,7 @@ Dictionary Fields
Often, an embedded document may be used instead of a dictionary -- generally
this is recommended as dictionaries don't support validation or custom field
types. However, sometimes you will not know the structure of what you want to
store; in this situation a :class:`~mongoengine.DictField` is appropriate::
store; in this situation a :class:`~mongoengine.fields.DictField` is appropriate::

    class SurveyResponse(Document):
        date = DateTimeField()
@@ -224,7 +228,7 @@ other objects, so are the most flexible field type available.
Reference fields
----------------
References may be stored to other documents in the database using the
:class:`~mongoengine.ReferenceField`. Pass in another document class as the
:class:`~mongoengine.fields.ReferenceField`. Pass in another document class as the
first argument to the constructor, then simply assign document objects to the
field::
@@ -245,9 +249,9 @@ field::
The :class:`User` object is automatically turned into a reference behind the
scenes, and dereferenced when the :class:`Page` object is retrieved.

To add a :class:`~mongoengine.ReferenceField` that references the document
To add a :class:`~mongoengine.fields.ReferenceField` that references the document
being defined, use the string ``'self'`` in place of the document class as the
argument to :class:`~mongoengine.ReferenceField`'s constructor. To reference a
argument to :class:`~mongoengine.fields.ReferenceField`'s constructor. To reference a
document that has not yet been defined, use the name of the undefined document
as the constructor's argument::
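As a sketch (hypothetical ``Employee`` document)::

    class Employee(Document):
        name = StringField()
        boss = ReferenceField('self')                 # references the document being defined
        profile_page = ReferenceField('ProfilePage')  # defined later in the module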
@@ -325,7 +329,7 @@ Its value can take any of the following constants:
:const:`mongoengine.PULL`
    Removes the reference to the object (using MongoDB's "pull" operation)
    from any object's fields of
    :class:`~mongoengine.ListField` (:class:`~mongoengine.ReferenceField`).
    :class:`~mongoengine.fields.ListField` (:class:`~mongoengine.fields.ReferenceField`).


.. warning::
@@ -352,7 +356,7 @@ Its value can take any of the following constants:
Generic reference fields
''''''''''''''''''''''''
A second kind of reference field also exists,
:class:`~mongoengine.GenericReferenceField`. This allows you to reference any
:class:`~mongoengine.fields.GenericReferenceField`. This allows you to reference any
kind of :class:`~mongoengine.Document`, and hence doesn't take a
:class:`~mongoengine.Document` subclass as a constructor argument::
@@ -376,15 +380,15 @@ kind of :class:`~mongoengine.Document`, and hence doesn't take a

.. note::

    Using :class:`~mongoengine.GenericReferenceField`\ s is slightly less
    efficient than the standard :class:`~mongoengine.ReferenceField`\ s, so if
    Using :class:`~mongoengine.fields.GenericReferenceField`\ s is slightly less
    efficient than the standard :class:`~mongoengine.fields.ReferenceField`\ s, so if
    you will only be referencing one document type, prefer the standard
    :class:`~mongoengine.ReferenceField`.
    :class:`~mongoengine.fields.ReferenceField`.
Uniqueness constraints
----------------------
MongoEngine allows you to specify that a field should be unique across a
collection by providing ``unique=True`` to a :class:`~mongoengine.Field`\ 's
collection by providing ``unique=True`` to a :class:`~mongoengine.fields.Field`\ 's
constructor. If you try to save a document that has the same value for a unique
field as a document that is already in the database, a
:class:`~mongoengine.OperationError` will be raised. You may also specify
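A minimal sketch (hypothetical ``User`` document)::

    class User(Document):
        username = StringField(unique=True)

    User(username='bob').save()
    User(username='bob').save()  # second save raises an OperationError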
@@ -475,6 +479,10 @@ If a dictionary is passed then the following options are available:
:attr:`unique` (Default: False)
    Whether the index should be unique.

:attr:`expireAfterSeconds` (Optional)
    Allows you to automatically expire data from a collection by setting the
    time in seconds to expire a field.

.. note::

    Inheritance adds extra indices on fields; see: :ref:`document-inheritance`.
@@ -485,18 +493,47 @@ Compound Indexes and Indexing sub documents
Compound indexes can be created by adding the Embedded field or dictionary
field name to the index definition.

Sometimes its more efficient to index parts of Embeedded / dictionary fields,
Sometimes it's more efficient to index parts of Embedded / dictionary fields,
in this case use 'dot' notation to identify the value to index eg: `rank.title`
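A sketch of the idea (``Person`` and ``rank`` are hypothetical names)::

    class Person(Document):
        rank = DictField()

        meta = {
            'indexes': ['rank.title'],  # index a sub-field via dot notation
        }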
Geospatial indexes
------------------

The best geo index for mongodb is the new "2dsphere", which has an improved
spherical model and provides better performance and more options when querying.
The following fields will explicitly add a "2dsphere" index:

- :class:`~mongoengine.fields.PointField`
- :class:`~mongoengine.fields.LineStringField`
- :class:`~mongoengine.fields.PolygonField`

As "2dsphere" indexes can be part of a compound index, you may not want the
automatic index but would prefer a compound index. In this example we turn off
auto indexing and explicitly declare a compound index on ``location`` and ``datetime``::

    class Log(Document):
        location = PointField(auto_index=False)
        datetime = DateTimeField()

        meta = {
            'indexes': [[("location", "2dsphere"), ("datetime", 1)]]
        }
Pre MongoDB 2.4 Geo
'''''''''''''''''''

.. note:: For MongoDB < 2.4 this is still current, however the new 2dsphere
    index is a big improvement over the previous 2D model - so upgrading is
    advised.

Geospatial indexes will be automatically created for all
:class:`~mongoengine.GeoPointField`\ s
:class:`~mongoengine.fields.GeoPointField`\ s

It is also possible to explicitly define geospatial indexes. This is
useful if you need to define a geospatial index on a subfield of a
:class:`~mongoengine.DictField` or a custom field that contains a
:class:`~mongoengine.fields.DictField` or a custom field that contains a
point. To create a geospatial index you must prefix the field with the
***** sign. ::
@@ -508,6 +545,22 @@ point. To create a geospatial index you must prefix the field with the
        ],
    }

Time To Live indexes
--------------------

A special index type that allows you to automatically expire data from a
collection after a given period. See the official
`ttl <http://docs.mongodb.org/manual/tutorial/expire-data/#expire-data-from-collections-by-setting-ttl>`_
documentation for more information. A common use case might be session data::

    class Session(Document):
        created = DateTimeField(default=datetime.now)
        meta = {
            'indexes': [
                {'fields': ['created'], 'expireAfterSeconds': 3600}
            ]
        }

Ordering
========
A default ordering can be specified for your
@@ -30,11 +30,14 @@ already exist, then any changes will be updated atomically. For example::

.. note::

    Changes to documents are tracked and on the whole perform `set` operations.
    Changes to documents are tracked and on the whole perform ``set`` operations.

    * ``list_field.pop(0)`` - *sets* the resulting list
    * ``list_field.push(0)`` - *sets* the resulting list
    * ``del(list_field)`` - *unsets* whole list

    With lists it's preferable to use ``Doc.update(push__list_field=0)`` as
    this stops the whole list being updated - stopping any race conditions.

.. seealso::
    :ref:`guide-atomic-updates`
@@ -68,11 +71,12 @@ document values for example::

Cascading Saves
---------------
If your document contains :class:`~mongoengine.ReferenceField` or
:class:`~mongoengine.GenericReferenceField` objects, then by default the
:meth:`~mongoengine.Document.save` method will automatically save any changes to
those objects as well. If this is not desired passing :attr:`cascade` as False
to the save method turns this feature off.
If your document contains :class:`~mongoengine.fields.ReferenceField` or
:class:`~mongoengine.fields.GenericReferenceField` objects, then by default the
:meth:`~mongoengine.Document.save` method will not save any changes to
those objects. If you want all references to be saved as well, noting each
save is a separate query, then passing :attr:`cascade` as True
to the save method will cascade any saves.
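A short sketch (hypothetical ``Post`` referencing a ``User``)::

    post = Post.objects.first()
    post.author.name = 'Ross'
    post.save(cascade=True)  # also issues a save for the modified author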

Deleting documents
------------------
@@ -7,7 +7,7 @@ GridFS

Writing
-------

GridFS support comes in the form of the :class:`~mongoengine.FileField` field
GridFS support comes in the form of the :class:`~mongoengine.fields.FileField` field
object. This field acts as a file-like object and provides a couple of
different ways of inserting and retrieving data. Arbitrary metadata such as
content type can also be stored alongside the files. In the following example,
@@ -27,7 +27,7 @@ a document is created to store details about animals, including a photo::

Retrieval
---------

So using the :class:`~mongoengine.FileField` is just like using any other
So using the :class:`~mongoengine.fields.FileField` is just like using any other
field. The file can also be retrieved just as easily::

    marmot = Animal.objects(genus='Marmota').first()
@@ -37,7 +37,7 @@ field. The file can also be retrieved just as easily::

Streaming
---------

Streaming data into a :class:`~mongoengine.FileField` is achieved in a
Streaming data into a :class:`~mongoengine.fields.FileField` is achieved in a
slightly different manner. First, a new file must be created by calling the
:func:`new_file` method. Data can then be written using :func:`write`::
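A sketch of the pattern (assuming the ``Animal`` document from the writing
example above)::

    marmot = Animal()
    marmot.photo.new_file()
    marmot.photo.write('some_image_data')
    marmot.photo.write('some_more_image_data')
    marmot.photo.close()
    marmot.save()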
@@ -65,6 +65,9 @@ Available operators are as follows:

* ``size`` -- the size of the array is
* ``exists`` -- value for field exists

String queries
--------------

The following operators are available as shortcuts to querying with regular
expressions:
@@ -78,8 +81,71 @@ expressions:
* ``iendswith`` -- string field ends with value (case insensitive)
* ``match`` -- performs an $elemMatch so you can match an entire document within an array

There are a few special operators for performing geographical queries, that
may used with :class:`~mongoengine.GeoPointField`\ s:

Geo queries
-----------

There are a few special operators for performing geographical queries. The following
were added in 0.8 for: :class:`~mongoengine.fields.PointField`,
:class:`~mongoengine.fields.LineStringField` and
:class:`~mongoengine.fields.PolygonField`:
* ``geo_within`` -- Check if a geometry is within a polygon. For ease of use
  it accepts either a geojson geometry or just the polygon coordinates eg::

      loc.objects(point__geo_within=[[[40, 5], [40, 6], [41, 6], [40, 5]]])
      loc.objects(point__geo_within={"type": "Polygon",
                                     "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]})

* ``geo_within_box`` - simplified geo_within searching with a box eg::

      loc.objects(point__geo_within_box=[(-125.0, 35.0), (-100.0, 40.0)])
      loc.objects(point__geo_within_box=[<bottom left coordinates>, <upper right coordinates>])

* ``geo_within_polygon`` -- simplified geo_within searching within a simple polygon eg::

      loc.objects(point__geo_within_polygon=[[40, 5], [40, 6], [41, 6], [40, 5]])
      loc.objects(point__geo_within_polygon=[ [ <x1> , <y1> ] ,
                                              [ <x2> , <y2> ] ,
                                              [ <x3> , <y3> ] ])

* ``geo_within_center`` -- simplified geo_within the flat circle radius of a point eg::

      loc.objects(point__geo_within_center=[(-125.0, 35.0), 1])
      loc.objects(point__geo_within_center=[ [ <x>, <y> ] , <radius> ])

* ``geo_within_sphere`` -- simplified geo_within the spherical circle radius of a point eg::

      loc.objects(point__geo_within_sphere=[(-125.0, 35.0), 1])
      loc.objects(point__geo_within_sphere=[ [ <x>, <y> ] , <radius> ])

* ``geo_intersects`` -- selects all locations that intersect with a geometry eg::

      # Inferred from provided points lists:
      loc.objects(poly__geo_intersects=[40, 6])
      loc.objects(poly__geo_intersects=[[40, 5], [40, 6]])
      loc.objects(poly__geo_intersects=[[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]])

      # With geoJson style objects
      loc.objects(poly__geo_intersects={"type": "Point", "coordinates": [40, 6]})
      loc.objects(poly__geo_intersects={"type": "LineString",
                                        "coordinates": [[40, 5], [40, 6]]})
      loc.objects(poly__geo_intersects={"type": "Polygon",
                                        "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]})

* ``near`` -- Find all the locations near a given point::

      loc.objects(point__near=[40, 5])
      loc.objects(point__near={"type": "Point", "coordinates": [40, 5]})

  You can also set the maximum distance in meters as well::

      loc.objects(point__near=[40, 5], point__max_distance=1000)

The older 2D indexes are still supported with the
:class:`~mongoengine.fields.GeoPointField`:

* ``within_distance`` -- provide a list containing a point and a maximum
  distance (e.g. [(41.342, -87.653), 5])
@@ -91,7 +157,9 @@ may used with :class:`~mongoengine.GeoPointField`\ s:
      [(35.0, -125.0), (40.0, -100.0)])
* ``within_polygon`` -- filter documents to those within a given polygon (e.g.
  [(41.91,-87.69), (41.92,-87.68), (41.91,-87.65), (41.89,-87.65)]).

  .. note:: Requires Mongo Server 2.0

* ``max_distance`` -- can be added to your location queries to set a maximum
  distance.
@@ -100,7 +168,7 @@ Querying lists
--------------
On most fields, this syntax will look up documents where the field specified
matches the given value exactly, but when the field refers to a
:class:`~mongoengine.ListField`, a single item may be provided, in which case
:class:`~mongoengine.fields.ListField`, a single item may be provided, in which case
lists that contain that item will be matched::

    class Page(Document):
@@ -319,7 +387,7 @@ Retrieving a subset of fields
Sometimes a subset of fields on a :class:`~mongoengine.Document` is required,
and for efficiency only these should be retrieved from the database. This issue
is especially important for MongoDB, as fields may often be extremely large
(e.g. a :class:`~mongoengine.ListField` of
(e.g. a :class:`~mongoengine.fields.ListField` of
:class:`~mongoengine.EmbeddedDocument`\ s, which represent the comments on a
blog post). To select only a subset of fields, use
:meth:`~mongoengine.queryset.QuerySet.only`, specifying the fields you want to
@@ -351,14 +419,14 @@ If you later need the missing fields, just call

Getting related data
--------------------

When iterating the results of :class:`~mongoengine.ListField` or
:class:`~mongoengine.DictField` we automatically dereference any
When iterating the results of :class:`~mongoengine.fields.ListField` or
:class:`~mongoengine.fields.DictField` we automatically dereference any
:class:`~pymongo.dbref.DBRef` objects as efficiently as possible, reducing the
number of queries to mongo.

There are times when that efficiency is not enough; documents that have
:class:`~mongoengine.ReferenceField` objects or
:class:`~mongoengine.GenericReferenceField` objects at the top level are
:class:`~mongoengine.fields.ReferenceField` objects or
:class:`~mongoengine.fields.GenericReferenceField` objects at the top level are
expensive as the number of queries to MongoDB can quickly rise.

To limit the number of queries use
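For example (a sketch; ``select_related`` is the queryset helper this
paragraph goes on to describe, an assumption since the hunk is truncated)::

    posts = Post.objects.select_related()  # dereferences references up front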
@@ -541,7 +609,7 @@ Javascript code. When accessing a field on a collection object, use
square-bracket notation, and prefix the MongoEngine field name with a tilde.
The field name that follows the tilde will be translated to the name used in
the database. Note that when referring to fields on embedded documents,
the name of the :class:`~mongoengine.EmbeddedDocumentField`, followed by a dot,
the name of the :class:`~mongoengine.fields.EmbeddedDocumentField`, followed by a dot,
should be used before the name of the field on the embedded document. The
following example shows how the substitutions are made::
@@ -55,15 +55,25 @@ See the :doc:`changelog` for a full list of changes to MongoEngine and

.. note:: Always read and test the `upgrade <upgrade>`_ documentation before
    putting updates live in production **;)**

Offline Reading
---------------

Download the docs in `pdf <https://media.readthedocs.org/pdf/mongoengine-odm/latest/mongoengine-odm.pdf>`_
or `epub <https://media.readthedocs.org/epub/mongoengine-odm/latest/mongoengine-odm.epub>`_
formats for offline reading.


.. toctree::
    :maxdepth: 1
    :numbered:
    :hidden:

    tutorial
    guide/index
    apireference
    django
    changelog
    upgrade
    django

Indices and tables
------------------
@@ -115,7 +115,7 @@ by setting :attr:`allow_inheritance` to True in the :attr:`meta`::

    link_url = StringField()

We are storing a reference to the author of the posts using a
:class:`~mongoengine.ReferenceField` object. These are similar to foreign key
:class:`~mongoengine.fields.ReferenceField` object. These are similar to foreign key
fields in traditional ORMs, and are automatically translated into references
when they are saved, and dereferenced when they are loaded.

@@ -137,7 +137,7 @@ size of our database. So let's take a look that the code our modified

    author = ReferenceField(User)
    tags = ListField(StringField(max_length=30))

The :class:`~mongoengine.ListField` object that is used to define a Post's tags
The :class:`~mongoengine.fields.ListField` object that is used to define a Post's tags
takes a field object as its first argument --- this means that you can have
lists of any type of field (including lists).

@@ -174,7 +174,7 @@ We can then store a list of comment documents in our post document::

Handling deletions of references
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

The :class:`~mongoengine.ReferenceField` object takes a keyword
The :class:`~mongoengine.fields.ReferenceField` object takes a keyword
`reverse_delete_rule` for handling deletion rules if the reference is deleted.
To delete all the posts if a user is deleted set the rule::

@@ -184,7 +184,7 @@ To delete all the posts if a user is deleted set the rule::

    tags = ListField(StringField(max_length=30))
    comments = ListField(EmbeddedDocumentField(Comment))

See :class:`~mongoengine.ReferenceField` for more information.
See :class:`~mongoengine.fields.ReferenceField` for more information.

.. note::
    MapFields and DictFields currently don't support automatic handling of
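The rule itself is set on the reference; a sketch using the tutorial's
``Post``/``User`` documents::

    class Post(Document):
        author = ReferenceField(User, reverse_delete_rule=CASCADE)  # delete posts with their author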
@@ -15,10 +15,10 @@ possible for the whole of the release.
live. There may be multiple manual steps in migrating and these are best honed
on a staging / test system.

Python
=======
Python and PyMongo
==================

Support for python 2.5 has been dropped.
MongoEngine requires python 2.6 (or above) and pymongo 2.5 (or above)

Data Model
==========
@@ -120,6 +120,9 @@ eg::

    p._mark_as_dirty('friends')
    p.save()

`An example test migration for ReferenceFields is available on github
<https://github.com/MongoEngine/mongoengine/blob/master/tests/migration/refrencefield_dbref_to_object_id.py>`_.

UUIDField
---------

@@ -145,6 +148,9 @@ eg::

    a._mark_as_dirty('uuid')
    a.save()

`An example test migration for UUIDFields is available on github
<https://github.com/MongoEngine/mongoengine/blob/master/tests/migration/uuidfield_to_binary.py>`_.

DecimalField
------------

@@ -172,7 +178,10 @@ eg::

    p.save()

.. note:: DecimalFields have also been improved with the addition of precision
    and rounding. See :class:`~mongoengine.DecimalField` for more information.
    and rounding. See :class:`~mongoengine.fields.DecimalField` for more information.

`An example test migration for DecimalFields is available on github
<https://github.com/MongoEngine/mongoengine/blob/master/tests/migration/decimalfield_as_float.py>`_.

Cascading Saves
---------------
@@ -187,6 +196,19 @@ you will have to explicitly tell it to cascade on save::

    # Or on save:
    my_document.save(cascade=True)

Storage
-------

Document and Embedded Documents are now serialized based on declared field order.
Previously, the data was passed to mongodb as a dictionary, which meant that
order wasn't guaranteed - so things like ``$addToSet`` operations on
:class:`~mongoengine.EmbeddedDocument` could potentially fail in unexpected
ways.

If this impacts you, you may want to rewrite the objects using the
``doc.mark_as_dirty('field')`` pattern described above. If you are using a
compound primary key then you will need to ensure the order is fixed and match
your EmbeddedDocument to that order.
Querysets
=========
@@ -213,12 +235,15 @@ update your code like so: ::

    mammals = Animal.objects(type="mammal").filter(order="Carnivora")  # The final queryset is assigned to mammals
    [m for m in mammals]  # This will return all carnivores

No more len
-----------
Len iterates the queryset
--------------------------

If you ever did len(queryset) it previously did a count() under the covers, this
caused some unusual issues - so now it has been removed in favour of the
explicit `queryset.count()` to update::
If you ever did `len(queryset)` it previously did a `count()` under the covers,
this caused some unusual issues. As `len(queryset)` is most often used by
`list(queryset)` we now cache the queryset results and use that for the length.

This isn't as performant as a `count()` and if you aren't iterating the
queryset you should upgrade to use count::

    # Old code
    len(Animal.objects(type="mammal"))
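The upgraded form is a straight swap to the explicit count (a sketch; the
hunk stops just before it)::

    # New code
    Animal.objects(type="mammal").count()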
@@ -15,7 +15,7 @@ import django
__all__ = (list(document.__all__) + fields.__all__ + connection.__all__ +
           list(queryset.__all__) + signals.__all__ + list(errors.__all__))

VERSION = (0, 8, 0, '+')
VERSION = (0, 8, 0, 'RC4')


def get_version():
@@ -3,3 +3,6 @@ from mongoengine.base.datastructures import *
from mongoengine.base.document import *
from mongoengine.base.fields import *
from mongoengine.base.metaclasses import *

# Help with backwards compatibility
from mongoengine.errors import *
@@ -6,6 +6,7 @@ from functools import partial
import pymongo
from bson import json_util
from bson.dbref import DBRef
from bson.son import SON

from mongoengine import signals
from mongoengine.common import _import_class
@@ -228,11 +229,16 @@ class BaseDocument(object):
            pass

    def to_mongo(self):
        """Return data dictionary ready for use with MongoDB.
        """Return as SON data ready for use with MongoDB.
        """
        data = {}
        for field_name, field in self._fields.iteritems():
        data = SON()
        data["_id"] = None
        data['_cls'] = self._class_name

        for field_name in self:
            value = self._data.get(field_name, None)
            field = self._fields.get(field_name)

            if value is not None:
                value = field.to_mongo(value)

@@ -244,19 +250,27 @@ class BaseDocument(object):
            if value is not None:
                data[field.db_field] = value

        # Only add _cls if allow_inheritance is True
        if (hasattr(self, '_meta') and
            self._meta.get('allow_inheritance', ALLOW_INHERITANCE) == True):
            data['_cls'] = self._class_name
        # If "_id" has not been set, then try and set it
        if data["_id"] is None:
            data["_id"] = self._data.get("id", None)

        if '_id' in data and data['_id'] is None:
            del data['_id']
        if data['_id'] is None:
            data.pop('_id')

        # Only add _cls if allow_inheritance is True
        if (not hasattr(self, '_meta') or
                not self._meta.get('allow_inheritance', ALLOW_INHERITANCE)):
            data.pop('_cls')

        if not self._dynamic:
            return data

        for name, field in self._dynamic_fields.items():
        # Sort dynamic fields by key
        dynamic_fields = sorted(self._dynamic_fields.iteritems(),
                                key=operator.itemgetter(0))
        for name, field in dynamic_fields:
            data[name] = field.to_mongo(self._data.get(name, None))

        return data

    def validate(self, clean=True):
@@ -648,7 +662,8 @@ class BaseDocument(object):
        if include_cls and direction is not pymongo.GEO2D:
            index_list.insert(0, ('_cls', 1))

        spec['fields'] = index_list
        if index_list:
            spec['fields'] = index_list
        if spec.get('sparse', False) and len(spec['fields']) > 1:
            raise ValueError(
                'Sparse indexes can only have one field in them. '
@@ -690,13 +705,13 @@ class BaseDocument(object):

                # Add the new index to the list
                fields = [("%s%s" % (namespace, f), pymongo.ASCENDING)
                    for f in unique_fields]
                          for f in unique_fields]
                index = {'fields': fields, 'unique': True, 'sparse': sparse}
                unique_indexes.append(index)

            # Grab any embedded document field unique indexes
            if (field.__class__.__name__ == "EmbeddedDocumentField" and
                field.document_type != cls):
                    field.document_type != cls):
                field_namespace = "%s." % field_name
                doc_cls = field.document_type
                unique_indexes += doc_cls._unique_with_indexes(field_namespace)
@@ -704,26 +719,31 @@ class BaseDocument(object):
        return unique_indexes

    @classmethod
    def _geo_indices(cls, inspected=None):
    def _geo_indices(cls, inspected=None, parent_field=None):
        inspected = inspected or []
        geo_indices = []
        inspected.append(cls)

        EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
        GeoPointField = _import_class("GeoPointField")
        geo_field_type_names = ["EmbeddedDocumentField", "GeoPointField",
                                "PointField", "LineStringField", "PolygonField"]

        geo_field_types = tuple([_import_class(field) for field in geo_field_type_names])

        for field in cls._fields.values():
            if not isinstance(field, (EmbeddedDocumentField, GeoPointField)):
            if not isinstance(field, geo_field_types):
                continue
            if hasattr(field, 'document_type'):
                field_cls = field.document_type
                if field_cls in inspected:
                    continue
                if hasattr(field_cls, '_geo_indices'):
                    geo_indices += field_cls._geo_indices(inspected)
                    geo_indices += field_cls._geo_indices(inspected, parent_field=field.db_field)
            elif field._geo_index:
                field_name = field.db_field
                if parent_field:
                    field_name = "%s.%s" % (parent_field, field_name)
                geo_indices.append({'fields':
                    [(field.db_field, pymongo.GEO2D)]})
                                    [(field_name, field._geo_index)]})
        return geo_indices

    @classmethod
@@ -2,7 +2,8 @@ import operator
import warnings
import weakref

from bson import DBRef, ObjectId
from bson import DBRef, ObjectId, SON
import pymongo

from mongoengine.common import _import_class
from mongoengine.errors import ValidationError
@@ -10,7 +11,7 @@ from mongoengine.errors import ValidationError
from mongoengine.base.common import ALLOW_INHERITANCE
from mongoengine.base.datastructures import BaseDict, BaseList

__all__ = ("BaseField", "ComplexBaseField", "ObjectIdField")
__all__ = ("BaseField", "ComplexBaseField", "ObjectIdField", "GeoJsonBaseField")


class BaseField(object):
@@ -81,13 +82,16 @@ class BaseField(object):
    def __set__(self, instance, value):
        """Descriptor for assigning a value to a field in a document.
        """
        changed = False
        if (self.name not in instance._data or
            instance._data[self.name] != value):
            changed = True
        instance._data[self.name] = value
        if changed and instance._initialised:
            instance._mark_as_changed(self.name)
        if instance._initialised:
            try:
                if (self.name not in instance._data or
                        instance._data[self.name] != value):
                    instance._mark_as_changed(self.name)
            except:
                # Values cant be compared eg: naive and tz datetimes
                # So mark it as changed
                instance._mark_as_changed(self.name)
        instance._data[self.name] = value

    def error(self, message="", errors=None, field_name=None):
        """Raises a ValidationError.
@@ -183,7 +187,7 @@ class ComplexBaseField(BaseField):

        # Convert lists / values so we can watch for any changes on them
        if (isinstance(value, (list, tuple)) and
            not isinstance(value, BaseList)):
                not isinstance(value, BaseList)):
            value = BaseList(value, instance, self.name)
            instance._data[self.name] = value
        elif isinstance(value, dict) and not isinstance(value, BaseDict):
@@ -191,8 +195,8 @@ class ComplexBaseField(BaseField):
            instance._data[self.name] = value

        if (self._auto_dereference and instance._initialised and
            isinstance(value, (BaseList, BaseDict))
            and not value._dereferenced):
                isinstance(value, (BaseList, BaseDict))
                and not value._dereferenced):
            value = self._dereference(
                value, max_depth=1, instance=instance, name=self.name
            )
@@ -228,7 +232,7 @@ class ComplexBaseField(BaseField):

        if self.field:
            value_dict = dict([(key, self.field.to_python(item))
                for key, item in value.items()])
                               for key, item in value.items()])
        else:
            value_dict = {}
            for k, v in value.items():
@@ -279,7 +283,7 @@ class ComplexBaseField(BaseField):

        if self.field:
            value_dict = dict([(key, self.field.to_mongo(item))
                for key, item in value.iteritems()])
                               for key, item in value.iteritems()])
        else:
            value_dict = {}
            for k, v in value.iteritems():
@@ -295,7 +299,7 @@ class ComplexBaseField(BaseField):
                meta = getattr(v, '_meta', {})
                allow_inheritance = (
                    meta.get('allow_inheritance', ALLOW_INHERITANCE)
                    == True)
                    is True)
                if not allow_inheritance and not self.field:
                    value_dict[k] = GenericReferenceField().to_mongo(v)
                else:
@@ -393,3 +397,100 @@ class ObjectIdField(BaseField):
            ObjectId(unicode(value))
        except:
            self.error('Invalid Object ID')


class GeoJsonBaseField(BaseField):
    """A geo json field storing a geojson style object.
    .. versionadded:: 0.8
    """

    _geo_index = pymongo.GEOSPHERE
    _type = "GeoBase"

    def __init__(self, auto_index=True, *args, **kwargs):
        """
        :param auto_index: Automatically create a "2dsphere" index. Defaults
            to `True`.
        """
        self._name = "%sField" % self._type
        if not auto_index:
            self._geo_index = False
        super(GeoJsonBaseField, self).__init__(*args, **kwargs)

    def validate(self, value):
        """Validate the GeoJson object based on its type
        """
        if isinstance(value, dict):
            if set(value.keys()) == set(['type', 'coordinates']):
                if value['type'] != self._type:
                    self.error('%s type must be "%s"' % (self._name, self._type))
                return self.validate(value['coordinates'])
            else:
                self.error('%s can only accept a valid GeoJson dictionary'
                           ' or lists of (x, y)' % self._name)
                return
        elif not isinstance(value, (list, tuple)):
            self.error('%s can only accept lists of [x, y]' % self._name)
            return

        validate = getattr(self, "_validate_%s" % self._type.lower())
        error = validate(value)
        if error:
            self.error(error)

    def _validate_polygon(self, value):
        if not isinstance(value, (list, tuple)):
            return 'Polygons must contain list of linestrings'

        # Quick and dirty validator
        try:
            value[0][0][0]
        except:
            return "Invalid Polygon must contain at least one valid linestring"

        errors = []
        for val in value:
            error = self._validate_linestring(val, False)
            if not error and val[0] != val[-1]:
                error = 'LineStrings must start and end at the same point'
            if error and error not in errors:
                errors.append(error)
        if errors:
            return "Invalid Polygon:\n%s" % ", ".join(errors)

    def _validate_linestring(self, value, top_level=True):
        """Validates a linestring"""
        if not isinstance(value, (list, tuple)):
            return 'LineStrings must contain list of coordinate pairs'

        # Quick and dirty validator
        try:
            value[0][0]
        except:
            return "Invalid LineString must contain at least one valid point"

        errors = []
        for val in value:
            error = self._validate_point(val)
            if error and error not in errors:
                errors.append(error)
        if errors:
            if top_level:
                return "Invalid LineString:\n%s" % ", ".join(errors)
            else:
                return "%s" % ", ".join(errors)

    def _validate_point(self, value):
        """Validate each set of coords"""
        if not isinstance(value, (list, tuple)):
            return 'Points must be a list of coordinate pairs'
        elif not len(value) == 2:
            return "Value (%s) must be a two-dimensional point" % repr(value)
        elif (not isinstance(value[0], (float, int)) or
                not isinstance(value[1], (float, int))):
            return "Both values (%s) in point must be float or int" % repr(value)

    def to_mongo(self, value):
        if isinstance(value, dict):
            return value
        return SON([("type", self._type), ("coordinates", value)])
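For context, the concrete geo fields introduced in this release build on this
base by overriding ``_type``; a sketch of the pattern (the real definitions
live in mongoengine/fields.py)::

    class PointField(GeoJsonBaseField):
        # stores a GeoJSON "Point" and, unless auto_index=False,
        # registers a "2dsphere" index for the field
        _type = "Point"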
@ -140,8 +140,31 @@ class DocumentMetaclass(type):
|
||||
base._subclasses += (_cls,)
|
||||
base._types = base._subclasses # TODO depreciate _types
|
||||
|
||||
# Handle delete rules
|
||||
Document, EmbeddedDocument, DictField = cls._import_classes()
|
||||
|
||||
if issubclass(new_class, Document):
|
||||
new_class._collection = None
|
||||
|
||||
# Add class to the _document_registry
|
||||
_document_registry[new_class._class_name] = new_class
|
||||
|
||||
# In Python 2, User-defined methods objects have special read-only
|
||||
# attributes 'im_func' and 'im_self' which contain the function obj
|
||||
# and class instance object respectively. With Python 3 these special
|
||||
# attributes have been replaced by __func__ and __self__. The Blinker
|
||||
# module continues to use im_func and im_self, so the code below
|
||||
# copies __func__ into im_func and __self__ into im_self for
|
||||
# classmethod objects in Document derived classes.
|
||||
if PY3:
|
||||
for key, val in new_class.__dict__.items():
|
||||
if isinstance(val, classmethod):
|
||||
f = val.__get__(new_class)
|
||||
if hasattr(f, '__func__') and not hasattr(f, 'im_func'):
|
||||
f.__dict__.update({'im_func': getattr(f, '__func__')})
|
||||
if hasattr(f, '__self__') and not hasattr(f, 'im_self'):
|
||||
f.__dict__.update({'im_self': getattr(f, '__self__')})
|
||||
|
||||
# Handle delete rules
|
||||
for field in new_class._fields.itervalues():
|
||||
f = field
|
||||
f.owner_document = new_class
|
||||
@ -167,33 +190,11 @@ class DocumentMetaclass(type):
|
||||
field.name, delete_rule)
|
||||
|
||||
if (field.name and hasattr(Document, field.name) and
|
||||
EmbeddedDocument not in new_class.mro()):
|
||||
EmbeddedDocument not in new_class.mro()):
|
||||
msg = ("%s is a document method and not a valid "
|
||||
"field name" % field.name)
|
||||
raise InvalidDocumentError(msg)
|
||||
|
||||
if issubclass(new_class, Document):
|
||||
new_class._collection = None
|
||||
|
||||
# Add class to the _document_registry
|
||||
_document_registry[new_class._class_name] = new_class
|
||||
|
||||
# In Python 2, User-defined methods objects have special read-only
|
||||
# attributes 'im_func' and 'im_self' which contain the function obj
|
||||
# and class instance object respectively. With Python 3 these special
|
||||
# attributes have been replaced by __func__ and __self__. The Blinker
|
||||
# module continues to use im_func and im_self, so the code below
|
||||
# copies __func__ into im_func and __self__ into im_self for
|
||||
# classmethod objects in Document derived classes.
|
||||
if PY3:
|
||||
for key, val in new_class.__dict__.items():
|
||||
if isinstance(val, classmethod):
|
||||
f = val.__get__(new_class)
|
||||
if hasattr(f, '__func__') and not hasattr(f, 'im_func'):
|
||||
f.__dict__.update({'im_func': getattr(f, '__func__')})
|
||||
if hasattr(f, '__self__') and not hasattr(f, 'im_self'):
|
||||
f.__dict__.update({'im_self': getattr(f, '__self__')})
|
||||
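For context, a rough illustration of what the shim above achieves on Python 3 (the Doc class and its classmethod are hypothetical):

    # After DocumentMetaclass has run, blinker's Python 2 attribute
    # names resolve on Document classmethods:
    f = Doc.create_doc             # a bound classmethod
    assert f.im_func is f.__func__
    assert f.im_self is f.__self__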
|
||||
return new_class
|
||||
|
||||
def add_to_class(self, name, value):
|
||||
|
@ -11,6 +11,7 @@ def _import_class(cls_name):
|
||||
field_classes = ('DictField', 'DynamicField', 'EmbeddedDocumentField',
|
||||
'FileField', 'GenericReferenceField',
|
||||
'GenericEmbeddedDocumentField', 'GeoPointField',
|
||||
'PointField', 'LineStringField', 'PolygonField',
|
||||
'ReferenceField', 'StringField', 'ComplexBaseField')
|
||||
queryset_classes = ('OperationError',)
|
||||
deref_classes = ('DeReference',)
|
||||
|
@ -137,11 +137,12 @@ def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
|
||||
if alias not in _dbs:
|
||||
conn = get_connection(alias)
|
||||
conn_settings = _connection_settings[alias]
|
||||
_dbs[alias] = conn[conn_settings['name']]
|
||||
db = conn[conn_settings['name']]
|
||||
# Authenticate if necessary
|
||||
if conn_settings['username'] and conn_settings['password']:
|
||||
_dbs[alias].authenticate(conn_settings['username'],
|
||||
conn_settings['password'])
|
||||
db.authenticate(conn_settings['username'],
|
||||
conn_settings['password'])
|
||||
_dbs[alias] = db
|
||||
return _dbs[alias]
|
||||
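The change above authenticates the raw database handle before it is cached in _dbs, so a handle is never stored unauthenticated. A minimal sketch of the path this exercises (db name and credentials are placeholders):

    from mongoengine import connect
    from mongoengine.connection import get_db

    connect('mydb', username='reader', password='s3cret')
    db = get_db()   # authenticates once, then the handle is cached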
|
||||
|
||||
|
@ -1,8 +1,10 @@
|
||||
from mongoengine.common import _import_class
|
||||
from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
|
||||
from mongoengine.queryset import OperationError, QuerySet
|
||||
from mongoengine.queryset import QuerySet
|
||||
|
||||
__all__ = ("switch_db", "switch_collection", "no_dereference", "query_counter")
|
||||
|
||||
__all__ = ("switch_db", "switch_collection", "no_dereference",
|
||||
"no_sub_classes", "query_counter")
|
||||
|
||||
|
||||
class switch_db(object):
|
||||
@ -130,6 +132,36 @@ class no_dereference(object):
|
||||
return self.cls
|
||||
|
||||
|
||||
class no_sub_classes(object):
|
||||
""" no_sub_classes context manager.
|
||||
|
||||
Only returns instances of this class and no sub (inherited) classes::
|
||||
|
||||
with no_sub_classes(Group) as Group:
|
||||
Group.objects()
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, cls):
|
||||
""" Construct the no_sub_classes context manager.
|
||||
|
||||
:param cls: the class to restrict querying to (subclasses excluded)
|
||||
"""
|
||||
self.cls = cls
|
||||
|
||||
def __enter__(self):
|
||||
""" change the objects default and _auto_dereference values"""
|
||||
self.cls._all_subclasses = self.cls._subclasses
|
||||
self.cls._subclasses = (self.cls,)
|
||||
return self.cls
|
||||
|
||||
def __exit__(self, t, value, traceback):
|
||||
""" Reset the default and _auto_dereference values"""
|
||||
self.cls._subclasses = self.cls._all_subclasses
|
||||
delattr(self.cls, '_all_subclasses')
|
||||
return self.cls
|
||||
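A minimal usage sketch of the context manager above (Group and SubGroup are hypothetical documents; inheritance must be enabled for the restriction to matter):

    class Group(Document):
        name = StringField()
        meta = {'allow_inheritance': True}

    class SubGroup(Group):
        pass

    # Inside the block, queries on Group match Group instances only;
    # the original _subclasses registry is restored on exit.
    with no_sub_classes(Group) as Group:
        groups = Group.objects()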
|
||||
|
||||
class QuerySetNoDeRef(QuerySet):
|
||||
"""Special no_dereference QuerySet"""
|
||||
def __dereference(items, max_depth=1, instance=None, name=None):
|
||||
|
@ -1,4 +1,3 @@
|
||||
from __future__ import with_statement
|
||||
import warnings
|
||||
|
||||
import pymongo
|
||||
@ -232,7 +231,6 @@ class Document(BaseDocument):
|
||||
return not updated
|
||||
return created
|
||||
|
||||
upsert = self._created
|
||||
update_query = {}
|
||||
|
||||
if updates:
|
||||
@ -241,7 +239,7 @@ class Document(BaseDocument):
|
||||
update_query["$unset"] = removals
|
||||
if updates or removals:
|
||||
last_error = collection.update(select_dict, update_query,
|
||||
upsert=upsert, **write_concern)
|
||||
upsert=True, **write_concern)
|
||||
created = is_new_object(last_error)
|
||||
|
||||
cascade = (self._meta.get('cascade', True)
|
||||
@ -523,7 +521,6 @@ class Document(BaseDocument):
|
||||
# an extra index on _cls, as mongodb will use the existing
|
||||
# index to service queries against _cls
|
||||
cls_indexed = False
|
||||
|
||||
def includes_cls(fields):
|
||||
first_field = None
|
||||
if len(fields):
|
||||
@ -548,7 +545,7 @@ class Document(BaseDocument):
|
||||
# If _cls is being used (for polymorphism), it needs an index,
|
||||
# only if another index doesn't begin with _cls
|
||||
if (index_cls and not cls_indexed and
|
||||
cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) == True):
|
||||
cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) is True):
|
||||
collection.ensure_index('_cls', background=background,
|
||||
**index_opts)
|
||||
|
||||
@ -559,7 +556,7 @@ class DynamicDocument(Document):
|
||||
way as an ordinary document but has expando style properties. Any data
|
||||
passed or set against the :class:`~mongoengine.DynamicDocument` that is
|
||||
not a field is automatically converted into a
|
||||
:class:`~mongoengine.DynamicField` and data can be attributed to that
|
||||
:class:`~mongoengine.fields.DynamicField` and data can be attributed to that
|
||||
field.
|
||||
|
||||
.. note::
|
||||
|
@ -8,13 +8,14 @@ import uuid
|
||||
import warnings
|
||||
from operator import itemgetter
|
||||
|
||||
import pymongo
|
||||
import gridfs
|
||||
from bson import Binary, DBRef, SON, ObjectId
|
||||
|
||||
from mongoengine.errors import ValidationError
|
||||
from mongoengine.python_support import (PY3, bin_type, txt_type,
|
||||
str_types, StringIO)
|
||||
from base import (BaseField, ComplexBaseField, ObjectIdField,
|
||||
from base import (BaseField, ComplexBaseField, ObjectIdField, GeoJsonBaseField,
|
||||
get_document, BaseDocument)
|
||||
from queryset import DO_NOTHING, QuerySet
|
||||
from document import Document, EmbeddedDocument
|
||||
@ -33,9 +34,8 @@ __all__ = ['StringField', 'URLField', 'EmailField', 'IntField', 'LongField',
|
||||
'SortedListField', 'DictField', 'MapField', 'ReferenceField',
|
||||
'GenericReferenceField', 'BinaryField', 'GridFSError',
|
||||
'GridFSProxy', 'FileField', 'ImageGridFsProxy',
|
||||
'ImproperlyConfigured', 'ImageField', 'GeoPointField',
|
||||
'SequenceField', 'UUIDField']
|
||||
|
||||
'ImproperlyConfigured', 'ImageField', 'GeoPointField', 'PointField',
|
||||
'LineStringField', 'PolygonField', 'SequenceField', 'UUIDField']
|
||||
|
||||
|
||||
RECURSIVE_REFERENCE_CONSTANT = 'self'
|
||||
@ -107,11 +107,11 @@ class URLField(StringField):
|
||||
"""
|
||||
|
||||
_URL_REGEX = re.compile(
|
||||
r'^(?:http|ftp)s?://' # http:// or https://
|
||||
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' #domain...
|
||||
r'localhost|' #localhost...
|
||||
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
|
||||
r'(?::\d+)?' # optional port
|
||||
r'^(?:http|ftp)s?://' # http:// or https://
|
||||
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain...
|
||||
r'localhost|' # localhost...
|
||||
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
|
||||
r'(?::\d+)?' # optional port
|
||||
r'(?:/?|[/?]\S+)$', re.IGNORECASE)
|
||||
|
||||
def __init__(self, verify_exists=False, url_regex=None, **kwargs):
|
||||
@ -128,8 +128,7 @@ class URLField(StringField):
|
||||
warnings.warn(
|
||||
"The URLField verify_exists argument has intractable security "
|
||||
"and performance issues. Accordingly, it has been deprecated.",
|
||||
DeprecationWarning
|
||||
)
|
||||
DeprecationWarning)
|
||||
try:
|
||||
request = urllib2.Request(value)
|
||||
urllib2.urlopen(request)
|
||||
@ -297,8 +296,9 @@ class DecimalField(BaseField):
|
||||
if value is None:
|
||||
return value
|
||||
|
||||
return decimal.Decimal(value).quantize(self.precision,
|
||||
rounding=self.rounding)
|
||||
# Convert to string for python 2.6 before casting to Decimal
|
||||
value = decimal.Decimal("%s" % value)
|
||||
return value.quantize(self.precision, rounding=self.rounding)
|
||||
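The string round-trip above matters because Python 2.6's Decimal cannot be constructed from a float directly, and later versions would capture the float's binary noise; a quick sketch:

    import decimal

    decimal.Decimal("%s" % 0.1)   # Decimal('0.1') - the clean value stored
    # decimal.Decimal(0.1)        # TypeError on 2.6; Decimal('0.1000...625') on 2.7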
|
||||
def to_mongo(self, value):
|
||||
if value is None:
|
||||
@ -468,7 +468,7 @@ class ComplexDateTimeField(StringField):
|
||||
|
||||
def __get__(self, instance, owner):
|
||||
data = super(ComplexDateTimeField, self).__get__(instance, owner)
|
||||
if data == None:
|
||||
if data is None:
|
||||
return datetime.datetime.now()
|
||||
if isinstance(data, datetime.datetime):
|
||||
return data
|
||||
@ -657,15 +657,15 @@ class ListField(ComplexBaseField):
|
||||
"""Make sure that a list of valid fields is being used.
|
||||
"""
|
||||
if (not isinstance(value, (list, tuple, QuerySet)) or
|
||||
isinstance(value, basestring)):
|
||||
isinstance(value, basestring)):
|
||||
self.error('Only lists and tuples may be used in a list field')
|
||||
super(ListField, self).validate(value)
|
||||
|
||||
def prepare_query_value(self, op, value):
|
||||
if self.field:
|
||||
if op in ('set', 'unset') and (not isinstance(value, basestring)
|
||||
and not isinstance(value, BaseDocument)
|
||||
and hasattr(value, '__iter__')):
|
||||
and not isinstance(value, BaseDocument)
|
||||
and hasattr(value, '__iter__')):
|
||||
return [self.field.prepare_query_value(op, v) for v in value]
|
||||
return self.field.prepare_query_value(op, value)
|
||||
return super(ListField, self).prepare_query_value(op, value)
|
||||
@ -700,7 +700,7 @@ class SortedListField(ListField):
|
||||
value = super(SortedListField, self).to_mongo(value)
|
||||
if self._ordering is not None:
|
||||
return sorted(value, key=itemgetter(self._ordering),
|
||||
reverse=self._order_reverse)
|
||||
reverse=self._order_reverse)
|
||||
return sorted(value, reverse=self._order_reverse)
|
||||
|
||||
|
||||
@ -781,7 +781,7 @@ class ReferenceField(BaseField):
|
||||
* NULLIFY - Updates the reference to null.
|
||||
* CASCADE - Deletes the documents associated with the reference.
|
||||
* DENY - Prevent the deletion of the reference object.
|
||||
* PULL - Pull the reference from a :class:`~mongoengine.ListField`
|
||||
* PULL - Pull the reference from a :class:`~mongoengine.fields.ListField`
|
||||
of references
|
||||
|
||||
Alternative syntax for registering delete rules (useful when implementing
|
||||
@ -854,7 +854,7 @@ class ReferenceField(BaseField):
|
||||
return document.id
|
||||
return document
|
||||
elif not self.dbref and isinstance(document, basestring):
|
||||
return document
|
||||
return ObjectId(document)
|
||||
|
||||
id_field_name = self.document_type._meta['id_field']
|
||||
id_field = self.document_type._fields[id_field_name]
|
||||
@ -879,7 +879,7 @@ class ReferenceField(BaseField):
|
||||
"""Convert a MongoDB-compatible type to a Python type.
|
||||
"""
|
||||
if (not self.dbref and
|
||||
not isinstance(value, (DBRef, Document, EmbeddedDocument))):
|
||||
not isinstance(value, (DBRef, Document, EmbeddedDocument))):
|
||||
collection = self.document_type._get_collection_name()
|
||||
value = DBRef(collection, self.document_type.id.to_python(value))
|
||||
return value
|
||||
@ -1000,7 +1000,7 @@ class BinaryField(BaseField):
|
||||
if not isinstance(value, (bin_type, txt_type, Binary)):
|
||||
self.error("BinaryField only accepts instances of "
|
||||
"(%s, %s, Binary)" % (
|
||||
bin_type.__name__, txt_type.__name__))
|
||||
bin_type.__name__, txt_type.__name__))
|
||||
|
||||
if self.max_bytes is not None and len(value) > self.max_bytes:
|
||||
self.error('Binary value is too long')
|
||||
@ -1234,8 +1234,6 @@ class ImageGridFsProxy(GridFSProxy):
|
||||
Insert an image into the database,
|
||||
applying field properties (size, thumbnail_size)
|
||||
"""
|
||||
if not self.instance:
|
||||
import ipdb; ipdb.set_trace();
|
||||
field = self.instance._fields[self.key]
|
||||
|
||||
try:
|
||||
@ -1307,6 +1305,7 @@ class ImageGridFsProxy(GridFSProxy):
|
||||
height=h,
|
||||
format=format,
|
||||
**kwargs)
|
||||
|
||||
@property
|
||||
def size(self):
|
||||
"""
|
||||
@ -1385,28 +1384,6 @@ class ImageField(FileField):
|
||||
**kwargs)
|
||||
|
||||
|
||||
class GeoPointField(BaseField):
|
||||
"""A list storing a latitude and longitude.
|
||||
|
||||
.. versionadded:: 0.4
|
||||
"""
|
||||
|
||||
_geo_index = True
|
||||
|
||||
def validate(self, value):
|
||||
"""Make sure that a geo-value is of type (x, y)
|
||||
"""
|
||||
if not isinstance(value, (list, tuple)):
|
||||
self.error('GeoPointField can only accept tuples or lists '
|
||||
'of (x, y)')
|
||||
|
||||
if not len(value) == 2:
|
||||
self.error('Value must be a two-dimensional point')
|
||||
if (not isinstance(value[0], (float, int)) and
|
||||
not isinstance(value[1], (float, int))):
|
||||
self.error('Both values in point must be float or int')
|
||||
|
||||
|
||||
class SequenceField(BaseField):
|
||||
"""Provides a sequental counter see:
|
||||
http://www.mongodb.org/display/DOCS/Object+IDs#ObjectIDs-SequenceNumbers
|
||||
@ -1558,3 +1535,83 @@ class UUIDField(BaseField):
|
||||
value = uuid.UUID(value)
|
||||
except Exception, exc:
|
||||
self.error('Could not convert to UUID: %s' % exc)
|
||||
|
||||
|
||||
class GeoPointField(BaseField):
|
||||
"""A list storing a latitude and longitude.
|
||||
|
||||
.. versionadded:: 0.4
|
||||
"""
|
||||
|
||||
_geo_index = pymongo.GEO2D
|
||||
|
||||
def validate(self, value):
|
||||
"""Make sure that a geo-value is of type (x, y)
|
||||
"""
|
||||
if not isinstance(value, (list, tuple)):
|
||||
self.error('GeoPointField can only accept tuples or lists '
|
||||
'of (x, y)')
|
||||
|
||||
if not len(value) == 2:
|
||||
self.error("Value (%s) must be a two-dimensional point" % repr(value))
|
||||
elif (not isinstance(value[0], (float, int)) or
|
||||
not isinstance(value[1], (float, int))):
|
||||
self.error("Both values (%s) in point must be float or int" % repr(value))
|
||||
|
||||
|
||||
class PointField(GeoJsonBaseField):
|
||||
"""A geo json field storing a latitude and longitude.
|
||||
|
||||
The data is represented as:
|
||||
|
||||
.. code-block:: js
|
||||
|
||||
{ "type" : "Point" ,
|
||||
"coordinates" : [x, y]}
|
||||
|
||||
You can either pass a dict with the full information or a list
|
||||
to set the value.
|
||||
|
||||
Requires mongodb >= 2.4
|
||||
.. versionadded:: 0.8
|
||||
"""
|
||||
_type = "Point"
|
||||
|
||||
|
||||
class LineStringField(GeoJsonBaseField):
|
||||
"""A geo json field storing a line of latitude and longitude coordinates.
|
||||
|
||||
The data is represented as:
|
||||
|
||||
.. code-block:: js
|
||||
|
||||
{ "type" : "LineString" ,
|
||||
"coordinates" : [[x1, y1], [x1, y1] ... [xn, yn]]}
|
||||
|
||||
You can either pass a dict with the full information or a list of points.
|
||||
|
||||
Requires mongodb >= 2.4
|
||||
.. versionadded:: 0.8
|
||||
"""
|
||||
_type = "LineString"
|
||||
|
||||
|
||||
class PolygonField(GeoJsonBaseField):
|
||||
"""A geo json field storing a polygon of latitude and longitude coordinates.
|
||||
|
||||
The data is represented as:
|
||||
|
||||
.. code-block:: js
|
||||
|
||||
{ "type" : "Polygon" ,
|
||||
"coordinates" : [[[x1, y1], [x1, y1] ... [xn, yn]],
|
||||
[[x1, y1], [x1, y1] ... [xn, yn]]}
|
||||
|
||||
You can either pass a dict with the full information or a list
|
||||
of LineStrings. The first LineString is the exterior of the polygon;
|
||||
the rest are holes within it.
|
||||
|
||||
Requires mongodb >= 2.4
|
||||
.. versionadded:: 0.8
|
||||
"""
|
||||
_type = "Polygon"
|
||||
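A hedged sketch of the three new fields in use (the Route document is hypothetical):

    class Route(Document):
        start = PointField()
        path = LineStringField()
        area = PolygonField()

    Route(
        start=[-0.1, 51.5],                       # a plain [x, y] list...
        path=[[-0.1, 51.5], [-0.2, 51.6]],
        area={"type": "Polygon",                  # ...or a full GeoJSON dict
              "coordinates": [[[0, 0], [1, 0], [1, 1], [0, 0]]]},
    ).save()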
|
@ -26,6 +26,7 @@ __all__ = ('QuerySet', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL')
|
||||
|
||||
# The maximum number of items to display in a QuerySet.__repr__
|
||||
REPR_OUTPUT_SIZE = 20
|
||||
ITER_CHUNK_SIZE = 100
|
||||
|
||||
# Delete rules
|
||||
DO_NOTHING = 0
|
||||
@ -63,16 +64,18 @@ class QuerySet(object):
|
||||
self._none = False
|
||||
self._as_pymongo = False
|
||||
self._as_pymongo_coerce = False
|
||||
self._result_cache = []
|
||||
self._has_more = True
|
||||
self._len = None
|
||||
|
||||
# If inheritance is allowed, only return instances of the class
|
||||
# being queried and of its subclasses
|
||||
if document._meta.get('allow_inheritance') == True:
|
||||
if document._meta.get('allow_inheritance') is True:
|
||||
self._initial_query = {"_cls": {"$in": self._document._subclasses}}
|
||||
self._loaded_fields = QueryFieldList(always_include=['_cls'])
|
||||
self._cursor_obj = None
|
||||
self._limit = None
|
||||
self._skip = None
|
||||
self._slice = None
|
||||
self._hint = -1 # Using -1 as None is a valid value for hint
|
||||
|
||||
def __call__(self, q_obj=None, class_check=True, slave_okay=False,
|
||||
@ -110,13 +113,60 @@ class QuerySet(object):
|
||||
queryset._class_check = class_check
|
||||
return queryset
|
||||
|
||||
def __len__(self):
|
||||
"""Since __len__ is called quite frequently (for example, as part of
|
||||
list(qs)) we populate the result cache and cache the length.
|
||||
"""
|
||||
if self._len is not None:
|
||||
return self._len
|
||||
if self._has_more:
|
||||
# populate the cache
|
||||
list(self._iter_results())
|
||||
|
||||
self._len = len(self._result_cache)
|
||||
return self._len
|
||||
|
||||
def __iter__(self):
|
||||
"""Support iterator protocol"""
|
||||
queryset = self
|
||||
if queryset._iter:
|
||||
queryset = self.clone()
|
||||
queryset.rewind()
|
||||
return queryset
|
||||
"""Iteration utilises a results cache which iterates the cursor
|
||||
in batches of ``ITER_CHUNK_SIZE``.
|
||||
|
||||
If ``self._has_more`` is True the cursor hasn't been exhausted, so cache
|
||||
the next batch. Otherwise iterate over the result_cache.
|
||||
"""
|
||||
self._iter = True
|
||||
if self._has_more:
|
||||
return self._iter_results()
|
||||
|
||||
# iterating over the cache.
|
||||
return iter(self._result_cache)
|
||||
|
||||
def _iter_results(self):
|
||||
"""A generator for iterating over the result cache.
|
||||
|
||||
Also populates the cache if there are more possible results to yield.
|
||||
Raises StopIteration when there are no more results"""
|
||||
pos = 0
|
||||
while True:
|
||||
upper = len(self._result_cache)
|
||||
while pos < upper:
|
||||
yield self._result_cache[pos]
|
||||
pos = pos + 1
|
||||
if not self._has_more:
|
||||
raise StopIteration
|
||||
if len(self._result_cache) <= pos:
|
||||
self._populate_cache()
|
||||
|
||||
def _populate_cache(self):
|
||||
"""
|
||||
Populates the result cache with ``ITER_CHUNK_SIZE`` more entries
|
||||
(until the cursor is exhausted).
|
||||
"""
|
||||
if self._has_more:
|
||||
try:
|
||||
for i in xrange(ITER_CHUNK_SIZE):
|
||||
self._result_cache.append(self.next())
|
||||
except StopIteration:
|
||||
self._has_more = False
|
||||
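Taken together, the cache means a queryset can now be iterated twice without a second round-trip (Person is a hypothetical document):

    people = Person.objects
    first_pass = [p.name for p in people]    # cursor read, cache filled in
                                             # ITER_CHUNK_SIZE batches
    second_pass = [p.name for p in people]   # served from _result_cache
    assert len(people) == len(first_pass)    # len() reuses the cache too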
|
||||
def __getitem__(self, key):
|
||||
"""Support skip and limit using getitem and slicing syntax.
|
||||
@ -127,8 +177,10 @@ class QuerySet(object):
|
||||
if isinstance(key, slice):
|
||||
try:
|
||||
queryset._cursor_obj = queryset._cursor[key]
|
||||
queryset._slice = key
|
||||
queryset._skip, queryset._limit = key.start, key.stop
|
||||
if key.start and key.stop:
|
||||
queryset._limit = key.stop - key.start
|
||||
except IndexError, err:
|
||||
# PyMongo raises an error if key.start == key.stop, catch it,
|
||||
# bin it, kill it.
|
||||
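In effect, slicing maps straight onto skip/limit (hypothetical Person document):

    page = Person.objects[20:30]     # skip=20, limit=10
    first_ten = Person.objects[:10]  # limit=10, no skip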
@ -156,22 +208,15 @@ class QuerySet(object):
|
||||
|
||||
def __repr__(self):
|
||||
"""Provides the string representation of the QuerySet
|
||||
|
||||
.. versionchanged:: 0.6.13 Now doesn't modify the cursor
|
||||
"""
|
||||
|
||||
if self._iter:
|
||||
return '.. queryset mid-iteration ..'
|
||||
|
||||
data = []
|
||||
for i in xrange(REPR_OUTPUT_SIZE + 1):
|
||||
try:
|
||||
data.append(self.next())
|
||||
except StopIteration:
|
||||
break
|
||||
self._populate_cache()
|
||||
data = self._result_cache[:REPR_OUTPUT_SIZE + 1]
|
||||
if len(data) > REPR_OUTPUT_SIZE:
|
||||
data[-1] = "...(remaining elements truncated)..."
|
||||
|
||||
self.rewind()
|
||||
return repr(data)
|
||||
|
||||
# Core functions
|
||||
@ -200,7 +245,7 @@ class QuerySet(object):
|
||||
result = queryset.next()
|
||||
except StopIteration:
|
||||
msg = ("%s matching query does not exist."
|
||||
% queryset._document._class_name)
|
||||
% queryset._document._class_name)
|
||||
raise queryset._document.DoesNotExist(msg)
|
||||
try:
|
||||
queryset.next()
|
||||
@ -351,7 +396,12 @@ class QuerySet(object):
|
||||
"""
|
||||
if self._limit == 0:
|
||||
return 0
|
||||
return self._cursor.count(with_limit_and_skip=with_limit_and_skip)
|
||||
if with_limit_and_skip and self._len is not None:
|
||||
return self._len
|
||||
count = self._cursor.count(with_limit_and_skip=with_limit_and_skip)
|
||||
if with_limit_and_skip:
|
||||
self._len = count
|
||||
return count
|
||||
|
||||
def delete(self, write_concern=None):
|
||||
"""Delete the documents matched by the query.
|
||||
@ -426,7 +476,7 @@ class QuerySet(object):
|
||||
|
||||
.. versionadded:: 0.2
|
||||
"""
|
||||
if not update:
|
||||
if not update and not upsert:
|
||||
raise OperationError("No update parameters, would remove data")
|
||||
|
||||
if not write_concern:
|
||||
@ -469,7 +519,8 @@ class QuerySet(object):
|
||||
|
||||
.. versionadded:: 0.2
|
||||
"""
|
||||
return self.update(upsert=upsert, multi=False, write_concern=None, **update)
|
||||
return self.update(
|
||||
upsert=upsert, multi=False, write_concern=write_concern, **update)
|
||||
|
||||
def with_id(self, object_id):
|
||||
"""Retrieve the object matching the id provided. Uses `object_id` only
|
||||
@ -518,6 +569,15 @@ class QuerySet(object):
|
||||
queryset._none = True
|
||||
return queryset
|
||||
|
||||
def no_sub_classes(self):
|
||||
"""
|
||||
Only return instances of this document and not any inherited documents
|
||||
"""
|
||||
if self._document._meta.get('allow_inheritance') is True:
|
||||
self._initial_query = {"_cls": self._document._class_name}
|
||||
|
||||
return self
|
||||
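A one-line usage sketch (Person is hypothetical and has inheritance enabled):

    exact_people = Person.objects.no_sub_classes()   # subclasses excluded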
|
||||
def clone(self):
|
||||
"""Creates a copy of the current
|
||||
:class:`~mongoengine.queryset.QuerySet`
|
||||
@ -536,20 +596,15 @@ class QuerySet(object):
|
||||
val = getattr(self, prop)
|
||||
setattr(c, prop, copy.copy(val))
|
||||
|
||||
if self._slice:
|
||||
c._slice = self._slice
|
||||
|
||||
if self._cursor_obj:
|
||||
c._cursor_obj = self._cursor_obj.clone()
|
||||
|
||||
if self._slice:
|
||||
c._cursor[self._slice]
|
||||
|
||||
return c
|
||||
|
||||
def select_related(self, max_depth=1):
|
||||
"""Handles dereferencing of :class:`~bson.dbref.DBRef` objects to
|
||||
a maximum depth in order to cut down the number of queries to mongodb.
|
||||
"""Handles dereferencing of :class:`~bson.dbref.DBRef` objects or
|
||||
:class:`~bson.object_id.ObjectId` to a maximum depth in order to cut down
|
||||
the number of queries to mongodb.
|
||||
|
||||
.. versionadded:: 0.5
|
||||
"""
|
||||
@ -570,7 +625,6 @@ class QuerySet(object):
|
||||
else:
|
||||
queryset._cursor.limit(n)
|
||||
queryset._limit = n
|
||||
|
||||
# Return self to allow chaining
|
||||
return queryset
|
||||
|
||||
@ -608,6 +662,9 @@ class QuerySet(object):
|
||||
|
||||
:param field: the field to select distinct values from
|
||||
|
||||
.. note:: This is a command and won't take ordering or limit into
|
||||
account.
|
||||
|
||||
.. versionadded:: 0.4
|
||||
.. versionchanged:: 0.5 - Fixed handling references
|
||||
.. versionchanged:: 0.6 - Improved db_field reference handling
|
||||
@ -819,8 +876,7 @@ class QuerySet(object):
|
||||
|
||||
def to_json(self):
|
||||
"""Converts a queryset to JSON"""
|
||||
queryset = self.clone()
|
||||
return json_util.dumps(queryset._collection_obj.find(queryset._query))
|
||||
return json_util.dumps(self.as_pymongo())
|
||||
|
||||
def from_json(self, json_data):
|
||||
"""Converts json data to unsaved objects"""
|
||||
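A round-trip sketch (Person is hypothetical): to_json now serialises through as_pymongo(), and from_json rebuilds unsaved instances:

    json_data = Person.objects(name="Bob").to_json()
    people = Person.objects.from_json(json_data)   # unsaved Person objects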
@ -912,7 +968,7 @@ class QuerySet(object):
|
||||
mr_args['out'] = output
|
||||
|
||||
results = getattr(queryset._collection, map_reduce_function)(
|
||||
map_f, reduce_f, **mr_args)
|
||||
map_f, reduce_f, **mr_args)
|
||||
|
||||
if map_reduce_function == 'map_reduce':
|
||||
results = results.find()
|
||||
@ -1049,7 +1105,7 @@ class QuerySet(object):
|
||||
""")
|
||||
|
||||
for result in self.map_reduce(map_func, reduce_func,
|
||||
finalize_f=finalize_func, output='inline'):
|
||||
finalize_f=finalize_func, output='inline'):
|
||||
return result.value
|
||||
else:
|
||||
return 0
|
||||
@ -1062,11 +1118,11 @@ class QuerySet(object):
|
||||
.. note::
|
||||
|
||||
Can only do direct simple mappings and cannot map across
|
||||
:class:`~mongoengine.ReferenceField` or
|
||||
:class:`~mongoengine.GenericReferenceField` for more complex
|
||||
:class:`~mongoengine.fields.ReferenceField` or
|
||||
:class:`~mongoengine.fields.GenericReferenceField` for more complex
|
||||
counting, a manual map-reduce call would be required.
|
||||
|
||||
If the field is a :class:`~mongoengine.ListField`, the items within
|
||||
If the field is a :class:`~mongoengine.fields.ListField`, the items within
|
||||
each list will be counted individually.
|
||||
|
||||
:param field: the field to use
|
||||
@ -1086,20 +1142,18 @@ class QuerySet(object):
|
||||
def next(self):
|
||||
"""Wrap the result in a :class:`~mongoengine.Document` object.
|
||||
"""
|
||||
self._iter = True
|
||||
try:
|
||||
if self._limit == 0 or self._none:
|
||||
raise StopIteration
|
||||
if self._scalar:
|
||||
return self._get_scalar(self._document._from_son(
|
||||
self._cursor.next()))
|
||||
if self._as_pymongo:
|
||||
return self._get_as_pymongo(self._cursor.next())
|
||||
if self._limit == 0 or self._none:
|
||||
raise StopIteration
|
||||
|
||||
return self._document._from_son(self._cursor.next())
|
||||
except StopIteration, e:
|
||||
self.rewind()
|
||||
raise e
|
||||
raw_doc = self._cursor.next()
|
||||
if self._as_pymongo:
|
||||
return self._get_as_pymongo(raw_doc)
|
||||
|
||||
doc = self._document._from_son(raw_doc)
|
||||
if self._scalar:
|
||||
return self._get_scalar(doc)
|
||||
|
||||
return doc
|
||||
|
||||
def rewind(self):
|
||||
"""Rewind the cursor to its unevaluated state.
|
||||
@ -1152,7 +1206,7 @@ class QuerySet(object):
|
||||
self._cursor_obj.sort(order)
|
||||
|
||||
if self._limit is not None:
|
||||
self._cursor_obj.limit(self._limit - (self._skip or 0))
|
||||
self._cursor_obj.limit(self._limit)
|
||||
|
||||
if self._skip is not None:
|
||||
self._cursor_obj.skip(self._skip)
|
||||
@ -1367,7 +1421,15 @@ class QuerySet(object):
|
||||
new_data = {}
|
||||
for key, value in data.iteritems():
|
||||
new_path = '%s.%s' % (path, key) if path else key
|
||||
if all_fields or new_path in self.__as_pymongo_fields:
|
||||
|
||||
if all_fields:
|
||||
include_field = True
|
||||
elif self._loaded_fields.value == QueryFieldList.ONLY:
|
||||
include_field = new_path in self.__as_pymongo_fields
|
||||
else:
|
||||
include_field = new_path not in self.__as_pymongo_fields
|
||||
|
||||
if include_field:
|
||||
new_data[key] = clean(value, path=new_path)
|
||||
data = new_data
|
||||
elif isinstance(data, list):
|
||||
@ -1412,15 +1474,14 @@ class QuerySet(object):
|
||||
|
||||
code = re.sub(u'\[\s*~([A-Za-z_][A-Za-z_0-9.]+?)\s*\]', field_sub, code)
|
||||
code = re.sub(u'\{\{\s*~([A-Za-z_][A-Za-z_0-9.]+?)\s*\}\}', field_path_sub,
|
||||
code)
|
||||
code)
|
||||
return code
|
||||
|
||||
# Deprecated
|
||||
|
||||
def ensure_index(self, **kwargs):
|
||||
"""Deprecated use :func:`~Document.ensure_index`"""
|
||||
msg = ("Doc.objects()._ensure_index() is deprecated. "
|
||||
"Use Doc.ensure_index() instead.")
|
||||
"Use Doc.ensure_index() instead.")
|
||||
warnings.warn(msg, DeprecationWarning)
|
||||
self._document.__class__.ensure_index(**kwargs)
|
||||
return self
|
||||
@ -1428,6 +1489,6 @@ class QuerySet(object):
|
||||
def _ensure_indexes(self):
|
||||
"""Deprecated use :func:`~Document.ensure_indexes`"""
|
||||
msg = ("Doc.objects()._ensure_indexes() is deprecated. "
|
||||
"Use Doc.ensure_indexes() instead.")
|
||||
"Use Doc.ensure_indexes() instead.")
|
||||
warnings.warn(msg, DeprecationWarning)
|
||||
self._document.__class__.ensure_indexes()
|
||||
|
@ -1,5 +1,6 @@
|
||||
from collections import defaultdict
|
||||
|
||||
import pymongo
|
||||
from bson import SON
|
||||
|
||||
from mongoengine.common import _import_class
|
||||
@ -12,7 +13,9 @@ COMPARISON_OPERATORS = ('ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod',
|
||||
'all', 'size', 'exists', 'not')
|
||||
GEO_OPERATORS = ('within_distance', 'within_spherical_distance',
|
||||
'within_box', 'within_polygon', 'near', 'near_sphere',
|
||||
'max_distance')
|
||||
'max_distance', 'geo_within', 'geo_within_box',
|
||||
'geo_within_polygon', 'geo_within_center',
|
||||
'geo_within_sphere', 'geo_intersects')
|
||||
STRING_OPERATORS = ('contains', 'icontains', 'startswith',
|
||||
'istartswith', 'endswith', 'iendswith',
|
||||
'exact', 'iexact')
|
||||
@ -21,7 +24,8 @@ MATCH_OPERATORS = (COMPARISON_OPERATORS + GEO_OPERATORS +
|
||||
STRING_OPERATORS + CUSTOM_OPERATORS)
|
||||
|
||||
UPDATE_OPERATORS = ('set', 'unset', 'inc', 'dec', 'pop', 'push',
|
||||
'push_all', 'pull', 'pull_all', 'add_to_set')
|
||||
'push_all', 'pull', 'pull_all', 'add_to_set',
|
||||
'set_on_insert')
|
||||
|
||||
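Hedged examples of the new operator spellings (Place and Event are hypothetical documents; point is a PointField):

    # 2dsphere / GeoJSON style queries
    Place.objects(point__geo_within_box=[(-125.0, 35.0), (-100.0, 40.0)])
    Place.objects(point__geo_intersects=[40, 6])

    # $setOnInsert: only applied when an upsert actually inserts
    Event.objects(slug="launch").update(set_on_insert__views=0, upsert=True)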
|
||||
def query(_doc_cls=None, _field_operation=False, **query):
|
||||
@ -81,30 +85,14 @@ def query(_doc_cls=None, _field_operation=False, **query):
|
||||
value = field
|
||||
else:
|
||||
value = field.prepare_query_value(op, value)
|
||||
elif op in ('in', 'nin', 'all', 'near'):
|
||||
elif op in ('in', 'nin', 'all', 'near') and not isinstance(value, dict):
|
||||
# 'in', 'nin' and 'all' require a list of values
|
||||
value = [field.prepare_query_value(op, v) for v in value]
|
||||
|
||||
# if op and op not in COMPARISON_OPERATORS:
|
||||
if op:
|
||||
if op in GEO_OPERATORS:
|
||||
if op == "within_distance":
|
||||
value = {'$within': {'$center': value}}
|
||||
elif op == "within_spherical_distance":
|
||||
value = {'$within': {'$centerSphere': value}}
|
||||
elif op == "within_polygon":
|
||||
value = {'$within': {'$polygon': value}}
|
||||
elif op == "near":
|
||||
value = {'$near': value}
|
||||
elif op == "near_sphere":
|
||||
value = {'$nearSphere': value}
|
||||
elif op == 'within_box':
|
||||
value = {'$within': {'$box': value}}
|
||||
elif op == "max_distance":
|
||||
value = {'$maxDistance': value}
|
||||
else:
|
||||
raise NotImplementedError("Geo method '%s' has not "
|
||||
"been implemented" % op)
|
||||
value = _geo_operator(field, op, value)
|
||||
elif op in CUSTOM_OPERATORS:
|
||||
if op == 'match':
|
||||
value = {"$elemMatch": value}
|
||||
@ -176,7 +164,9 @@ def update(_doc_cls=None, **update):
|
||||
if value > 0:
|
||||
value = -value
|
||||
elif op == 'add_to_set':
|
||||
op = op.replace('_to_set', 'ToSet')
|
||||
op = 'addToSet'
|
||||
elif op == 'set_on_insert':
|
||||
op = "setOnInsert"
|
||||
|
||||
match = None
|
||||
if parts[-1] in COMPARISON_OPERATORS:
|
||||
@ -250,3 +240,76 @@ def update(_doc_cls=None, **update):
|
||||
mongo_update[key].update(value)
|
||||
|
||||
return mongo_update
|
||||
|
||||
|
||||
def _geo_operator(field, op, value):
|
||||
"""Helper to return the query for a given geo query"""
|
||||
if field._geo_index == pymongo.GEO2D:
|
||||
if op == "within_distance":
|
||||
value = {'$within': {'$center': value}}
|
||||
elif op == "within_spherical_distance":
|
||||
value = {'$within': {'$centerSphere': value}}
|
||||
elif op == "within_polygon":
|
||||
value = {'$within': {'$polygon': value}}
|
||||
elif op == "near":
|
||||
value = {'$near': value}
|
||||
elif op == "near_sphere":
|
||||
value = {'$nearSphere': value}
|
||||
elif op == 'within_box':
|
||||
value = {'$within': {'$box': value}}
|
||||
elif op == "max_distance":
|
||||
value = {'$maxDistance': value}
|
||||
else:
|
||||
raise NotImplementedError("Geo method '%s' has not "
|
||||
"been implemented for a GeoPointField" % op)
|
||||
else:
|
||||
if op == "geo_within":
|
||||
value = {"$geoWithin": _infer_geometry(value)}
|
||||
elif op == "geo_within_box":
|
||||
value = {"$geoWithin": {"$box": value}}
|
||||
elif op == "geo_within_polygon":
|
||||
value = {"$geoWithin": {"$polygon": value}}
|
||||
elif op == "geo_within_center":
|
||||
value = {"$geoWithin": {"$center": value}}
|
||||
elif op == "geo_within_sphere":
|
||||
value = {"$geoWithin": {"$centerSphere": value}}
|
||||
elif op == "geo_intersects":
|
||||
value = {"$geoIntersects": _infer_geometry(value)}
|
||||
elif op == "near":
|
||||
value = {'$near': _infer_geometry(value)}
|
||||
elif op == "max_distance":
|
||||
value = {'$maxDistance': value}
|
||||
else:
|
||||
raise NotImplementedError("Geo method '%s' has not "
|
||||
"been implemented for a %s " % (op, field._name))
|
||||
return value
|
||||
|
||||
|
||||
def _infer_geometry(value):
|
||||
"""Helper method that tries to infer the $geometry shape for a given value"""
|
||||
if isinstance(value, dict):
|
||||
if "$geometry" in value:
|
||||
return value
|
||||
elif 'coordinates' in value and 'type' in value:
|
||||
return {"$geometry": value}
|
||||
raise InvalidQueryError("Invalid $geometry dictionary should have "
|
||||
"type and coordinates keys")
|
||||
elif isinstance(value, (list, set)):
|
||||
try:
|
||||
value[0][0][0]
|
||||
return {"$geometry": {"type": "Polygon", "coordinates": value}}
|
||||
except (TypeError, IndexError):
|
||||
pass
|
||||
try:
|
||||
value[0][0]
|
||||
return {"$geometry": {"type": "LineString", "coordinates": value}}
|
||||
except (TypeError, IndexError):
|
||||
pass
|
||||
try:
|
||||
value[0]
|
||||
return {"$geometry": {"type": "Point", "coordinates": value}}
|
||||
except (TypeError, IndexError):
|
||||
pass
|
||||
|
||||
raise InvalidQueryError("Invalid $geometry data. Can be either a dictionary "
|
||||
"or (nested) lists of coordinate(s)")
|
||||
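The nesting depth of a list value is what drives the inferred type:

    _infer_geometry([40, 5])
    # -> {"$geometry": {"type": "Point", "coordinates": [40, 5]}}
    _infer_geometry([[40, 5], [41, 6]])
    # -> {"$geometry": {"type": "LineString", "coordinates": [[40, 5], [41, 6]]}}
    _infer_geometry([[[40, 5], [41, 6], [40, 5]]])
    # -> {"$geometry": {"type": "Polygon", "coordinates": [[[40, 5], [41, 6], [40, 5]]]}}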
|
@ -5,7 +5,7 @@
|
||||
%define srcname mongoengine
|
||||
|
||||
Name: python-%{srcname}
|
||||
Version: 0.7.10
|
||||
Version: 0.8.0.RC4
|
||||
Release: 1%{?dist}
|
||||
Summary: A Python Document-Object Mapper for working with MongoDB
|
||||
|
||||
|
6
setup.py
@ -51,13 +51,13 @@ CLASSIFIERS = [
|
||||
extra_opts = {}
|
||||
if sys.version_info[0] == 3:
|
||||
extra_opts['use_2to3'] = True
|
||||
extra_opts['tests_require'] = ['nose', 'coverage', 'blinker']
|
||||
extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'jinja2']
|
||||
extra_opts['packages'] = find_packages(exclude=('tests',))
|
||||
if "test" in sys.argv or "nosetests" in sys.argv:
|
||||
extra_opts['packages'].append("tests")
|
||||
extra_opts['package_data'] = {"tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]}
|
||||
else:
|
||||
extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.4.2', 'PIL']
|
||||
extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.4.2', 'PIL', 'jinja2']
|
||||
extra_opts['packages'] = find_packages(exclude=('tests',))
|
||||
|
||||
setup(name='mongoengine',
|
||||
@ -74,7 +74,7 @@ setup(name='mongoengine',
|
||||
long_description=LONG_DESCRIPTION,
|
||||
platforms=['any'],
|
||||
classifiers=CLASSIFIERS,
|
||||
install_requires=['pymongo'],
|
||||
install_requires=['pymongo>=2.5'],
|
||||
test_suite='nose.collector',
|
||||
**extra_opts
|
||||
)
|
||||
|
@ -1,12 +1,11 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import with_statement
|
||||
import sys
|
||||
sys.path[0:0] = [""]
|
||||
import unittest
|
||||
|
||||
from mongoengine import *
|
||||
|
||||
from mongoengine.queryset import NULLIFY
|
||||
from mongoengine.queryset import NULLIFY, PULL
|
||||
from mongoengine.connection import get_db
|
||||
|
||||
__all__ = ("ClassMethodsTest", )
|
||||
@ -86,6 +85,25 @@ class ClassMethodsTest(unittest.TestCase):
|
||||
self.assertEqual(self.Person._meta['delete_rules'],
|
||||
{(Job, 'employee'): NULLIFY})
|
||||
|
||||
def test_register_delete_rule_inherited(self):
|
||||
|
||||
class Vaccine(Document):
|
||||
name = StringField(required=True)
|
||||
|
||||
meta = {"indexes": ["name"]}
|
||||
|
||||
class Animal(Document):
|
||||
family = StringField(required=True)
|
||||
vaccine_made = ListField(ReferenceField("Vaccine", reverse_delete_rule=PULL))
|
||||
|
||||
meta = {"allow_inheritance": True, "indexes": ["family"]}
|
||||
|
||||
class Cat(Animal):
|
||||
name = StringField(required=True)
|
||||
|
||||
self.assertEqual(Vaccine._meta['delete_rules'][(Animal, 'vaccine_made')], PULL)
|
||||
self.assertEqual(Vaccine._meta['delete_rules'][(Cat, 'vaccine_made')], PULL)
|
||||
|
||||
def test_collection_naming(self):
|
||||
"""Ensure that a collection with a specified name may be used.
|
||||
"""
|
||||
|
@ -31,8 +31,9 @@ class DynamicTest(unittest.TestCase):
|
||||
|
||||
self.assertEqual(p.to_mongo(), {"_cls": "Person", "name": "James",
|
||||
"age": 34})
|
||||
|
||||
self.assertEqual(p.to_mongo().keys(), ["_cls", "name", "age"])
|
||||
p.save()
|
||||
self.assertEqual(p.to_mongo().keys(), ["_id", "_cls", "name", "age"])
|
||||
|
||||
self.assertEqual(self.Person.objects.first().age, 34)
|
||||
|
||||
|
@ -1,5 +1,4 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import with_statement
|
||||
import unittest
|
||||
import sys
|
||||
sys.path[0:0] = [""]
|
||||
@ -217,7 +216,7 @@ class IndexesTest(unittest.TestCase):
|
||||
}
|
||||
|
||||
self.assertEqual([{'fields': [('location.point', '2d')]}],
|
||||
Place._meta['index_specs'])
|
||||
Place._meta['index_specs'])
|
||||
|
||||
Place.ensure_indexes()
|
||||
info = Place._get_collection().index_information()
|
||||
@ -231,8 +230,7 @@ class IndexesTest(unittest.TestCase):
|
||||
location = DictField()
|
||||
|
||||
class Place(Document):
|
||||
current = DictField(
|
||||
field=EmbeddedDocumentField('EmbeddedLocation'))
|
||||
current = DictField(field=EmbeddedDocumentField('EmbeddedLocation'))
|
||||
meta = {
|
||||
'allow_inheritance': True,
|
||||
'indexes': [
|
||||
@ -241,7 +239,7 @@ class IndexesTest(unittest.TestCase):
|
||||
}
|
||||
|
||||
self.assertEqual([{'fields': [('current.location.point', '2d')]}],
|
||||
Place._meta['index_specs'])
|
||||
Place._meta['index_specs'])
|
||||
|
||||
Place.ensure_indexes()
|
||||
info = Place._get_collection().index_information()
|
||||
@ -264,7 +262,7 @@ class IndexesTest(unittest.TestCase):
|
||||
|
||||
self.assertEqual([{'fields': [('addDate', -1)], 'unique': True,
|
||||
'sparse': True}],
|
||||
BlogPost._meta['index_specs'])
|
||||
BlogPost._meta['index_specs'])
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
@ -382,8 +380,7 @@ class IndexesTest(unittest.TestCase):
|
||||
self.assertEqual(sorted(info.keys()), ['_id_', 'tags.tag_1'])
|
||||
|
||||
post1 = BlogPost(title="Embedded Indexes tests in place",
|
||||
tags=[Tag(name="about"), Tag(name="time")]
|
||||
)
|
||||
tags=[Tag(name="about"), Tag(name="time")])
|
||||
post1.save()
|
||||
BlogPost.drop_collection()
|
||||
|
||||
@ -400,29 +397,6 @@ class IndexesTest(unittest.TestCase):
|
||||
info = RecursiveDocument._get_collection().index_information()
|
||||
self.assertEqual(sorted(info.keys()), ['_cls_1', '_id_'])
|
||||
|
||||
def test_geo_indexes_recursion(self):
|
||||
|
||||
class Location(Document):
|
||||
name = StringField()
|
||||
location = GeoPointField()
|
||||
|
||||
class Parent(Document):
|
||||
name = StringField()
|
||||
location = ReferenceField(Location, dbref=False)
|
||||
|
||||
Location.drop_collection()
|
||||
Parent.drop_collection()
|
||||
|
||||
list(Parent.objects)
|
||||
|
||||
collection = Parent._get_collection()
|
||||
info = collection.index_information()
|
||||
|
||||
self.assertFalse('location_2d' in info)
|
||||
|
||||
self.assertEqual(len(Parent._geo_indices()), 0)
|
||||
self.assertEqual(len(Location._geo_indices()), 1)
|
||||
|
||||
def test_covered_index(self):
|
||||
"""Ensure that covered indexes can be used
|
||||
"""
|
||||
@ -433,7 +407,7 @@ class IndexesTest(unittest.TestCase):
|
||||
meta = {
|
||||
'indexes': ['a'],
|
||||
'allow_inheritance': False
|
||||
}
|
||||
}
|
||||
|
||||
Test.drop_collection()
|
||||
|
||||
@ -633,7 +607,7 @@ class IndexesTest(unittest.TestCase):
|
||||
list(Log.objects)
|
||||
info = Log.objects._collection.index_information()
|
||||
self.assertEqual(3600,
|
||||
info['created_1']['expireAfterSeconds'])
|
||||
info['created_1']['expireAfterSeconds'])
|
||||
|
||||
def test_unique_and_indexes(self):
|
||||
"""Ensure that 'unique' constraints aren't overridden by
|
||||
|
@ -143,7 +143,7 @@ class InheritanceTest(unittest.TestCase):
|
||||
|
||||
self.assertEqual(Animal._superclasses, ())
|
||||
self.assertEqual(Animal._subclasses, ('Animal', 'Animal.Fish',
|
||||
'Animal.Fish.Pike'))
|
||||
'Animal.Fish.Pike'))
|
||||
|
||||
self.assertEqual(Fish._superclasses, ('Animal', ))
|
||||
self.assertEqual(Fish._subclasses, ('Animal.Fish', 'Animal.Fish.Pike'))
|
||||
@ -168,6 +168,26 @@ class InheritanceTest(unittest.TestCase):
|
||||
self.assertEqual(Employee._get_collection_name(),
|
||||
Person._get_collection_name())
|
||||
|
||||
def test_inheritance_to_mongo_keys(self):
|
||||
"""Ensure that document may inherit fields from a superclass document.
|
||||
"""
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
|
||||
meta = {'allow_inheritance': True}
|
||||
|
||||
class Employee(Person):
|
||||
salary = IntField()
|
||||
|
||||
self.assertEqual(['age', 'id', 'name', 'salary'],
|
||||
sorted(Employee._fields.keys()))
|
||||
self.assertEqual(Person(name="Bob", age=35).to_mongo().keys(),
|
||||
['_cls', 'name', 'age'])
|
||||
self.assertEqual(Employee(name="Bob", age=35, salary=0).to_mongo().keys(),
|
||||
['_cls', 'name', 'age', 'salary'])
|
||||
self.assertEqual(Employee._get_collection_name(),
|
||||
Person._get_collection_name())
|
||||
|
||||
def test_polymorphic_queries(self):
|
||||
"""Ensure that the correct subclasses are returned from a query
|
||||
@ -197,7 +217,6 @@ class InheritanceTest(unittest.TestCase):
|
||||
classes = [obj.__class__ for obj in Human.objects]
|
||||
self.assertEqual(classes, [Human])
|
||||
|
||||
|
||||
def test_allow_inheritance(self):
|
||||
"""Ensure that inheritance may be disabled on simple classes and that
|
||||
_cls and _subclasses will not be used.
|
||||
@ -213,8 +232,8 @@ class InheritanceTest(unittest.TestCase):
|
||||
self.assertRaises(ValueError, create_dog_class)
|
||||
|
||||
# Check that _cls etc aren't present on simple documents
|
||||
dog = Animal(name='dog')
|
||||
dog.save()
|
||||
dog = Animal(name='dog').save()
|
||||
self.assertEqual(dog.to_mongo().keys(), ['_id', 'name'])
|
||||
|
||||
collection = self.db[Animal._get_collection_name()]
|
||||
obj = collection.find_one()
|
||||
|
@ -1,5 +1,4 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import with_statement
|
||||
import sys
|
||||
sys.path[0:0] = [""]
|
||||
|
||||
@ -320,8 +319,8 @@ class InstanceTest(unittest.TestCase):
|
||||
|
||||
Location.drop_collection()
|
||||
|
||||
self.assertEquals(Area, get_document("Area"))
|
||||
self.assertEquals(Area, get_document("Location.Area"))
|
||||
self.assertEqual(Area, get_document("Area"))
|
||||
self.assertEqual(Area, get_document("Location.Area"))
|
||||
|
||||
def test_creation(self):
|
||||
"""Ensure that document may be created using keyword arguments.
|
||||
@ -428,6 +427,21 @@ class InstanceTest(unittest.TestCase):
|
||||
self.assertFalse('age' in person)
|
||||
self.assertFalse('nationality' in person)
|
||||
|
||||
def test_embedded_document_to_mongo(self):
|
||||
class Person(EmbeddedDocument):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
class Employee(Person):
|
||||
salary = IntField()
|
||||
|
||||
self.assertEqual(Person(name="Bob", age=35).to_mongo().keys(),
|
||||
['_cls', 'name', 'age'])
|
||||
self.assertEqual(Employee(name="Bob", age=35, salary=0).to_mongo().keys(),
|
||||
['_cls', 'name', 'age', 'salary'])
|
||||
|
||||
def test_embedded_document(self):
|
||||
"""Ensure that embedded documents are set up correctly.
|
||||
"""
|
||||
@ -494,12 +508,12 @@ class InstanceTest(unittest.TestCase):
|
||||
t = TestDocument(status="published")
|
||||
t.save(clean=False)
|
||||
|
||||
self.assertEquals(t.pub_date, None)
|
||||
self.assertEqual(t.pub_date, None)
|
||||
|
||||
t = TestDocument(status="published")
|
||||
t.save(clean=True)
|
||||
|
||||
self.assertEquals(type(t.pub_date), datetime)
|
||||
self.assertEqual(type(t.pub_date), datetime)
|
||||
|
||||
def test_document_embedded_clean(self):
|
||||
class TestEmbeddedDocument(EmbeddedDocument):
|
||||
@ -531,7 +545,7 @@ class InstanceTest(unittest.TestCase):
|
||||
self.assertEqual(e.to_dict(), {'doc': {'__all__': expect_msg}})
|
||||
|
||||
t = TestDocument(doc=TestEmbeddedDocument(x=10, y=25)).save()
|
||||
self.assertEquals(t.doc.z, 35)
|
||||
self.assertEqual(t.doc.z, 35)
|
||||
|
||||
# Asserts not raises
|
||||
t = TestDocument(doc=TestEmbeddedDocument(x=15, y=35, z=5))
|
||||
@ -838,6 +852,14 @@ class InstanceTest(unittest.TestCase):
|
||||
self.assertEqual(person.name, None)
|
||||
self.assertEqual(person.age, None)
|
||||
|
||||
def test_inserts_if_you_set_the_pk(self):
|
||||
p1 = self.Person(name='p1', id=bson.ObjectId()).save()
|
||||
p2 = self.Person(name='p2')
|
||||
p2.id = bson.ObjectId()
|
||||
p2.save()
|
||||
|
||||
self.assertEqual(2, self.Person.objects.count())
|
||||
|
||||
def test_can_save_if_not_included(self):
|
||||
|
||||
class EmbeddedDoc(EmbeddedDocument):
|
||||
@ -1881,11 +1903,11 @@ class InstanceTest(unittest.TestCase):
|
||||
|
||||
A.objects.all()
|
||||
|
||||
self.assertEquals('testdb-2', B._meta.get('db_alias'))
|
||||
self.assertEquals('mongoenginetest',
|
||||
A._get_collection().database.name)
|
||||
self.assertEquals('mongoenginetest2',
|
||||
B._get_collection().database.name)
|
||||
self.assertEqual('testdb-2', B._meta.get('db_alias'))
|
||||
self.assertEqual('mongoenginetest',
|
||||
A._get_collection().database.name)
|
||||
self.assertEqual('mongoenginetest2',
|
||||
B._get_collection().database.name)
|
||||
|
||||
def test_db_alias_propagates(self):
|
||||
"""db_alias propagates?
|
||||
|
@ -1,2 +1,3 @@
|
||||
from fields import *
|
||||
from file_tests import *
|
||||
from geo import *
|
@ -1,5 +1,4 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import with_statement
|
||||
import sys
|
||||
sys.path[0:0] = [""]
|
||||
|
||||
@ -409,6 +408,27 @@ class FieldTest(unittest.TestCase):
|
||||
log.time = '1pm'
|
||||
self.assertRaises(ValidationError, log.validate)
|
||||
|
||||
def test_datetime_tz_aware_mark_as_changed(self):
|
||||
from mongoengine import connection
|
||||
|
||||
# Reset the connections
|
||||
connection._connection_settings = {}
|
||||
connection._connections = {}
|
||||
connection._dbs = {}
|
||||
|
||||
connect(db='mongoenginetest', tz_aware=True)
|
||||
|
||||
class LogEntry(Document):
|
||||
time = DateTimeField()
|
||||
|
||||
LogEntry.drop_collection()
|
||||
|
||||
LogEntry(time=datetime.datetime(2013, 1, 1, 0, 0, 0)).save()
|
||||
|
||||
log = LogEntry.objects.first()
|
||||
log.time = datetime.datetime(2013, 1, 1, 0, 0, 0)
|
||||
self.assertEqual(['time'], log._changed_fields)
|
||||
|
||||
def test_datetime(self):
|
||||
"""Tests showing pymongo datetime fields handling of microseconds.
|
||||
Microseconds are rounded to the nearest millisecond and pre UTC
|
||||
@ -1841,45 +1861,6 @@ class FieldTest(unittest.TestCase):
|
||||
|
||||
Shirt.drop_collection()
|
||||
|
||||
def test_geo_indexes(self):
|
||||
"""Ensure that indexes are created automatically for GeoPointFields.
|
||||
"""
|
||||
class Event(Document):
|
||||
title = StringField()
|
||||
location = GeoPointField()
|
||||
|
||||
Event.drop_collection()
|
||||
event = Event(title="Coltrane Motion @ Double Door",
|
||||
location=[41.909889, -87.677137])
|
||||
event.save()
|
||||
|
||||
info = Event.objects._collection.index_information()
|
||||
self.assertTrue(u'location_2d' in info)
|
||||
self.assertTrue(info[u'location_2d']['key'] == [(u'location', u'2d')])
|
||||
|
||||
Event.drop_collection()
|
||||
|
||||
def test_geo_embedded_indexes(self):
|
||||
"""Ensure that indexes are created automatically for GeoPointFields on
|
||||
embedded documents.
|
||||
"""
|
||||
class Venue(EmbeddedDocument):
|
||||
location = GeoPointField()
|
||||
name = StringField()
|
||||
|
||||
class Event(Document):
|
||||
title = StringField()
|
||||
venue = EmbeddedDocumentField(Venue)
|
||||
|
||||
Event.drop_collection()
|
||||
venue = Venue(name="Double Door", location=[41.909889, -87.677137])
|
||||
event = Event(title="Coltrane Motion", venue=venue)
|
||||
event.save()
|
||||
|
||||
info = Event.objects._collection.index_information()
|
||||
self.assertTrue(u'location_2d' in info)
|
||||
self.assertTrue(info[u'location_2d']['key'] == [(u'location', u'2d')])
|
||||
|
||||
def test_ensure_unique_default_instances(self):
|
||||
"""Ensure that every field has it's own unique default instance."""
|
||||
class D(Document):
|
||||
|
@ -1,5 +1,4 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import with_statement
|
||||
import sys
|
||||
sys.path[0:0] = [""]
|
||||
|
||||
|
274
tests/fields/geo.py
Normal file
@ -0,0 +1,274 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import sys
|
||||
sys.path[0:0] = [""]
|
||||
|
||||
import unittest
|
||||
|
||||
from mongoengine import *
|
||||
from mongoengine.connection import get_db
|
||||
|
||||
__all__ = ("GeoFieldTest", )
|
||||
|
||||
|
||||
class GeoFieldTest(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
connect(db='mongoenginetest')
|
||||
self.db = get_db()
|
||||
|
||||
def _test_for_expected_error(self, Cls, loc, expected):
|
||||
try:
|
||||
Cls(loc=loc).validate()
|
||||
self.fail()
|
||||
except ValidationError, e:
|
||||
self.assertEqual(expected, e.to_dict()['loc'])
|
||||
|
||||
def test_geopoint_validation(self):
|
||||
class Location(Document):
|
||||
loc = GeoPointField()
|
||||
|
||||
invalid_coords = [{"x": 1, "y": 2}, 5, "a"]
|
||||
expected = 'GeoPointField can only accept tuples or lists of (x, y)'
|
||||
|
||||
for coord in invalid_coords:
|
||||
self._test_for_expected_error(Location, coord, expected)
|
||||
|
||||
invalid_coords = [[], [1], [1, 2, 3]]
|
||||
for coord in invalid_coords:
|
||||
expected = "Value (%s) must be a two-dimensional point" % repr(coord)
|
||||
self._test_for_expected_error(Location, coord, expected)
|
||||
|
||||
invalid_coords = [[{}, {}], ("a", "b")]
|
||||
for coord in invalid_coords:
|
||||
expected = "Both values (%s) in point must be float or int" % repr(coord)
|
||||
self._test_for_expected_error(Location, coord, expected)
|
||||
|
||||
def test_point_validation(self):
|
||||
class Location(Document):
|
||||
loc = PointField()
|
||||
|
||||
invalid_coords = {"x": 1, "y": 2}
|
||||
expected = 'PointField can only accept a valid GeoJson dictionary or lists of (x, y)'
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = {"type": "MadeUp", "coordinates": []}
|
||||
expected = 'PointField type must be "Point"'
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = {"type": "Point", "coordinates": [1, 2, 3]}
|
||||
expected = "Value ([1, 2, 3]) must be a two-dimensional point"
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [5, "a"]
|
||||
expected = "PointField can only accept lists of [x, y]"
|
||||
for coord in invalid_coords:
|
||||
self._test_for_expected_error(Location, coord, expected)
|
||||
|
||||
invalid_coords = [[], [1], [1, 2, 3]]
|
||||
for coord in invalid_coords:
|
||||
expected = "Value (%s) must be a two-dimensional point" % repr(coord)
|
||||
self._test_for_expected_error(Location, coord, expected)
|
||||
|
||||
invalid_coords = [[{}, {}], ("a", "b")]
|
||||
for coord in invalid_coords:
|
||||
expected = "Both values (%s) in point must be float or int" % repr(coord)
|
||||
self._test_for_expected_error(Location, coord, expected)
|
||||
|
||||
Location(loc=[1, 2]).validate()
|
||||
|
||||
def test_linestring_validation(self):
|
||||
class Location(Document):
|
||||
loc = LineStringField()
|
||||
|
||||
invalid_coords = {"x": 1, "y": 2}
|
||||
expected = 'LineStringField can only accept a valid GeoJson dictionary or lists of (x, y)'
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
|
||||
expected = 'LineStringField type must be "LineString"'
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = {"type": "LineString", "coordinates": [[1, 2, 3]]}
|
||||
expected = "Invalid LineString:\nValue ([1, 2, 3]) must be a two-dimensional point"
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [5, "a"]
|
||||
expected = "Invalid LineString must contain at least one valid point"
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [[1]]
|
||||
expected = "Invalid LineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0])
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [[1, 2, 3]]
|
||||
expected = "Invalid LineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0])
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [[[{}, {}]], [("a", "b")]]
|
||||
for coord in invalid_coords:
|
||||
expected = "Invalid LineString:\nBoth values (%s) in point must be float or int" % repr(coord[0])
|
||||
self._test_for_expected_error(Location, coord, expected)
|
||||
|
||||
Location(loc=[[1, 2], [3, 4], [5, 6], [1, 2]]).validate()
|
||||
|
||||
def test_polygon_validation(self):
|
||||
class Location(Document):
|
||||
loc = PolygonField()
|
||||
|
||||
invalid_coords = {"x": 1, "y": 2}
|
||||
expected = 'PolygonField can only accept a valid GeoJson dictionary or lists of (x, y)'
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
|
||||
expected = 'PolygonField type must be "Polygon"'
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = {"type": "Polygon", "coordinates": [[[1, 2, 3]]]}
|
||||
expected = "Invalid Polygon:\nValue ([1, 2, 3]) must be a two-dimensional point"
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [[[5, "a"]]]
|
||||
expected = "Invalid Polygon:\nBoth values ([5, 'a']) in point must be float or int"
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [[[]]]
|
||||
expected = "Invalid Polygon must contain at least one valid linestring"
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [[[1, 2, 3]]]
|
||||
expected = "Invalid Polygon:\nValue ([1, 2, 3]) must be a two-dimensional point"
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [[[{}, {}]], [("a", "b")]]
|
||||
expected = "Invalid Polygon:\nBoth values ([{}, {}]) in point must be float or int, Both values (('a', 'b')) in point must be float or int"
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [[[1, 2], [3, 4]]]
|
||||
expected = "Invalid Polygon:\nLineStrings must start and end at the same point"
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
Location(loc=[[[1, 2], [3, 4], [5, 6], [1, 2]]]).validate()
|
||||
|
||||
def test_indexes_geopoint(self):
|
||||
"""Ensure that indexes are created automatically for GeoPointFields.
|
||||
"""
|
||||
class Event(Document):
|
||||
title = StringField()
|
||||
location = GeoPointField()
|
||||
|
||||
geo_indices = Event._geo_indices()
|
||||
self.assertEqual(geo_indices, [{'fields': [('location', '2d')]}])
|
||||
|
||||
def test_geopoint_embedded_indexes(self):
|
||||
"""Ensure that indexes are created automatically for GeoPointFields on
|
||||
embedded documents.
|
||||
"""
|
||||
class Venue(EmbeddedDocument):
|
||||
location = GeoPointField()
|
||||
name = StringField()
|
||||
|
||||
class Event(Document):
|
||||
title = StringField()
|
||||
venue = EmbeddedDocumentField(Venue)
|
||||
|
||||
geo_indices = Event._geo_indices()
|
||||
self.assertEqual(geo_indices, [{'fields': [('venue.location', '2d')]}])
|
||||
|
||||
def test_indexes_2dsphere(self):
|
||||
"""Ensure that indexes are created automatically for GeoPointFields.
|
||||
"""
|
||||
class Event(Document):
|
||||
title = StringField()
|
||||
point = PointField()
|
||||
line = LineStringField()
|
||||
polygon = PolygonField()
|
||||
|
||||
geo_indices = Event._geo_indices()
|
||||
self.assertTrue({'fields': [('line', '2dsphere')]} in geo_indices)
|
||||
self.assertTrue({'fields': [('polygon', '2dsphere')]} in geo_indices)
|
||||
self.assertTrue({'fields': [('point', '2dsphere')]} in geo_indices)
|
||||
|
||||
def test_indexes_2dsphere_embedded(self):
|
||||
"""Ensure that indexes are created automatically for GeoPointFields.
|
||||
"""
|
||||
class Venue(EmbeddedDocument):
|
||||
name = StringField()
|
||||
point = PointField()
|
||||
line = LineStringField()
|
||||
polygon = PolygonField()
|
||||
|
||||
class Event(Document):
|
||||
title = StringField()
|
||||
venue = EmbeddedDocumentField(Venue)
|
||||
|
||||
geo_indicies = Event._geo_indices()
|
||||
self.assertTrue({'fields': [('venue.line', '2dsphere')]} in geo_indicies)
|
||||
self.assertTrue({'fields': [('venue.polygon', '2dsphere')]} in geo_indicies)
|
||||
self.assertTrue({'fields': [('venue.point', '2dsphere')]} in geo_indicies)
|
||||
|
||||
def test_geo_indexes_recursion(self):
|
||||
|
||||
class Location(Document):
|
||||
name = StringField()
|
||||
location = GeoPointField()
|
||||
|
||||
class Parent(Document):
|
||||
name = StringField()
|
||||
location = ReferenceField(Location)
|
||||
|
||||
Location.drop_collection()
|
||||
Parent.drop_collection()
|
||||
|
||||
list(Parent.objects)
|
||||
|
||||
collection = Parent._get_collection()
|
||||
info = collection.index_information()
|
||||
|
||||
self.assertFalse('location_2d' in info)
|
||||
|
||||
self.assertEqual(len(Parent._geo_indices()), 0)
|
||||
self.assertEqual(len(Location._geo_indices()), 1)
|
||||
|
||||
def test_geo_indexes_auto_index(self):
|
||||
|
||||
# Test just listing the fields
|
||||
class Log(Document):
|
||||
location = PointField(auto_index=False)
|
||||
datetime = DateTimeField()
|
||||
|
||||
meta = {
|
||||
'indexes': [[("location", "2dsphere"), ("datetime", 1)]]
|
||||
}
|
||||
|
||||
self.assertEqual([], Log._geo_indices())
|
||||
|
||||
Log.drop_collection()
|
||||
Log.ensure_indexes()
|
||||
|
||||
info = Log._get_collection().index_information()
|
||||
self.assertEqual(info["location_2dsphere_datetime_1"]["key"],
|
||||
[('location', '2dsphere'), ('datetime', 1)])
|
||||
|
||||
# Test listing explicitly
|
||||
class Log(Document):
|
||||
location = PointField(auto_index=False)
|
||||
datetime = DateTimeField()
|
||||
|
||||
meta = {
|
||||
'indexes': [
|
||||
{'fields': [("location", "2dsphere"), ("datetime", 1)]}
|
||||
]
|
||||
}
|
||||
|
||||
self.assertEqual([], Log._geo_indices())
|
||||
|
||||
Log.drop_collection()
|
||||
Log.ensure_indexes()
|
||||
|
||||
info = Log._get_collection().index_information()
|
||||
self.assertEqual(info["location_2dsphere_datetime_1"]["key"],
|
||||
[('location', '2dsphere'), ('datetime', 1)])
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
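
A side note on the plumbing these index tests exercise: `_geo_indices()` only collects index specs, and it is `ensure_indexes()` that turns them into real MongoDB indexes. A minimal sketch of that translation against a bare pymongo collection (the helper name is illustrative, not a MongoEngine API):

    def apply_geo_indices(collection, geo_indices):
        # geo_indices is a list of specs such as
        # [{'fields': [('location', '2dsphere'), ('datetime', 1)]}]
        for spec in geo_indices:
            # pymongo accepts the same (key, direction) pairs, so the
            # '2d' / '2dsphere' markers pass straight through
            collection.ensure_index(spec['fields'])

    # e.g. apply_geo_indices(Log._get_collection(),
    #                        [{'fields': [('location', '2dsphere'), ('datetime', 1)]}])

which would produce exactly the "location_2dsphere_datetime_1" key the test reads back from index_information().
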
@@ -1,5 +1,5 @@
from transform import *
from field_list import *
from queryset import *
from visitor import *
from geo import *
418
tests/queryset/geo.py
Normal file

@@ -0,0 +1,418 @@
import sys
sys.path[0:0] = [""]

import unittest
from datetime import datetime, timedelta
from mongoengine import *

__all__ = ("GeoQueriesTest",)


class GeoQueriesTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')

    def test_geospatial_operators(self):
        """Ensure that geospatial queries are working.
        """
        class Event(Document):
            title = StringField()
            date = DateTimeField()
            location = GeoPointField()

            def __unicode__(self):
                return self.title

        Event.drop_collection()

        event1 = Event(title="Coltrane Motion @ Double Door",
                       date=datetime.now() - timedelta(days=1),
                       location=[-87.677137, 41.909889]).save()
        event2 = Event(title="Coltrane Motion @ Bottom of the Hill",
                       date=datetime.now() - timedelta(days=10),
                       location=[-122.4194155, 37.7749295]).save()
        event3 = Event(title="Coltrane Motion @ Empty Bottle",
                       date=datetime.now(),
                       location=[-87.686638, 41.900474]).save()

        # find all events "near" pitchfork office, chicago.
        # note that "near" will show the san francisco event, too,
        # although it sorts to last.
        events = Event.objects(location__near=[-87.67892, 41.9120459])
        self.assertEqual(events.count(), 3)
        self.assertEqual(list(events), [event1, event3, event2])

        # find events within 5 degrees of pitchfork office, chicago
        point_and_distance = [[-87.67892, 41.9120459], 5]
        events = Event.objects(location__within_distance=point_and_distance)
        self.assertEqual(events.count(), 2)
        events = list(events)
        self.assertTrue(event2 not in events)
        self.assertTrue(event1 in events)
        self.assertTrue(event3 in events)

        # ensure ordering is respected by "near"
        events = Event.objects(location__near=[-87.67892, 41.9120459])
        events = events.order_by("-date")
        self.assertEqual(events.count(), 3)
        self.assertEqual(list(events), [event3, event1, event2])

        # find events within 10 degrees of san francisco
        point = [-122.415579, 37.7566023]
        events = Event.objects(location__near=point, location__max_distance=10)
        self.assertEqual(events.count(), 1)
        self.assertEqual(events[0], event2)

        # find events within 10 degrees of san francisco
        point_and_distance = [[-122.415579, 37.7566023], 10]
        events = Event.objects(location__within_distance=point_and_distance)
        self.assertEqual(events.count(), 1)
        self.assertEqual(events[0], event2)

        # find events within 1 degree of greenpoint, brooklyn, nyc, ny
        point_and_distance = [[-73.9509714, 40.7237134], 1]
        events = Event.objects(location__within_distance=point_and_distance)
        self.assertEqual(events.count(), 0)

        # ensure ordering is respected by "within_distance"
        point_and_distance = [[-87.67892, 41.9120459], 10]
        events = Event.objects(location__within_distance=point_and_distance)
        events = events.order_by("-date")
        self.assertEqual(events.count(), 2)
        self.assertEqual(events[0], event3)

        # check that within_box works
        box = [(-125.0, 35.0), (-100.0, 40.0)]
        events = Event.objects(location__within_box=box)
        self.assertEqual(events.count(), 1)
        self.assertEqual(events[0].id, event2.id)

        polygon = [
            (-87.694445, 41.912114),
            (-87.69084, 41.919395),
            (-87.681742, 41.927186),
            (-87.654276, 41.911731),
            (-87.656164, 41.898061),
        ]
        events = Event.objects(location__within_polygon=polygon)
        self.assertEqual(events.count(), 1)
        self.assertEqual(events[0].id, event1.id)

        polygon2 = [
            (-1.742249, 54.033586),
            (-1.225891, 52.792797),
            (-4.40094, 53.389881)
        ]
        events = Event.objects(location__within_polygon=polygon2)
        self.assertEqual(events.count(), 0)

    def test_geo_spatial_embedded(self):

        class Venue(EmbeddedDocument):
            location = GeoPointField()
            name = StringField()

        class Event(Document):
            title = StringField()
            venue = EmbeddedDocumentField(Venue)

        Event.drop_collection()

        venue1 = Venue(name="The Rock", location=[-87.677137, 41.909889])
        venue2 = Venue(name="The Bridge", location=[-122.4194155, 37.7749295])

        event1 = Event(title="Coltrane Motion @ Double Door",
                       venue=venue1).save()
        event2 = Event(title="Coltrane Motion @ Bottom of the Hill",
                       venue=venue2).save()
        event3 = Event(title="Coltrane Motion @ Empty Bottle",
                       venue=venue1).save()

        # find all events "near" pitchfork office, chicago.
        # note that "near" will show the san francisco event, too,
        # although it sorts to last.
        events = Event.objects(venue__location__near=[-87.67892, 41.9120459])
        self.assertEqual(events.count(), 3)
        self.assertEqual(list(events), [event1, event3, event2])

    def test_spherical_geospatial_operators(self):
        """Ensure that spherical geospatial queries are working.
        """
        class Point(Document):
            location = GeoPointField()

        Point.drop_collection()

        # These points are one degree apart, which (according to Google Maps)
        # is about 110 km apart at this place on the Earth.
        north_point = Point(location=[-122, 38]).save()  # Near Concord, CA
        south_point = Point(location=[-122, 37]).save()  # Near Santa Cruz, CA

        earth_radius = 6378.009  # in km (needs to be a float for division)

        # Finds both points because they are within 60 km of the reference
        # point equidistant between them.
        points = Point.objects(location__near_sphere=[-122, 37.5])
        self.assertEqual(points.count(), 2)

        # Same behavior for _within_spherical_distance
        points = Point.objects(
            location__within_spherical_distance=[[-122, 37.5], 60 / earth_radius]
        )
        self.assertEqual(points.count(), 2)

        points = Point.objects(location__near_sphere=[-122, 37.5],
                               location__max_distance=60 / earth_radius)
        self.assertEqual(points.count(), 2)

        # Finds both points, but orders the north point first because it's
        # closer to the reference point to the north.
        points = Point.objects(location__near_sphere=[-122, 38.5])
        self.assertEqual(points.count(), 2)
        self.assertEqual(points[0].id, north_point.id)
        self.assertEqual(points[1].id, south_point.id)

        # Finds both points, but orders the south point first because it's
        # closer to the reference point to the south.
        points = Point.objects(location__near_sphere=[-122, 36.5])
        self.assertEqual(points.count(), 2)
        self.assertEqual(points[0].id, south_point.id)
        self.assertEqual(points[1].id, north_point.id)

        # Finds only one point because only the first point is within 60km of
        # the reference point to the south.
        points = Point.objects(
            location__within_spherical_distance=[[-122, 36.5], 60 / earth_radius])
        self.assertEqual(points.count(), 1)
        self.assertEqual(points[0].id, south_point.id)

    def test_2dsphere_point(self):

        class Event(Document):
            title = StringField()
            date = DateTimeField()
            location = PointField()

            def __unicode__(self):
                return self.title

        Event.drop_collection()

        event1 = Event(title="Coltrane Motion @ Double Door",
                       date=datetime.now() - timedelta(days=1),
                       location=[-87.677137, 41.909889])
        event1.save()
        event2 = Event(title="Coltrane Motion @ Bottom of the Hill",
                       date=datetime.now() - timedelta(days=10),
                       location=[-122.4194155, 37.7749295]).save()
        event3 = Event(title="Coltrane Motion @ Empty Bottle",
                       date=datetime.now(),
                       location=[-87.686638, 41.900474]).save()

        # find all events "near" pitchfork office, chicago.
        # note that "near" will show the san francisco event, too,
        # although it sorts to last.
        events = Event.objects(location__near=[-87.67892, 41.9120459])
        self.assertEqual(events.count(), 3)
        self.assertEqual(list(events), [event1, event3, event2])

        # find events within 2 degrees of pitchfork office, chicago
        point_and_distance = [[-87.67892, 41.9120459], 2]
        events = Event.objects(location__geo_within_center=point_and_distance)
        self.assertEqual(events.count(), 2)
        events = list(events)
        self.assertTrue(event2 not in events)
        self.assertTrue(event1 in events)
        self.assertTrue(event3 in events)

        # ensure ordering is respected by "near"
        events = Event.objects(location__near=[-87.67892, 41.9120459])
        events = events.order_by("-date")
        self.assertEqual(events.count(), 3)
        self.assertEqual(list(events), [event3, event1, event2])

        # find events within 10km of san francisco
        point = [-122.415579, 37.7566023]
        events = Event.objects(location__near=point, location__max_distance=10000)
        self.assertEqual(events.count(), 1)
        self.assertEqual(events[0], event2)

        # find events within 1km of greenpoint, brooklyn, nyc, ny
        events = Event.objects(location__near=[-73.9509714, 40.7237134], location__max_distance=1000)
        self.assertEqual(events.count(), 0)

        # ensure ordering is respected by "near"
        events = Event.objects(location__near=[-87.67892, 41.9120459],
                               location__max_distance=10000).order_by("-date")
        self.assertEqual(events.count(), 2)
        self.assertEqual(events[0], event3)

        # check that within_box works
        box = [(-125.0, 35.0), (-100.0, 40.0)]
        events = Event.objects(location__geo_within_box=box)
        self.assertEqual(events.count(), 1)
        self.assertEqual(events[0].id, event2.id)

        polygon = [
            (-87.694445, 41.912114),
            (-87.69084, 41.919395),
            (-87.681742, 41.927186),
            (-87.654276, 41.911731),
            (-87.656164, 41.898061),
        ]
        events = Event.objects(location__geo_within_polygon=polygon)
        self.assertEqual(events.count(), 1)
        self.assertEqual(events[0].id, event1.id)

        polygon2 = [
            (-1.742249, 54.033586),
            (-1.225891, 52.792797),
            (-4.40094, 53.389881)
        ]
        events = Event.objects(location__geo_within_polygon=polygon2)
        self.assertEqual(events.count(), 0)

    def test_2dsphere_point_embedded(self):

        class Venue(EmbeddedDocument):
            location = GeoPointField()
            name = StringField()

        class Event(Document):
            title = StringField()
            venue = EmbeddedDocumentField(Venue)

        Event.drop_collection()

        venue1 = Venue(name="The Rock", location=[-87.677137, 41.909889])
        venue2 = Venue(name="The Bridge", location=[-122.4194155, 37.7749295])

        event1 = Event(title="Coltrane Motion @ Double Door",
                       venue=venue1).save()
        event2 = Event(title="Coltrane Motion @ Bottom of the Hill",
                       venue=venue2).save()
        event3 = Event(title="Coltrane Motion @ Empty Bottle",
                       venue=venue1).save()

        # find all events "near" pitchfork office, chicago.
        # note that "near" will show the san francisco event, too,
        # although it sorts to last.
        events = Event.objects(venue__location__near=[-87.67892, 41.9120459])
        self.assertEqual(events.count(), 3)
        self.assertEqual(list(events), [event1, event3, event2])

    def test_linestring(self):

        class Road(Document):
            name = StringField()
            line = LineStringField()

        Road.drop_collection()

        Road(name="66", line=[[40, 5], [41, 6]]).save()

        # near
        point = {"type": "Point", "coordinates": [40, 5]}
        roads = Road.objects.filter(line__near=point["coordinates"]).count()
        self.assertEqual(1, roads)

        roads = Road.objects.filter(line__near=point).count()
        self.assertEqual(1, roads)

        roads = Road.objects.filter(line__near={"$geometry": point}).count()
        self.assertEqual(1, roads)

        # Within
        polygon = {"type": "Polygon",
                   "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]}
        roads = Road.objects.filter(line__geo_within=polygon["coordinates"]).count()
        self.assertEqual(1, roads)

        roads = Road.objects.filter(line__geo_within=polygon).count()
        self.assertEqual(1, roads)

        roads = Road.objects.filter(line__geo_within={"$geometry": polygon}).count()
        self.assertEqual(1, roads)

        # Intersects
        line = {"type": "LineString",
                "coordinates": [[40, 5], [40, 6]]}
        roads = Road.objects.filter(line__geo_intersects=line["coordinates"]).count()
        self.assertEqual(1, roads)

        roads = Road.objects.filter(line__geo_intersects=line).count()
        self.assertEqual(1, roads)

        roads = Road.objects.filter(line__geo_intersects={"$geometry": line}).count()
        self.assertEqual(1, roads)

        polygon = {"type": "Polygon",
                   "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]}
        roads = Road.objects.filter(line__geo_intersects=polygon["coordinates"]).count()
        self.assertEqual(1, roads)

        roads = Road.objects.filter(line__geo_intersects=polygon).count()
        self.assertEqual(1, roads)

        roads = Road.objects.filter(line__geo_intersects={"$geometry": polygon}).count()
        self.assertEqual(1, roads)

    def test_polygon(self):

        class Road(Document):
            name = StringField()
            poly = PolygonField()

        Road.drop_collection()

        Road(name="66", poly=[[[40, 5], [40, 6], [41, 6], [40, 5]]]).save()

        # near
        point = {"type": "Point", "coordinates": [40, 5]}
        roads = Road.objects.filter(poly__near=point["coordinates"]).count()
        self.assertEqual(1, roads)

        roads = Road.objects.filter(poly__near=point).count()
        self.assertEqual(1, roads)

        roads = Road.objects.filter(poly__near={"$geometry": point}).count()
        self.assertEqual(1, roads)

        # Within
        polygon = {"type": "Polygon",
                   "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]}
        roads = Road.objects.filter(poly__geo_within=polygon["coordinates"]).count()
        self.assertEqual(1, roads)

        roads = Road.objects.filter(poly__geo_within=polygon).count()
        self.assertEqual(1, roads)

        roads = Road.objects.filter(poly__geo_within={"$geometry": polygon}).count()
        self.assertEqual(1, roads)

        # Intersects
        line = {"type": "LineString",
                "coordinates": [[40, 5], [41, 6]]}
        roads = Road.objects.filter(poly__geo_intersects=line["coordinates"]).count()
        self.assertEqual(1, roads)

        roads = Road.objects.filter(poly__geo_intersects=line).count()
        self.assertEqual(1, roads)

        roads = Road.objects.filter(poly__geo_intersects={"$geometry": line}).count()
        self.assertEqual(1, roads)

        polygon = {"type": "Polygon",
                   "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]}
        roads = Road.objects.filter(poly__geo_intersects=polygon["coordinates"]).count()
        self.assertEqual(1, roads)

        roads = Road.objects.filter(poly__geo_intersects=polygon).count()
        self.assertEqual(1, roads)

        roads = Road.objects.filter(poly__geo_intersects={"$geometry": polygon}).count()
        self.assertEqual(1, roads)


if __name__ == '__main__':
    unittest.main()
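
One unit subtlety runs through the file above: the legacy 2d operators (`near_sphere`, `within_spherical_distance`) take distances in radians, which is why the spherical tests divide kilometres by the Earth's radius, while the 2dsphere queries pass `max_distance` in metres. The arithmetic as a standalone check, reusing the test's own constant:

    earth_radius = 6378.009               # km, as in the test above
    sixty_km_in_radians = 60 / earth_radius
    print(round(sixty_km_in_radians, 6))  # -> 0.009407

    ten_km_in_metres = 10 * 1000          # matches max_distance=10000 above

So the same "within 60 km" intent is spelled `60 / earth_radius` against a 2d index and `60000` against a 2dsphere one.
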
@@ -1,4 +1,3 @@
from __future__ import with_statement
import sys
sys.path[0:0] = [""]

@@ -116,6 +115,15 @@ class QuerySetTest(unittest.TestCase):
        self.assertEqual(len(people), 1)
        self.assertEqual(people[0].name, 'User B')

        # Test slice limit and skip cursor reset
        qs = self.Person.objects[1:2]
        # fetch then delete the cursor
        qs._cursor
        qs._cursor_obj = None
        people = list(qs)
        self.assertEqual(len(people), 1)
        self.assertEqual(people[0].name, 'User B')

        people = list(self.Person.objects[1:1])
        self.assertEqual(len(people), 0)
@@ -274,7 +282,7 @@ class QuerySetTest(unittest.TestCase):
        a_objects = A.objects(s='test1')
        query = B.objects(ref__in=a_objects)
        query = query.filter(boolfield=True)
        self.assertEquals(query.count(), 1)
        self.assertEqual(query.count(), 1)

    def test_update_write_concern(self):
        """Test that passing write_concern works"""
@@ -287,15 +295,19 @@ class QuerySetTest(unittest.TestCase):
            name='Test User', write_concern=write_concern)
        author.save(write_concern=write_concern)

        self.Person.objects.update(set__name='Ross',
                                   write_concern=write_concern)
        result = self.Person.objects.update(
            set__name='Ross', write_concern={"w": 1})
        self.assertEqual(result, 1)
        result = self.Person.objects.update(
            set__name='Ross', write_concern={"w": 0})
        self.assertEqual(result, None)

        author = self.Person.objects.first()
        self.assertEqual(author.name, 'Ross')

        self.Person.objects.update_one(set__name='Test User', write_concern=write_concern)
        author = self.Person.objects.first()
        self.assertEqual(author.name, 'Test User')
        result = self.Person.objects.update_one(
            set__name='Test User', write_concern={"w": 1})
        self.assertEqual(result, 1)
        result = self.Person.objects.update_one(
            set__name='Test User', write_concern={"w": 0})
        self.assertEqual(result, None)

    def test_update_update_has_a_value(self):
        """Test to ensure that update is passed a value to update to"""
@@ -524,6 +536,24 @@ class QuerySetTest(unittest.TestCase):
        self.assertEqual(club.members['John']['gender'], "F")
        self.assertEqual(club.members['John']['age'], 14)

    def test_upsert(self):
        self.Person.drop_collection()

        self.Person.objects(pk=ObjectId(), name="Bob", age=30).update(upsert=True)

        bob = self.Person.objects.first()
        self.assertEqual("Bob", bob.name)
        self.assertEqual(30, bob.age)

    def test_set_on_insert(self):
        self.Person.drop_collection()

        self.Person.objects(pk=ObjectId()).update(set__name='Bob', set_on_insert__age=30, upsert=True)

        bob = self.Person.objects.first()
        self.assertEqual("Bob", bob.name)
        self.assertEqual(30, bob.age)
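
For readers unfamiliar with `set_on_insert`: the double-underscore modifiers above are translated into a raw update document, so the call should be roughly equivalent to this pymongo operation (a sketch; 'person' assumes the default collection name):

    db.person.update({'_id': pk},                   # no match -> upsert inserts
                     {'$set': {'name': 'Bob'},
                      '$setOnInsert': {'age': 30}},
                     upsert=True)

`$setOnInsert` fields are only written when the upsert actually inserts a new document (it needs MongoDB 2.4+), which is why the freshly created Bob carries both the name and the age.
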

    def test_get_or_create(self):
        """Ensure that ``get_or_create`` returns one result or creates a new
        document.
@@ -763,7 +793,7 @@ class QuerySetTest(unittest.TestCase):

        p = p.snapshot(True).slave_okay(True).timeout(True)
        self.assertEqual(p._cursor_args,
                         {'snapshot': True, 'slave_okay': True, 'timeout': True})

    def test_repeated_iteration(self):
        """Ensure that QuerySet rewinds itself when one iteration finishes.
@@ -805,6 +835,7 @@ class QuerySetTest(unittest.TestCase):
        self.assertTrue("Doc: 0" in docs_string)

        self.assertEqual(docs.count(), 1000)
        self.assertTrue('(remaining elements truncated)' in "%s" % docs)

        # Limit and skip
        docs = docs[1:4]
@@ -1233,7 +1264,7 @@ class QuerySetTest(unittest.TestCase):
        class BlogPost(Document):
            content = StringField()
            authors = ListField(ReferenceField(self.Person,
                                               reverse_delete_rule=PULL))

        BlogPost.drop_collection()
        self.Person.drop_collection()
@@ -1291,6 +1322,49 @@ class QuerySetTest(unittest.TestCase):
        self.Person.objects()[:1].delete()
        self.assertEqual(1, BlogPost.objects.count())


    def test_reference_field_find(self):
        """Ensure that a ReferenceField can be queried with a document, its
        pk, or the string form of its pk.
        """
        class BlogPost(Document):
            content = StringField()
            author = ReferenceField(self.Person)

        BlogPost.drop_collection()
        self.Person.drop_collection()

        me = self.Person(name='Test User').save()
        BlogPost(content="test 123", author=me).save()

        self.assertEqual(1, BlogPost.objects(author=me).count())
        self.assertEqual(1, BlogPost.objects(author=me.pk).count())
        self.assertEqual(1, BlogPost.objects(author="%s" % me.pk).count())

        self.assertEqual(1, BlogPost.objects(author__in=[me]).count())
        self.assertEqual(1, BlogPost.objects(author__in=[me.pk]).count())
        self.assertEqual(1, BlogPost.objects(author__in=["%s" % me.pk]).count())

    def test_reference_field_find_dbref(self):
        """Ensure that a DBRef-backed ReferenceField can be queried with a
        document, its pk, or the string form of its pk.
        """
        class BlogPost(Document):
            content = StringField()
            author = ReferenceField(self.Person, dbref=True)

        BlogPost.drop_collection()
        self.Person.drop_collection()

        me = self.Person(name='Test User').save()
        BlogPost(content="test 123", author=me).save()

        self.assertEqual(1, BlogPost.objects(author=me).count())
        self.assertEqual(1, BlogPost.objects(author=me.pk).count())
        self.assertEqual(1, BlogPost.objects(author="%s" % me.pk).count())

        self.assertEqual(1, BlogPost.objects(author__in=[me]).count())
        self.assertEqual(1, BlogPost.objects(author__in=[me.pk]).count())
        self.assertEqual(1, BlogPost.objects(author__in=["%s" % me.pk]).count())
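
Why all six lookups in each test above hit the same document: ReferenceField query values are coerced into the stored representation before the query is sent. Sketching the resulting filter documents (the ObjectId value is illustrative):

    from bson import DBRef, ObjectId

    oid = ObjectId('530f2dbd8a4d3e732d1a3b2c')          # hypothetical pk
    # ReferenceField(self.Person)              stores the raw ObjectId:
    #   {'author': oid}
    # ReferenceField(self.Person, dbref=True)  stores a DBRef:
    #   {'author': DBRef('person', oid)}

Passing the document, its pk, or `"%s" % me.pk` all normalise to the same value; the string case works only because it is parsed back into an ObjectId (and wrapped in a DBRef when dbref=True) during query transformation.
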

    def test_update(self):
        """Ensure that atomic updates work properly.
        """
@@ -2380,167 +2454,6 @@ class QuerySetTest(unittest.TestCase):
    def tearDown(self):
        self.Person.drop_collection()

    def test_geospatial_operators(self):
        """Ensure that geospatial queries are working.
        """
        class Event(Document):
            title = StringField()
            date = DateTimeField()
            location = GeoPointField()

            def __unicode__(self):
                return self.title

        Event.drop_collection()

        event1 = Event(title="Coltrane Motion @ Double Door",
                       date=datetime.now() - timedelta(days=1),
                       location=[41.909889, -87.677137])
        event2 = Event(title="Coltrane Motion @ Bottom of the Hill",
                       date=datetime.now() - timedelta(days=10),
                       location=[37.7749295, -122.4194155])
        event3 = Event(title="Coltrane Motion @ Empty Bottle",
                       date=datetime.now(),
                       location=[41.900474, -87.686638])

        event1.save()
        event2.save()
        event3.save()

        # find all events "near" pitchfork office, chicago.
        # note that "near" will show the san francisco event, too,
        # although it sorts to last.
        events = Event.objects(location__near=[41.9120459, -87.67892])
        self.assertEqual(events.count(), 3)
        self.assertEqual(list(events), [event1, event3, event2])

        # find events within 5 degrees of pitchfork office, chicago
        point_and_distance = [[41.9120459, -87.67892], 5]
        events = Event.objects(location__within_distance=point_and_distance)
        self.assertEqual(events.count(), 2)
        events = list(events)
        self.assertTrue(event2 not in events)
        self.assertTrue(event1 in events)
        self.assertTrue(event3 in events)

        # ensure ordering is respected by "near"
        events = Event.objects(location__near=[41.9120459, -87.67892])
        events = events.order_by("-date")
        self.assertEqual(events.count(), 3)
        self.assertEqual(list(events), [event3, event1, event2])

        # find events within 10 degrees of san francisco
        point = [37.7566023, -122.415579]
        events = Event.objects(location__near=point, location__max_distance=10)
        self.assertEqual(events.count(), 1)
        self.assertEqual(events[0], event2)

        # find events within 10 degrees of san francisco
        point_and_distance = [[37.7566023, -122.415579], 10]
        events = Event.objects(location__within_distance=point_and_distance)
        self.assertEqual(events.count(), 1)
        self.assertEqual(events[0], event2)

        # find events within 1 degree of greenpoint, brooklyn, nyc, ny
        point_and_distance = [[40.7237134, -73.9509714], 1]
        events = Event.objects(location__within_distance=point_and_distance)
        self.assertEqual(events.count(), 0)

        # ensure ordering is respected by "within_distance"
        point_and_distance = [[41.9120459, -87.67892], 10]
        events = Event.objects(location__within_distance=point_and_distance)
        events = events.order_by("-date")
        self.assertEqual(events.count(), 2)
        self.assertEqual(events[0], event3)

        # check that within_box works
        box = [(35.0, -125.0), (40.0, -100.0)]
        events = Event.objects(location__within_box=box)
        self.assertEqual(events.count(), 1)
        self.assertEqual(events[0].id, event2.id)

        # check that polygon works for users who have a server >= 1.9
        server_version = tuple(
            get_connection().server_info()['version'].split('.')
        )
        required_version = tuple("1.9.0".split("."))
        if server_version >= required_version:
            polygon = [
                (41.912114, -87.694445),
                (41.919395, -87.69084),
                (41.927186, -87.681742),
                (41.911731, -87.654276),
                (41.898061, -87.656164),
            ]
            events = Event.objects(location__within_polygon=polygon)
            self.assertEqual(events.count(), 1)
            self.assertEqual(events[0].id, event1.id)

            polygon2 = [
                (54.033586, -1.742249),
                (52.792797, -1.225891),
                (53.389881, -4.40094)
            ]
            events = Event.objects(location__within_polygon=polygon2)
            self.assertEqual(events.count(), 0)

        Event.drop_collection()

    def test_spherical_geospatial_operators(self):
        """Ensure that spherical geospatial queries are working.
        """
        class Point(Document):
            location = GeoPointField()

        Point.drop_collection()

        # These points are one degree apart, which (according to Google Maps)
        # is about 110 km apart at this place on the Earth.
        north_point = Point(location=[-122, 38])  # Near Concord, CA
        south_point = Point(location=[-122, 37])  # Near Santa Cruz, CA
        north_point.save()
        south_point.save()

        earth_radius = 6378.009  # in km (needs to be a float for division)

        # Finds both points because they are within 60 km of the reference
        # point equidistant between them.
        points = Point.objects(location__near_sphere=[-122, 37.5])
        self.assertEqual(points.count(), 2)

        # Same behavior for _within_spherical_distance
        points = Point.objects(
            location__within_spherical_distance=[[-122, 37.5], 60 / earth_radius]
        )
        self.assertEqual(points.count(), 2)

        points = Point.objects(location__near_sphere=[-122, 37.5],
                               location__max_distance=60 / earth_radius)
        self.assertEqual(points.count(), 2)

        # Finds both points, but orders the north point first because it's
        # closer to the reference point to the north.
        points = Point.objects(location__near_sphere=[-122, 38.5])
        self.assertEqual(points.count(), 2)
        self.assertEqual(points[0].id, north_point.id)
        self.assertEqual(points[1].id, south_point.id)

        # Finds both points, but orders the south point first because it's
        # closer to the reference point to the south.
        points = Point.objects(location__near_sphere=[-122, 36.5])
        self.assertEqual(points.count(), 2)
        self.assertEqual(points[0].id, south_point.id)
        self.assertEqual(points[1].id, north_point.id)

        # Finds only one point because only the first point is within 60km of
        # the reference point to the south.
        points = Point.objects(
            location__within_spherical_distance=[[-122, 36.5], 60 / earth_radius])
        self.assertEqual(points.count(), 1)
        self.assertEqual(points[0].id, south_point.id)

        Point.drop_collection()

    def test_custom_querysets(self):
        """Ensure that custom QuerySet classes may be used.
        """
@@ -3276,6 +3189,28 @@ class QuerySetTest(unittest.TestCase):
        self.assertEqual(results[1]['name'], 'Barack Obama')
        self.assertEqual(results[1]['price'], Decimal('2.22'))

    def test_as_pymongo_json_limit_fields(self):

        class User(Document):
            email = EmailField(unique=True, required=True)
            password_hash = StringField(db_field='password_hash', required=True)
            password_salt = StringField(db_field='password_salt', required=True)

        User.drop_collection()
        User(email="ross@example.com", password_salt="SomeSalt", password_hash="SomeHash").save()

        serialized_user = User.objects.exclude('password_salt', 'password_hash').as_pymongo()[0]
        self.assertEqual(set(['_id', 'email']), set(serialized_user.keys()))

        serialized_user = User.objects.exclude('id', 'password_salt', 'password_hash').to_json()
        self.assertEqual('[{"email": "ross@example.com"}]', serialized_user)

        serialized_user = User.objects.exclude('password_salt').only('email').as_pymongo()[0]
        self.assertEqual(set(['email']), set(serialized_user.keys()))

        serialized_user = User.objects.exclude('password_salt').only('email').to_json()
        self.assertEqual('[{"email": "ross@example.com"}]', serialized_user)

    def test_no_dereference(self):

        class Organization(Document):
@@ -3297,6 +3232,51 @@ class QuerySetTest(unittest.TestCase):
                                            Organization))
            self.assertTrue(isinstance(qs.first().organization, Organization))

    def test_cached_queryset(self):
        class Person(Document):
            name = StringField()

        Person.drop_collection()
        for i in xrange(100):
            Person(name="No: %s" % i).save()

        with query_counter() as q:
            self.assertEqual(q, 0)
            people = Person.objects

            [x for x in people]
            self.assertEqual(100, len(people._result_cache))
            self.assertEqual(None, people._len)
            self.assertEqual(q, 1)

            list(people)
            self.assertEqual(100, people._len)  # Caused by list calling len
            self.assertEqual(q, 1)

            people.count()  # count is cached
            self.assertEqual(q, 1)
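
The `_result_cache` / `_len` assertions above capture the caching contract: iteration fills the cache without fixing a length, `list()` (which calls `len()`) fixes `_len`, and `count()` is served from a cache too, so the whole block costs a single query. A toy version of that pattern, illustrative rather than MongoEngine's actual implementation:

    class CachingQuerySet(object):
        def __init__(self, cursor):
            self._cursor = iter(cursor)
            self._result_cache = []
            self._len = None

        def __iter__(self):
            for doc in self._result_cache:      # replay cached docs first
                yield doc
            for doc in self._cursor:            # drain the cursor only once
                self._result_cache.append(doc)
                yield doc

        def __len__(self):
            if self._len is None:
                self._len = sum(1 for _ in self)  # full evaluation, then cached
            return self._len
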

    def test_cache_not_cloned(self):

        class User(Document):
            name = StringField()

            def __unicode__(self):
                return self.name

        User.drop_collection()

        User(name="Alice").save()
        User(name="Bob").save()

        users = User.objects.all().order_by('name')
        self.assertEqual("%s" % users, "[<User: Alice>, <User: Bob>]")
        self.assertEqual(2, len(users._result_cache))

        users = users.filter(name="Bob")
        self.assertEqual("%s" % users, "[<User: Bob>]")
        self.assertEqual(1, len(users._result_cache))

    def test_nested_queryset_iterator(self):
        # Try iterating the same queryset twice, nested.
        names = ['Alice', 'Bob', 'Chuck', 'David', 'Eric', 'Francis', 'George']
@@ -3313,30 +3293,73 @@ class QuerySetTest(unittest.TestCase):
            User(name=name).save()

        users = User.objects.all().order_by('name')

        outer_count = 0
        inner_count = 0
        inner_total_count = 0

        self.assertEqual(users.count(), 7)
        with query_counter() as q:
            self.assertEqual(q, 0)

        for i, outer_user in enumerate(users):
            self.assertEqual(outer_user.name, names[i])
            outer_count += 1
            inner_count = 0

            # Calling len might disrupt the inner loop if there are bugs
            self.assertEqual(users.count(), 7)

            for j, inner_user in enumerate(users):
                self.assertEqual(inner_user.name, names[j])
                inner_count += 1
                inner_total_count += 1
            for i, outer_user in enumerate(users):
                self.assertEqual(outer_user.name, names[i])
                outer_count += 1
                inner_count = 0

            self.assertEqual(inner_count, 7)  # inner loop should always be executed seven times
                # Calling len might disrupt the inner loop if there are bugs
                self.assertEqual(users.count(), 7)

        self.assertEqual(outer_count, 7)  # outer loop should be executed seven times total
        self.assertEqual(inner_total_count, 7 * 7)  # inner loop should be executed forty-nine times total
                for j, inner_user in enumerate(users):
                    self.assertEqual(inner_user.name, names[j])
                    inner_count += 1
                    inner_total_count += 1

                self.assertEqual(inner_count, 7)  # inner loop should always be executed seven times

            self.assertEqual(outer_count, 7)  # outer loop should be executed seven times total
            self.assertEqual(inner_total_count, 7 * 7)  # inner loop should be executed forty-nine times total

            self.assertEqual(q, 2)

    def test_no_sub_classes(self):
        class A(Document):
            x = IntField()
            y = IntField()

            meta = {'allow_inheritance': True}

        class B(A):
            z = IntField()

        class C(B):
            zz = IntField()

        A.drop_collection()

        A(x=10, y=20).save()
        A(x=15, y=30).save()
        B(x=20, y=40).save()
        B(x=30, y=50).save()
        C(x=40, y=60).save()

        self.assertEqual(A.objects.no_sub_classes().count(), 2)
        self.assertEqual(A.objects.count(), 5)

        self.assertEqual(B.objects.no_sub_classes().count(), 2)
        self.assertEqual(B.objects.count(), 3)

        self.assertEqual(C.objects.no_sub_classes().count(), 1)
        self.assertEqual(C.objects.count(), 1)

        for obj in A.objects.no_sub_classes():
            self.assertEqual(obj.__class__, A)

        for obj in B.objects.no_sub_classes():
            self.assertEqual(obj.__class__, B)

        for obj in C.objects.no_sub_classes():
            self.assertEqual(obj.__class__, C)


if __name__ == '__main__':
    unittest.main()
@@ -1,4 +1,3 @@
from __future__ import with_statement
import sys
sys.path[0:0] = [""]

@@ -1,4 +1,3 @@
from __future__ import with_statement
import sys
sys.path[0:0] = [""]

@@ -1,4 +1,3 @@
from __future__ import with_statement
import sys
sys.path[0:0] = [""]
import unittest
@@ -1,4 +1,3 @@
from __future__ import with_statement
import sys
sys.path[0:0] = [""]
import unittest
@@ -6,7 +5,8 @@ import unittest
from mongoengine import *
from mongoengine.connection import get_db
from mongoengine.context_managers import (switch_db, switch_collection,
                                          no_dereference, query_counter)
                                          no_sub_classes, no_dereference,
                                          query_counter)


class ContextManagersTest(unittest.TestCase):
@@ -139,6 +139,54 @@ class ContextManagersTest(unittest.TestCase):
        self.assertTrue(isinstance(group.ref, User))
        self.assertTrue(isinstance(group.generic, User))

    def test_no_sub_classes(self):
        class A(Document):
            x = IntField()
            y = IntField()

            meta = {'allow_inheritance': True}

        class B(A):
            z = IntField()

        class C(B):
            zz = IntField()

        A.drop_collection()

        A(x=10, y=20).save()
        A(x=15, y=30).save()
        B(x=20, y=40).save()
        B(x=30, y=50).save()
        C(x=40, y=60).save()

        self.assertEqual(A.objects.count(), 5)
        self.assertEqual(B.objects.count(), 3)
        self.assertEqual(C.objects.count(), 1)

        with no_sub_classes(A) as A:
            self.assertEqual(A.objects.count(), 2)

            for obj in A.objects:
                self.assertEqual(obj.__class__, A)

        with no_sub_classes(B) as B:
            self.assertEqual(B.objects.count(), 2)

            for obj in B.objects:
                self.assertEqual(obj.__class__, B)

        with no_sub_classes(C) as C:
            self.assertEqual(C.objects.count(), 1)

            for obj in C.objects:
                self.assertEqual(obj.__class__, C)

        # Confirm the context manager exits correctly
        self.assertEqual(A.objects.count(), 5)
        self.assertEqual(B.objects.count(), 3)
        self.assertEqual(C.objects.count(), 1)
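
The `no_sub_classes` behaviour being tested can be pictured as a context manager that temporarily narrows the set of `_cls` values a query matches, then restores it on exit (which is what the trailing counts confirm). A sketch of the shape, with attribute names treated as illustrative rather than guaranteed internals:

    from contextlib import contextmanager

    @contextmanager
    def no_sub_classes_sketch(cls):
        old_subclasses = cls._subclasses          # e.g. ('A', 'A.B', 'A.B.C')
        cls._subclasses = (cls._class_name,)      # match only the class itself
        try:
            yield cls
        finally:
            cls._subclasses = old_subclasses      # restore on exit
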

    def test_query_counter(self):
        connect('mongoenginetest')
        db = get_db()
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
from __future__ import with_statement
import sys
sys.path[0:0] = [""]
import unittest

@@ -1,4 +1,3 @@
from __future__ import with_statement
import sys
sys.path[0:0] = [""]
import unittest
@@ -151,22 +150,74 @@ class QuerySetTest(unittest.TestCase):
        # Try iterating the same queryset twice, nested, in a Django template.
        names = ['A', 'B', 'C', 'D']

        class User(Document):
        class CustomUser(Document):
            name = StringField()

            def __unicode__(self):
                return self.name

        User.drop_collection()
        CustomUser.drop_collection()

        for name in names:
            User(name=name).save()
            CustomUser(name=name).save()

        users = User.objects.all().order_by('name')
        users = CustomUser.objects.all().order_by('name')
        template = Template("{% for user in users %}{{ user.name }}{% ifequal forloop.counter 2 %} {% for inner_user in users %}{{ inner_user.name }}{% endfor %} {% endifequal %}{% endfor %}")
        rendered = template.render(Context({'users': users}))
        self.assertEqual(rendered, 'AB ABCD CD')

    def test_filter(self):
        """Ensure that a queryset and filters work as expected.
        """

        class Note(Document):
            text = StringField()

        for i in xrange(1, 101):
            Note(text="Note: %s" % i).save()

        # Check the count
        self.assertEqual(Note.objects.count(), 100)

        # Get the first 10 and confirm
        notes = Note.objects[:10]
        self.assertEqual(notes.count(), 10)

        # Test django's template filters
        # self.assertEqual(length(notes), 10)
        t = Template("{{ notes.count }}")
        c = Context({"notes": notes})
        self.assertEqual(t.render(c), "10")

        # Test with skip
        notes = Note.objects.skip(90)
        self.assertEqual(notes.count(), 10)

        # Test django's template filters
        self.assertEqual(notes.count(), 10)
        t = Template("{{ notes.count }}")
        c = Context({"notes": notes})
        self.assertEqual(t.render(c), "10")

        # Test with limit
        notes = Note.objects.limit(10)
        self.assertEqual(notes.count(), 10)

        # Test django's template filters
        self.assertEqual(notes.count(), 10)
        t = Template("{{ notes.count }}")
        c = Context({"notes": notes})
        self.assertEqual(t.render(c), "10")

        # Test with skip and limit
        notes = Note.objects.skip(10).limit(10)

        # Test django's template filters
        self.assertEqual(notes.count(), 10)
        t = Template("{{ notes.count }}")
        c = Context({"notes": notes})
        self.assertEqual(t.render(c), "10")


class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase):
    backend = SessionStore
47
tests/test_jinja.py
Normal file

@@ -0,0 +1,47 @@
import sys
sys.path[0:0] = [""]

import unittest

from mongoengine import *

import jinja2


class TemplateFilterTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')

    def test_jinja2(self):
        env = jinja2.Environment()

        class TestData(Document):
            title = StringField()
            description = StringField()

        TestData.drop_collection()

        examples = [('A', '1'),
                    ('B', '2'),
                    ('C', '3')]

        for title, description in examples:
            TestData(title=title, description=description).save()

        tmpl = """
{%- for record in content -%}
    {%- if loop.first -%}{ {%- endif -%}
    "{{ record.title }}": "{{ record.description }}"
    {%- if loop.last -%} }{%- else -%},{% endif -%}
{%- endfor -%}
"""
        ctx = {'content': TestData.objects}
        template = env.from_string(tmpl)
        rendered = template.render(**ctx)

        self.assertEqual('{"A": "1","B": "2","C": "3"}', rendered)


if __name__ == '__main__':
    unittest.main()
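
Nothing Jinja2-specific is required for the test above to pass: `TestData.objects` goes into the context untouched, and `{% for %}` simply calls `iter()` on it, so the QuerySet executes lazily during the render. The template is equivalent to this plain loop:

    pairs = []
    for record in TestData.objects:   # same lazy iteration the template performs
        pairs.append('"%s": "%s"' % (record.title, record.description))
    print('{' + ','.join(pairs) + '}')  # -> {"A": "1","B": "2","C": "3"}

with `loop.first` / `loop.last` standing in for the brace and comma bookkeeping.
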