Merge branch 'master' into pr/674

Ross Lawley 2014-06-26 16:22:21 +01:00
commit bb1367cfb9
16 changed files with 503 additions and 76 deletions

View File

@ -6,26 +6,60 @@ python:
- "2.7"
- "3.2"
- "3.3"
- "3.4"
- "pypy"
env:
- PYMONGO=dev DJANGO=1.6
- PYMONGO=dev DJANGO=1.5.5
- PYMONGO=dev DJANGO=1.4.10
- PYMONGO=2.5 DJANGO=1.6
- PYMONGO=2.5 DJANGO=1.5.5
- PYMONGO=2.5 DJANGO=1.4.10
- PYMONGO=3.2 DJANGO=1.6
- PYMONGO=3.2 DJANGO=1.5.5
- PYMONGO=3.3 DJANGO=1.6
- PYMONGO=3.3 DJANGO=1.5.5
- PYMONGO=dev DJANGO=1.6.5
- PYMONGO=dev DJANGO=1.5.8
- PYMONGO=dev DJANGO=1.4.13
- PYMONGO=2.5.2 DJANGO=1.6.5
- PYMONGO=2.5.2 DJANGO=1.5.8
- PYMONGO=2.5.2 DJANGO=1.4.13
- PYMONGO=2.6.3 DJANGO=1.6.5
- PYMONGO=2.6.3 DJANGO=1.5.8
- PYMONGO=2.6.3 DJANGO=1.4.13
- PYMONGO=2.7.1 DJANGO=1.6.5
- PYMONGO=2.7.1 DJANGO=1.5.8
- PYMONGO=2.7.1 DJANGO=1.4.13
matrix:
fast_finish: true
exclude:
- python: "3.2"
env: PYMONGO=dev DJANGO=1.4.13
- python: "3.2"
env: PYMONGO=2.5.2 DJANGO=1.4.13
- python: "3.2"
env: PYMONGO=2.6.3 DJANGO=1.4.13
- python: "3.2"
env: PYMONGO=2.7.1 DJANGO=1.4.13
- python: "3.3"
env: PYMONGO=dev DJANGO=1.4.13
- python: "3.3"
env: PYMONGO=2.5.2 DJANGO=1.4.13
- python: "3.3"
env: PYMONGO=2.6.3 DJANGO=1.4.13
- python: "3.3"
env: PYMONGO=2.7.1 DJANGO=1.4.13
- python: "3.4"
env: PYMONGO=dev DJANGO=1.4.13
- python: "3.4"
env: PYMONGO=2.5.2 DJANGO=1.4.13
- python: "3.4"
env: PYMONGO=2.6.3 DJANGO=1.4.13
- python: "3.4"
env: PYMONGO=2.7.1 DJANGO=1.4.13
install:
- if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then cp /usr/lib/*/libz.so $VIRTUAL_ENV/lib/; fi
- if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install pil --use-mirrors ; true; fi
- sudo apt-get install python-dev python3-dev libopenjpeg-dev zlib1g-dev libjpeg-turbo8-dev libtiff4-dev libjpeg8-dev libfreetype6-dev liblcms2-dev libwebp-dev tcl8.5-dev tk8.5-dev python-tk
- if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi
- if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO --use-mirrors; true; fi
- if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO; true; fi
- pip install Django==$DJANGO
- pip install https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-2.1.tar.gz#md5=1534bb15cf311f07afaa3aacba1c028b
- python setup.py install
script:
- python setup.py test
- if [[ $TRAVIS_PYTHON_VERSION == '3.'* ]]; then 2to3 . -w; fi;
- python benchmark.py
notifications:
irc: "irc.freenode.org#mongoengine"
branches:

View File

@ -171,7 +171,7 @@ that much better:
* Michael Bartnett (https://github.com/michaelbartnett)
* Alon Horev (https://github.com/alonho)
* Kelvin Hammond (https://github.com/kelvinhammond)
* Jatin- (https://github.com/jatin-)
* Jatin Chopra (https://github.com/jatin)
* Paul Uithol (https://github.com/PaulUithol)
* Thom Knowles (https://github.com/fleat)
* Paul (https://github.com/squamous)
@ -189,3 +189,6 @@ that much better:
* Tom (https://github.com/tomprimozic)
* j0hnsmith (https://github.com/j0hnsmith)
* Damien Churchill (https://github.com/damoxc)
* Jonathan Simon Prates (https://github.com/jonathansp)
* Thiago Papageorgiou (https://github.com/tmpapageorgiou)
* Omer Katz (https://github.com/thedrow)

View File

@ -29,9 +29,18 @@ setup.py install``.
Dependencies
============
- pymongo 2.5+
- pymongo>=2.5
- sphinx (optional - for documentation generation)
Optional Dependencies
---------------------
- **Django Integration:** Django>=1.4.0 for Python 2.x or PyPy and Django>=1.5.0 for Python 3.x
- **Image Fields**: Pillow>=2.0.0 or PIL (not recommended since MongoEngine is tested with Pillow)
- dateutil>=2.1.0
.. note::
MongoEngine always runs its test suite against the latest patch version of each dependency, e.g. Django 1.6.5.
Examples
========
Some simple examples of what MongoEngine code looks like::
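(The README's example block itself falls outside this hunk; the following is a
minimal sketch of typical MongoEngine usage, with illustrative class and field
names rather than text from the README.)

    from mongoengine import Document, StringField, IntField, connect

    connect('tumblelog')  # assumes a MongoDB instance running on localhost

    class BlogPost(Document):
        title = StringField(required=True, max_length=200)
        rating = IntField(default=0)

    BlogPost(title='Hello MongoEngine', rating=5).save()
    for post in BlogPost.objects(rating__gte=1):
        print(post.title)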

View File

@ -15,7 +15,7 @@ def cprofile_main():
class Noddy(Document):
fields = DictField()
for i in xrange(1):
for i in range(1):
noddy = Noddy()
for j in range(20):
noddy.fields["key" + str(j)] = "value " + str(j)
@ -113,6 +113,7 @@ def main():
4.68946313858
----------------------------------------------------------------------------------------------------
"""
print("Benchmarking...")
setup = """
from pymongo import MongoClient
@ -127,7 +128,7 @@ connection = MongoClient()
db = connection.timeit_test
noddy = db.noddy
for i in xrange(10000):
for i in range(10000):
example = {'fields': {}}
for j in range(20):
example['fields']["key"+str(j)] = "value "+str(j)
@ -138,10 +139,10 @@ myNoddys = noddy.find()
[n for n in myNoddys] # iterate
"""
print "-" * 100
print """Creating 10000 dictionaries - Pymongo"""
print("-" * 100)
print("""Creating 10000 dictionaries - Pymongo""")
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)
print(t.timeit(1))
stmt = """
from pymongo import MongoClient
@ -150,7 +151,7 @@ connection = MongoClient()
db = connection.timeit_test
noddy = db.noddy
for i in xrange(10000):
for i in range(10000):
example = {'fields': {}}
for j in range(20):
example['fields']["key"+str(j)] = "value "+str(j)
@ -161,10 +162,10 @@ myNoddys = noddy.find()
[n for n in myNoddys] # iterate
"""
print "-" * 100
print """Creating 10000 dictionaries - Pymongo write_concern={"w": 0}"""
print("-" * 100)
print("""Creating 10000 dictionaries - Pymongo write_concern={"w": 0}""")
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)
print(t.timeit(1))
setup = """
from pymongo import MongoClient
@ -180,7 +181,7 @@ class Noddy(Document):
"""
stmt = """
for i in xrange(10000):
for i in range(10000):
noddy = Noddy()
for j in range(20):
noddy.fields["key"+str(j)] = "value "+str(j)
@ -190,13 +191,13 @@ myNoddys = Noddy.objects()
[n for n in myNoddys] # iterate
"""
print "-" * 100
print """Creating 10000 dictionaries - MongoEngine"""
print("-" * 100)
print("""Creating 10000 dictionaries - MongoEngine""")
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)
print(t.timeit(1))
stmt = """
for i in xrange(10000):
for i in range(10000):
noddy = Noddy()
fields = {}
for j in range(20):
@ -208,13 +209,13 @@ myNoddys = Noddy.objects()
[n for n in myNoddys] # iterate
"""
print "-" * 100
print """Creating 10000 dictionaries without continual assign - MongoEngine"""
print("-" * 100)
print("""Creating 10000 dictionaries without continual assign - MongoEngine""")
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)
print(t.timeit(1))
stmt = """
for i in xrange(10000):
for i in range(10000):
noddy = Noddy()
for j in range(20):
noddy.fields["key"+str(j)] = "value "+str(j)
@ -224,13 +225,13 @@ myNoddys = Noddy.objects()
[n for n in myNoddys] # iterate
"""
print "-" * 100
print """Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True"""
print("-" * 100)
print("""Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True""")
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)
print(t.timeit(1))
stmt = """
for i in xrange(10000):
for i in range(10000):
noddy = Noddy()
for j in range(20):
noddy.fields["key"+str(j)] = "value "+str(j)
@ -240,13 +241,13 @@ myNoddys = Noddy.objects()
[n for n in myNoddys] # iterate
"""
print "-" * 100
print """Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True"""
print("-" * 100)
print("""Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True""")
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)
print(t.timeit(1))
stmt = """
for i in xrange(10000):
for i in range(10000):
noddy = Noddy()
for j in range(20):
noddy.fields["key"+str(j)] = "value "+str(j)
@ -256,13 +257,13 @@ myNoddys = Noddy.objects()
[n for n in myNoddys] # iterate
"""
print "-" * 100
print """Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False"""
print("-" * 100)
print("""Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False""")
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)
print(t.timeit(1))
stmt = """
for i in xrange(10000):
for i in range(10000):
noddy = Noddy()
for j in range(20):
noddy.fields["key"+str(j)] = "value "+str(j)
@ -272,11 +273,11 @@ myNoddys = Noddy.objects()
[n for n in myNoddys] # iterate
"""
print "-" * 100
print """Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False"""
print("-" * 100)
print("""Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False""")
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)
print(t.timeit(1))
if __name__ == "__main__":
main()
main()

View File

@ -2,6 +2,11 @@
Changelog
=========
Changes in 0.9.X - DEV
======================
- pypy support #673
Changes in 0.8.7
================
- Calling reload on deleted / nonexistent documents raises DoesNotExist (#538)

View File

@ -531,6 +531,8 @@ field name to the index definition.
Sometimes it's more efficient to index only parts of embedded / dictionary fields;
in this case, use 'dot' notation to identify the value to index, e.g. `rank.title`
(see the sketch below).
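A hedged sketch of such an index definition (the model and field names here are
hypothetical, not taken from the guide):

    from mongoengine import Document, StringField, DictField

    class Player(Document):
        name = StringField()
        rank = DictField()  # e.g. {'title': 'Grandmaster', 'points': 2800}

        meta = {
            'indexes': [
                'rank.title',  # index only the embedded 'title' value
            ]
        }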
.. _geospatial-indexes:
Geospatial indexes
------------------

View File

@ -488,8 +488,9 @@ calling it with keyword arguments::
Atomic updates
==============
Documents may be updated atomically by using the
:meth:`~mongoengine.queryset.QuerySet.update_one` and
:meth:`~mongoengine.queryset.QuerySet.update` methods on a
:meth:`~mongoengine.queryset.QuerySet.update_one`,
:meth:`~mongoengine.queryset.QuerySet.update` and
:meth:`~mongoengine.queryset.QuerySet.modify` methods on a
:meth:`~mongoengine.queryset.QuerySet`. There are several different "modifiers"
that you may use with these methods:
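A hedged usage sketch of these methods, including the modify() method documented
by this change (model and field names are illustrative; the full modifier list
falls outside this hunk):

    from mongoengine import Document, StringField, IntField

    class Page(Document):
        title = StringField()
        views = IntField(default=0)

    # update(): apply modifiers atomically to every matching document
    Page.objects(title='Home').update(inc__views=1)

    # update_one(): apply modifiers to a single matching document
    Page.objects(title='Home').update_one(set__title='Homepage')

    # modify(): atomically update and return the document; new=True returns
    # the post-update version instead of the original
    page = Page.objects(title='Homepage').modify(inc__views=1, new=True)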

View File

@ -760,7 +760,7 @@ class DictField(ComplexBaseField):
similar to an embedded document, but the structure is not defined.
.. note::
Required means it cannot be empty - as the default for ListFields is []
Required means it cannot be empty - as the default for DictFields is {}
.. versionadded:: 0.3
.. versionchanged:: 0.5 - Can now handle complex / varying types of data
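A hedged sketch of a DictField holding undeclared, nested data (names are
illustrative):

    from mongoengine import Document, StringField, DictField

    class WikiPage(Document):
        title = StringField()
        metadata = DictField()  # arbitrary structure, not declared up front

    WikiPage(title='About', metadata={'tags': ['intro'], 'views': 10}).save()
    WikiPage.objects(metadata__views__gte=5)  # translated to dot notation: 'metadata.views'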
@ -1613,7 +1613,12 @@ class UUIDField(BaseField):
class GeoPointField(BaseField):
"""A list storing a latitude and longitude.
"""A list storing a longitude and latitude coordinate.
.. note:: this represents a generic point in a 2D plane and is a legacy way of
representing a geo point. It admits 2d indexes but not "2dsphere" indexes
in MongoDB > 2.4, which are more natural for modeling geospatial points.
See :ref:`geospatial-indexes`
.. versionadded:: 0.4
"""
@ -1635,7 +1640,7 @@ class GeoPointField(BaseField):
class PointField(GeoJsonBaseField):
"""A geo json field storing a latitude and longitude.
"""A GeoJSON field storing a longitude and latitude coordinate.
The data is represented as:
@ -1654,7 +1659,7 @@ class PointField(GeoJsonBaseField):
class LineStringField(GeoJsonBaseField):
"""A geo json field storing a line of latitude and longitude coordinates.
"""A GeoJSON field storing a line of longitude and latitude coordinates.
The data is represented as:
@ -1672,7 +1677,7 @@ class LineStringField(GeoJsonBaseField):
class PolygonField(GeoJsonBaseField):
"""A geo json field storing a polygon of latitude and longitude coordinates.
"""A GeoJSON field storing a polygon of longitude and latitude coordinates.
The data is represented as:

View File

@ -10,6 +10,7 @@ import warnings
from bson.code import Code
from bson import json_util
import pymongo
import pymongo.errors
from pymongo.common import validate_read_preference
from mongoengine import signals
@ -50,7 +51,7 @@ class BaseQuerySet(object):
self._initial_query = {}
self._where_clause = None
self._loaded_fields = QueryFieldList()
self._ordering = []
self._ordering = None
self._snapshot = False
self._timeout = True
self._class_check = True
@ -154,6 +155,22 @@ class BaseQuerySet(object):
def __iter__(self):
raise NotImplementedError
def _has_data(self):
""" Retrieves whether cursor has any data. """
queryset = self.order_by()
return False if queryset.first() is None else True
def __nonzero__(self):
""" Avoid to open all records in an if stmt in Py2. """
return self._has_data()
def __bool__(self):
""" Avoid to open all records in an if stmt in Py3. """
return self._has_data()
# Core functions
def all(self):
@ -443,6 +460,8 @@ class BaseQuerySet(object):
return result
elif result:
return result['n']
except pymongo.errors.DuplicateKeyError, err:
raise NotUniqueError(u'Update failed (%s)' % unicode(err))
except pymongo.errors.OperationFailure, err:
if unicode(err) == u'multi not coded yet':
message = u'update() method requires MongoDB 1.1.3+'
@ -466,6 +485,59 @@ class BaseQuerySet(object):
return self.update(
upsert=upsert, multi=False, write_concern=write_concern, **update)
def modify(self, upsert=False, full_response=False, remove=False, new=False, **update):
"""Update and return the updated document.
Returns either the document before or after modification, depending on the
`new` parameter. If no documents match the query and `upsert` is false,
returns ``None``. If upserting and `new` is false, returns ``None``.
If the full_response parameter is ``True``, the return value will be
the entire response object from the server, including the 'ok' and
'lastErrorObject' fields, rather than just the modified document.
This is useful mainly because the 'lastErrorObject' document holds
information about the command's execution.
:param upsert: insert if document doesn't exist (default ``False``)
:param full_response: return the entire response object from the
server (default ``False``)
:param remove: remove rather than updating (default ``False``)
:param new: return updated rather than original document
(default ``False``)
:param update: Django-style update keyword arguments
.. versionadded:: 0.9
"""
if remove and new:
raise OperationError("Conflicting parameters: remove and new")
if not update and not upsert and not remove:
raise OperationError("No update parameters, must either update or remove")
queryset = self.clone()
query = queryset._query
update = transform.update(queryset._document, **update)
sort = queryset._ordering
try:
result = queryset._collection.find_and_modify(
query, update, upsert=upsert, sort=sort, remove=remove, new=new,
full_response=full_response, **self._cursor_args)
except pymongo.errors.DuplicateKeyError, err:
raise NotUniqueError(u"Update failed (%s)" % err)
except pymongo.errors.OperationFailure, err:
raise OperationError(u"Update failed (%s)" % err)
if full_response:
if result["value"] is not None:
result["value"] = self._document._from_son(result["value"])
else:
if result is not None:
result = self._document._from_son(result)
return result
def with_id(self, object_id):
"""Retrieve the object matching the id provided. Uses `object_id` only
and raises InvalidQueryError if a filter has been applied. Returns
@ -1189,8 +1261,9 @@ class BaseQuerySet(object):
if self._ordering:
# Apply query ordering
self._cursor_obj.sort(self._ordering)
elif self._document._meta['ordering']:
# Otherwise, apply the ordering from the document model
elif self._ordering is None and self._document._meta['ordering']:
# Otherwise, apply the ordering from the document model, unless
# it's been explicitly cleared via order_by with no arguments
order = self._get_order_by(self._document._meta['ordering'])
self._cursor_obj.sort(order)
@ -1392,7 +1465,7 @@ class BaseQuerySet(object):
pass
key_list.append((key, direction))
if self._cursor_obj:
if self._cursor_obj and key_list:
self._cursor_obj.sort(key_list)
return key_list
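A hedged sketch of what the truthiness and ordering changes above enable (model
names are illustrative):

    from mongoengine import Document, StringField

    class Article(Document):
        title = StringField()

    qs = Article.objects(title__istartswith='mongo')
    if qs:                       # __bool__/__nonzero__ -> _has_data(): fetches at
        print(qs.first().title)  # most one document instead of iterating the cursor

    qs = Article.objects.order_by('-title')
    list(qs.order_by())          # order_by() with no arguments now clears the
                                 # ordering, including any meta['ordering'] default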

View File

@ -38,7 +38,7 @@ def query(_doc_cls=None, _field_operation=False, **query):
mongo_query.update(value)
continue
parts = key.split('__')
parts = key.rsplit('__')
indices = [(i, p) for i, p in enumerate(parts) if p.isdigit()]
parts = [part for part in parts if not part.isdigit()]
# Check for an operator and transform to mongo-style if there is

View File

@ -51,12 +51,12 @@ CLASSIFIERS = [
extra_opts = {"packages": find_packages(exclude=["tests", "tests.*"])}
if sys.version_info[0] == 3:
extra_opts['use_2to3'] = True
extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'jinja2==2.6', 'django>=1.5.1']
extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'jinja2==2.6', 'Pillow>=2.0.0', 'django>=1.5.1']
if "test" in sys.argv or "nosetests" in sys.argv:
extra_opts['packages'] = find_packages()
extra_opts['package_data'] = {"tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]}
else:
extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.4.2', 'PIL', 'jinja2>=2.6', 'python-dateutil']
extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.4.2', 'Pillow>=2.0.0', 'jinja2>=2.6', 'python-dateutil']
if sys.version_info[0] == 2 and sys.version_info[1] == 6:
extra_opts['tests_require'].append('unittest2')
@ -75,7 +75,7 @@ setup(name='mongoengine',
long_description=LONG_DESCRIPTION,
platforms=['any'],
classifiers=CLASSIFIERS,
install_requires=['pymongo>=2.5'],
install_requires=['pymongo>=2.7'],
test_suite='nose.collector',
**extra_opts
)

View File

@ -15,7 +15,7 @@ from tests.fixtures import (PickleEmbedded, PickleTest, PickleSignalsTest,
from mongoengine import *
from mongoengine.errors import (NotRegistered, InvalidDocumentError,
InvalidQueryError)
InvalidQueryError, NotUniqueError)
from mongoengine.queryset import NULLIFY, Q
from mongoengine.connection import get_db
from mongoengine.base import get_document
@ -57,7 +57,7 @@ class InstanceTest(unittest.TestCase):
date = DateTimeField(default=datetime.now)
meta = {
'max_documents': 10,
'max_size': 90000,
'max_size': 4096,
}
Log.drop_collection()
@ -75,7 +75,7 @@ class InstanceTest(unittest.TestCase):
options = Log.objects._collection.options()
self.assertEqual(options['capped'], True)
self.assertEqual(options['max'], 10)
self.assertEqual(options['size'], 90000)
self.assertTrue(options['size'] >= 4096)
# Check that the document cannot be redefined with different options
def recreate_log_document():
@ -990,6 +990,16 @@ class InstanceTest(unittest.TestCase):
self.assertRaises(InvalidQueryError, update_no_op_raises)
def test_update_unique_field(self):
class Doc(Document):
name = StringField(unique=True)
doc1 = Doc(name="first").save()
doc2 = Doc(name="second").save()
self.assertRaises(NotUniqueError, lambda:
doc2.update(set__name=doc1.name))
def test_embedded_update(self):
"""
Test update on `EmbeddedDocumentField` fields
@ -2411,7 +2421,7 @@ class InstanceTest(unittest.TestCase):
for parameter_name, parameter in self.parameters.iteritems():
parameter.expand()
class System(Document):
class NodesSystem(Document):
name = StringField(required=True)
nodes = MapField(ReferenceField(Node, dbref=False))
@ -2419,18 +2429,18 @@ class InstanceTest(unittest.TestCase):
for node_name, node in self.nodes.iteritems():
node.expand()
node.save(*args, **kwargs)
super(System, self).save(*args, **kwargs)
super(NodesSystem, self).save(*args, **kwargs)
System.drop_collection()
NodesSystem.drop_collection()
Node.drop_collection()
system = System(name="system")
system = NodesSystem(name="system")
system.nodes["node"] = Node()
system.save()
system.nodes["node"].parameters["param"] = Parameter()
system.save()
system = System.objects.first()
system = NodesSystem.objects.first()
self.assertEqual("UNDEFINED", system.nodes["node"].parameters["param"].macros["test"].value)
def test_embedded_document_equality(self):

View File

@ -279,7 +279,7 @@ class FileTest(unittest.TestCase):
t.image.put(f)
self.fail("Should have raised an invalidation error")
except ValidationError, e:
self.assertEqual("%s" % e, "Invalid image: cannot identify image file")
self.assertEqual("%s" % e, "Invalid image: cannot identify image file %s" % f)
t = TestImage()
t.image.put(open(TEST_IMAGE_PATH, 'rb'))

View File

@ -3,3 +3,4 @@ from field_list import *
from queryset import *
from visitor import *
from geo import *
from modify import *

tests/queryset/modify.py (new file, 102 lines)
View File

@ -0,0 +1,102 @@
import sys
sys.path[0:0] = [""]
import unittest
from mongoengine import connect, Document, IntField
__all__ = ("FindAndModifyTest",)
class Doc(Document):
id = IntField(primary_key=True)
value = IntField()
class FindAndModifyTest(unittest.TestCase):
def setUp(self):
connect(db="mongoenginetest")
Doc.drop_collection()
def assertDbEqual(self, docs):
self.assertEqual(list(Doc._collection.find().sort("id")), docs)
def test_modify(self):
Doc(id=0, value=0).save()
doc = Doc(id=1, value=1).save()
old_doc = Doc.objects(id=1).modify(set__value=-1)
self.assertEqual(old_doc.to_json(), doc.to_json())
self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}])
def test_modify_with_new(self):
Doc(id=0, value=0).save()
doc = Doc(id=1, value=1).save()
new_doc = Doc.objects(id=1).modify(set__value=-1, new=True)
doc.value = -1
self.assertEqual(new_doc.to_json(), doc.to_json())
self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}])
def test_modify_not_existing(self):
Doc(id=0, value=0).save()
self.assertEqual(Doc.objects(id=1).modify(set__value=-1), None)
self.assertDbEqual([{"_id": 0, "value": 0}])
def test_modify_with_upsert(self):
Doc(id=0, value=0).save()
old_doc = Doc.objects(id=1).modify(set__value=1, upsert=True)
self.assertEqual(old_doc, None)
self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": 1}])
def test_modify_with_upsert_existing(self):
Doc(id=0, value=0).save()
doc = Doc(id=1, value=1).save()
old_doc = Doc.objects(id=1).modify(set__value=-1, upsert=True)
self.assertEqual(old_doc.to_json(), doc.to_json())
self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}])
def test_modify_with_upsert_with_new(self):
Doc(id=0, value=0).save()
new_doc = Doc.objects(id=1).modify(upsert=True, new=True, set__value=1)
self.assertEqual(new_doc.to_mongo(), {"_id": 1, "value": 1})
self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": 1}])
def test_modify_with_remove(self):
Doc(id=0, value=0).save()
doc = Doc(id=1, value=1).save()
old_doc = Doc.objects(id=1).modify(remove=True)
self.assertEqual(old_doc.to_json(), doc.to_json())
self.assertDbEqual([{"_id": 0, "value": 0}])
def test_find_and_modify_with_remove_not_existing(self):
Doc(id=0, value=0).save()
self.assertEqual(Doc.objects(id=1).modify(remove=True), None)
self.assertDbEqual([{"_id": 0, "value": 0}])
def test_modify_with_order_by(self):
Doc(id=0, value=3).save()
Doc(id=1, value=2).save()
Doc(id=2, value=1).save()
doc = Doc(id=3, value=0).save()
old_doc = Doc.objects().order_by("-id").modify(set__value=-1)
self.assertEqual(old_doc.to_json(), doc.to_json())
self.assertDbEqual([
{"_id": 0, "value": 3}, {"_id": 1, "value": 2},
{"_id": 2, "value": 1}, {"_id": 3, "value": -1}])
def test_modify_with_fields(self):
Doc(id=0, value=0).save()
Doc(id=1, value=1).save()
old_doc = Doc.objects(id=1).only("id").modify(set__value=-1)
self.assertEqual(old_doc.to_mongo(), {"_id": 1})
self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}])
if __name__ == '__main__':
unittest.main()

View File

@ -650,7 +650,7 @@ class QuerySetTest(unittest.TestCase):
blogs.append(Blog(title="post %s" % i, posts=[post1, post2]))
Blog.objects.insert(blogs, load_bulk=False)
self.assertEqual(q, 1) # 1 for the insert
self.assertEqual(q, 99) # profiling logs each doc now :(
Blog.drop_collection()
Blog.ensure_indexes()
@ -659,7 +659,7 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual(q, 0)
Blog.objects.insert(blogs)
self.assertEqual(q, 2) # 1 for insert, and 1 for in bulk fetch
self.assertEqual(q, 100) # 99 for the inserts, and 1 for the bulk fetch
Blog.drop_collection()
@ -1040,6 +1040,76 @@ class QuerySetTest(unittest.TestCase):
expected = [blog_post_1, blog_post_2, blog_post_3]
self.assertSequence(qs, expected)
def test_clear_ordering(self):
""" Make sure one can clear the query set ordering by applying a
consecutive order_by()
"""
class Person(Document):
name = StringField()
Person.drop_collection()
Person(name="A").save()
Person(name="B").save()
qs = Person.objects.order_by('-name')
# Make sure we can clear a previously specified ordering
with query_counter() as q:
lst = list(qs.order_by())
op = q.db.system.profile.find({"ns":
{"$ne": "%s.system.indexes" % q.db.name}})[0]
self.assertTrue('$orderby' not in op['query'])
self.assertEqual(lst[0].name, 'A')
# Make sure previously specified ordering is preserved during
# consecutive calls to the same query set
with query_counter() as q:
lst = list(qs)
op = q.db.system.profile.find({"ns":
{"$ne": "%s.system.indexes" % q.db.name}})[0]
self.assertTrue('$orderby' in op['query'])
self.assertEqual(lst[0].name, 'B')
def test_clear_default_ordering(self):
class Person(Document):
name = StringField()
meta = {
'ordering': ['-name']
}
Person.drop_collection()
Person(name="A").save()
Person(name="B").save()
qs = Person.objects
# Make sure clearing default ordering works
with query_counter() as q:
lst = list(qs.order_by())
op = q.db.system.profile.find({"ns":
{"$ne": "%s.system.indexes" % q.db.name}})[0]
self.assertTrue('$orderby' not in op['query'])
self.assertEqual(lst[0].name, 'A')
# Make sure default ordering is preserved during consecutive calls
# to the same query set
with query_counter() as q:
lst = list(qs)
op = q.db.system.profile.find({"ns":
{"$ne": "%s.system.indexes" % q.db.name}})[0]
self.assertTrue('$orderby' in op['query'])
self.assertEqual(lst[0].name, 'B')
def test_find_embedded(self):
"""Ensure that an embedded document is properly returned from a query.
"""
@ -3586,7 +3656,13 @@ class QuerySetTest(unittest.TestCase):
[x for x in people]
self.assertEqual(100, len(people._result_cache))
self.assertEqual(None, people._len)
import platform
if platform.python_implementation() != "PyPy":
# PyPy evaluates __len__ when iterating with list comprehensions while CPython does not.
# This may be a bug in PyPy (PyPy/#1802) but it does not affect the behavior of MongoEngine.
self.assertEqual(None, people._len)
self.assertEqual(q, 1)
list(people)
@ -3814,6 +3890,111 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual(Example.objects(size=instance_size).count(), 1)
self.assertEqual(Example.objects(size__in=[instance_size]).count(), 1)
def test_cursor_in_an_if_stmt(self):
class Test(Document):
test_field = StringField()
Test.drop_collection()
queryset = Test.objects
if queryset:
raise AssertionError('Empty cursor returns True')
test = Test()
test.test_field = 'test'
test.save()
queryset = Test.objects
if not queryset:
raise AssertionError('Cursor has data and returned False')
queryset.next()
if not queryset:
raise AssertionError('Cursor has data and must return True,'
' even on the last item.')
def test_bool_performance(self):
class Person(Document):
name = StringField()
Person.drop_collection()
for i in xrange(100):
Person(name="No: %s" % i).save()
with query_counter() as q:
if Person.objects:
pass
self.assertEqual(q, 1)
op = q.db.system.profile.find({"ns":
{"$ne": "%s.system.indexes" % q.db.name}})[0]
self.assertEqual(op['nreturned'], 1)
def test_bool_with_ordering(self):
class Person(Document):
name = StringField()
Person.drop_collection()
Person(name="Test").save()
qs = Person.objects.order_by('name')
with query_counter() as q:
if qs:
pass
op = q.db.system.profile.find({"ns":
{"$ne": "%s.system.indexes" % q.db.name}})[0]
self.assertFalse('$orderby' in op['query'],
'BaseQuerySet cannot use orderby in if stmt')
with query_counter() as p:
for x in qs:
pass
op = p.db.system.profile.find({"ns":
{"$ne": "%s.system.indexes" % q.db.name}})[0]
self.assertTrue('$orderby' in op['query'],
'BaseQuerySet cannot remove orderby in for loop')
def test_bool_with_ordering_from_meta_dict(self):
class Person(Document):
name = StringField()
meta = {
'ordering': ['name']
}
Person.drop_collection()
Person(name="B").save()
Person(name="C").save()
Person(name="A").save()
with query_counter() as q:
if Person.objects:
pass
op = q.db.system.profile.find({"ns":
{"$ne": "%s.system.indexes" % q.db.name}})[0]
self.assertFalse('$orderby' in op['query'],
'BaseQuerySet must remove orderby from meta in boolean test')
self.assertEqual(Person.objects.first().name, 'A')
self.assertTrue(Person.objects._has_data(),
'Cursor has data and returned False')
if __name__ == '__main__':
unittest.main()