Merge branch 'master' into master

commit 7fb1c9dd35
erdenezul authored 2018-05-21 16:22:07 +08:00, committed by GitHub
34 changed files with 1480 additions and 211 deletions

View File

@@ -1,5 +1,6 @@
 #!/bin/bash
+sudo apt-get remove mongodb-org-server
 sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10
 if [ "$MONGODB" = "2.4" ]; then
@@ -13,7 +14,7 @@ elif [ "$MONGODB" = "2.6" ]; then
   sudo apt-get install mongodb-org-server=2.6.12
   # service should be started automatically
 elif [ "$MONGODB" = "3.0" ]; then
-  echo "deb http://repo.mongodb.org/apt/ubuntu precise/mongodb-org/3.0 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb.list
+  echo "deb http://repo.mongodb.org/apt/ubuntu trusty/mongodb-org/3.0 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb.list
   sudo apt-get update
   sudo apt-get install mongodb-org-server=3.0.14
   # service should be started automatically
@@ -21,3 +22,6 @@ else
   echo "Invalid MongoDB version, expected 2.4, 2.6, or 3.0."
   exit 1
 fi;
+
+mkdir db
+1>db/logs mongod --dbpath=db &

View File

@@ -15,12 +15,11 @@ language: python
 python:
 - 2.7
 - 3.5
+- 3.6
 - pypy
 env:
-- MONGODB=2.6 PYMONGO=2.7
-- MONGODB=2.6 PYMONGO=2.8
-- MONGODB=2.6 PYMONGO=3.0
+- MONGODB=2.6 PYMONGO=3.x
 matrix:
     # Finish the build as soon as one job fails
@@ -28,20 +27,22 @@ matrix:
     include:
     - python: 2.7
-      env: MONGODB=2.4 PYMONGO=2.7
+      env: MONGODB=2.4 PYMONGO=3.5
     - python: 2.7
-      env: MONGODB=2.4 PYMONGO=3.0
-    - python: 2.7
-      env: MONGODB=3.0 PYMONGO=3.0
+      env: MONGODB=3.0 PYMONGO=3.x
     - python: 3.5
-      env: MONGODB=2.4 PYMONGO=2.7
+      env: MONGODB=2.4 PYMONGO=3.5
     - python: 3.5
-      env: MONGODB=2.4 PYMONGO=3.0
-    - python: 3.5
-      env: MONGODB=3.0 PYMONGO=3.0
+      env: MONGODB=3.0 PYMONGO=3.x
+    - python: 3.6
+      env: MONGODB=2.4 PYMONGO=3.5
+    - python: 3.6
+      env: MONGODB=3.0 PYMONGO=3.x
 before_install:
 - bash .install_mongodb_on_travis.sh
+- sleep 15 # https://docs.travis-ci.com/user/database-setup/#MongoDB-does-not-immediately-accept-connections
+- mongo --eval 'db.version();'
 install:
 - sudo apt-get install python-dev python3-dev libopenjpeg-dev zlib1g-dev libjpeg-turbo8-dev
@@ -90,7 +91,7 @@ deploy:
   distributions: "sdist bdist_wheel"
   # only deploy on tagged commits (aka GitHub releases) and only for the
-  # parent repo's builds running Python 2.7 along with dev PyMongo (we run
+  # parent repo's builds running Python 2.7 along with PyMongo v3.0 (we run
   # Travis against many different Python and PyMongo versions and we don't
   # want the deploy to occur multiple times).
   on:

View File

@@ -243,3 +243,6 @@ that much better:
 * Victor Varvaryuk
 * Stanislav Kaledin (https://github.com/sallyruthstruik)
 * Dmitry Yantsen (https://github.com/mrTable)
+* Renjianxin (https://github.com/Davidrjx)
+* Erdenezul Batmunkh (https://github.com/erdenezul)
+* Andy Yankovsky (https://github.com/werat)

View File

@@ -87,7 +87,9 @@ Fields
 .. autoclass:: mongoengine.fields.DictField
 .. autoclass:: mongoengine.fields.MapField
 .. autoclass:: mongoengine.fields.ReferenceField
+.. autoclass:: mongoengine.fields.LazyReferenceField
 .. autoclass:: mongoengine.fields.GenericReferenceField
+.. autoclass:: mongoengine.fields.GenericLazyReferenceField
 .. autoclass:: mongoengine.fields.CachedReferenceField
 .. autoclass:: mongoengine.fields.BinaryField
 .. autoclass:: mongoengine.fields.FileField

View File

@@ -2,9 +2,30 @@
 Changelog
 =========
-Development
-===========
-- (Fill this out as you fix issues and develop your features).
+dev
+===
+- Subfield resolve error in generic_emdedded_document query #1651 #1652
+- use each modifier only with $position #1673 #1675
+- Improve LazyReferenceField and GenericLazyReferenceField with nested fields #1704
+- Fix validation error instance in GenericEmbeddedDocumentField #1067
+- Update cached fields when fields argument is given #1712
+- Add a db parameter to register_connection for compatibility with connect
+- Use insert_one, insert_many in Document.insert #1491
+- Use new update_one, update_many on document/queryset update #1491
+- Use insert_one, insert_many in Document.insert #1491
+- Fix reload(fields) affect changed fields #1371
+
+Changes in 0.15.0
+=================
+- Add LazyReferenceField and GenericLazyReferenceField to address #1230
+
+Changes in 0.14.1
+=================
+- Removed SemiStrictDict and started using a regular dict for `BaseDocument._data` #1630
+- Added support for the `$position` param in the `$push` operator #1566
+- Fixed `DateTimeField` interpreting an empty string as today #1533
+- Added a missing `__ne__` method to the `GridFSProxy` class #1632
+- Fixed `BaseQuerySet._fields_to_db_fields` #1553
 
 Changes in 0.14.0
 =================

View File

@@ -22,7 +22,7 @@ objects** as class attributes to the document class::
     class Page(Document):
         title = StringField(max_length=200, required=True)
-        date_modified = DateTimeField(default=datetime.datetime.now)
+        date_modified = DateTimeField(default=datetime.datetime.utcnow)
 
 As BSON (the binary format for storing data in mongodb) is order dependent,
 documents are serialized based on their field order.
@@ -80,6 +80,7 @@ are as follows:
 * :class:`~mongoengine.fields.FloatField`
 * :class:`~mongoengine.fields.GenericEmbeddedDocumentField`
 * :class:`~mongoengine.fields.GenericReferenceField`
+* :class:`~mongoengine.fields.GenericLazyReferenceField`
 * :class:`~mongoengine.fields.GeoPointField`
 * :class:`~mongoengine.fields.ImageField`
 * :class:`~mongoengine.fields.IntField`
@@ -87,6 +88,7 @@ are as follows:
 * :class:`~mongoengine.fields.MapField`
 * :class:`~mongoengine.fields.ObjectIdField`
 * :class:`~mongoengine.fields.ReferenceField`
+* :class:`~mongoengine.fields.LazyReferenceField`
 * :class:`~mongoengine.fields.SequenceField`
 * :class:`~mongoengine.fields.SortedListField`
 * :class:`~mongoengine.fields.StringField`
@@ -224,7 +226,7 @@ store; in this situation a :class:`~mongoengine.fields.DictField` is appropriate
         user = ReferenceField(User)
         answers = DictField()
 
-    survey_response = SurveyResponse(date=datetime.now(), user=request.user)
+    survey_response = SurveyResponse(date=datetime.utcnow(), user=request.user)
     response_form = ResponseForm(request.POST)
     survey_response.answers = response_form.cleaned_data()
     survey_response.save()
@@ -526,8 +528,9 @@ There are a few top level defaults for all indexes that can be set::
     meta = {
         'index_options': {},
         'index_background': True,
+        'index_cls': False,
+        'auto_create_index': True,
         'index_drop_dups': True,
-        'index_cls': False
     }
@@ -540,6 +543,12 @@ There are a few top level defaults for all indexes that can be set::
 :attr:`index_cls` (Optional)
     A way to turn off a specific index for _cls.
 
+:attr:`auto_create_index` (Optional)
+    When this is True (default), MongoEngine will ensure that the correct
+    indexes exist in MongoDB each time a command is run. This can be disabled
+    in systems where indexes are managed separately. Disabling this will improve
+    performance.
+
 :attr:`index_drop_dups` (Optional)
     Set the default value for if an index should drop duplicates
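
For illustration only (this example is not part of the diff; the ``ArchivedEvent`` document is hypothetical), a deployment that manages its indexes out of band could disable automatic index creation via the new option like so::

    class ArchivedEvent(Document):
        created = DateTimeField(default=datetime.datetime.utcnow)
        meta = {
            # Indexes are created by an external migration step instead of MongoEngine.
            'auto_create_index': False,
        }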
@@ -618,7 +627,7 @@ collection after a given period. See the official
 documentation for more information. A common usecase might be session data::
 
     class Session(Document):
-        created = DateTimeField(default=datetime.now)
+        created = DateTimeField(default=datetime.utcnow)
         meta = {
             'indexes': [
                 {'fields': ['created'], 'expireAfterSeconds': 3600}

View File

@@ -565,6 +565,15 @@ cannot use the `$` syntax in keyword arguments it has been mapped to `S`::
     >>> post.tags
     ['database', 'mongodb']
 
+From MongoDB version 2.6, the push operator supports a $position value, which
+allows values to be pushed at a given index::
+
+    >>> post = BlogPost(title="Test", tags=["mongo"])
+    >>> post.save()
+    >>> post.update(push__tags__0=["database", "code"])
+    >>> post.reload()
+    >>> post.tags
+    ['database', 'code', 'mongo']
+
 .. note::
     Currently only top level lists are handled, future versions of mongodb /
     pymongo plan to support nested positional operators. See `The $ positional

View File

@@ -43,10 +43,10 @@ Available signals include:
   has taken place but before saving.
 
 `post_save`
-  Called within :meth:`~mongoengine.Document.save` after all actions
-  (validation, insert/update, cascades, clearing dirty flags) have completed
-  successfully. Passed the additional boolean keyword argument `created` to
-  indicate if the save was an insert or an update.
+  Called within :meth:`~mongoengine.Document.save` after most actions
+  (validation, insert/update, and cascades, but not clearing dirty flags) have
+  completed successfully. Passed the additional boolean keyword argument
+  `created` to indicate if the save was an insert or an update.
 
 `pre_delete`
   Called within :meth:`~mongoengine.Document.delete` prior to
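
For illustration only (not part of the diff), a minimal ``post_save`` handler that uses the ``created`` flag described above, reusing the ``Page`` document from the defining-documents guide::

    from mongoengine import signals

    def page_saved(sender, document, **kwargs):
        # `created` is True for an insert and False for an update.
        if kwargs.get('created'):
            print('New page inserted: %s' % document.pk)
        else:
            print('Page updated: %s' % document.pk)

    signals.post_save.connect(page_saved, sender=Page)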

View File

@@ -48,4 +48,4 @@ Ordering by text score
 
 ::
 
-    objects = News.objects.search('mongo').order_by('$text_score')
+    objects = News.objects.search_text('mongo').order_by('$text_score')

View File

@@ -86,7 +86,7 @@ of them stand out as particularly intuitive solutions.
 Posts
 ^^^^^
 
-Happily mongoDB *isn't* a relational database, so we're not going to do it that
+Happily MongoDB *isn't* a relational database, so we're not going to do it that
 way. As it turns out, we can use MongoDB's schemaless nature to provide us with
 a much nicer solution. We will store all of the posts in *one collection* and
 each post type will only store the fields it needs. If we later want to add
@@ -153,7 +153,7 @@ post. This works, but there is no real reason to be storing the comments
 separately from their associated posts, other than to work around the
 relational model. Using MongoDB we can store the comments as a list of
 *embedded documents* directly on a post document. An embedded document should
-be treated no differently that a regular document; it just doesn't have its own
+be treated no differently than a regular document; it just doesn't have its own
 collection in the database. Using MongoEngine, we can define the structure of
 embedded documents, along with utility methods, in exactly the same way we do
 with regular documents::

View File

@@ -23,7 +23,7 @@ __all__ = (list(document.__all__) + list(fields.__all__) +
            list(signals.__all__) + list(errors.__all__))
 
-VERSION = (0, 14, 0)
+VERSION = (0, 15, 0)
 
 def get_version():

View File

@@ -15,7 +15,7 @@ __all__ = (
     'UPDATE_OPERATORS', '_document_registry', 'get_document',
 
     # datastructures
-    'BaseDict', 'BaseList', 'EmbeddedDocumentList',
+    'BaseDict', 'BaseList', 'EmbeddedDocumentList', 'LazyReference',
 
     # document
     'BaseDocument',

View File

@@ -3,9 +3,10 @@ from mongoengine.errors import NotRegistered
 
 __all__ = ('UPDATE_OPERATORS', 'get_document', '_document_registry')
 
-UPDATE_OPERATORS = set(['set', 'unset', 'inc', 'dec', 'pop', 'push',
-                        'push_all', 'pull', 'pull_all', 'add_to_set',
-                        'set_on_insert', 'min', 'max', 'rename'])
+UPDATE_OPERATORS = set(['set', 'unset', 'inc', 'dec', 'mul',
+                        'pop', 'push', 'push_all', 'pull',
+                        'pull_all', 'add_to_set', 'set_on_insert',
+                        'min', 'max', 'rename'])
 
 _document_registry = {}

View File

@@ -1,12 +1,13 @@
 import itertools
 import weakref
 
+from bson import DBRef
 import six
 
 from mongoengine.common import _import_class
 from mongoengine.errors import DoesNotExist, MultipleObjectsReturned
 
-__all__ = ('BaseDict', 'BaseList', 'EmbeddedDocumentList')
+__all__ = ('BaseDict', 'BaseList', 'EmbeddedDocumentList', 'LazyReference')
 
 class BaseDict(dict):
@@ -127,7 +128,7 @@ class BaseList(list):
         return value
 
     def __iter__(self):
-        for i in xrange(self.__len__()):
+        for i in six.moves.range(self.__len__()):
             yield self[i]
 
     def __setitem__(self, key, value, *args, **kwargs):
@@ -350,7 +351,8 @@ class EmbeddedDocumentList(BaseList):
     def update(self, **update):
         """
-        Updates the embedded documents with the given update values.
+        Updates the embedded documents with the given replacement values. This
+        function does not support mongoDB update operators such as ``inc__``.
 
         .. note::
             The embedded document changes are not automatically saved
@@ -447,40 +449,40 @@ class StrictDict(object):
             return cls._classes[allowed_keys]
 
-class SemiStrictDict(StrictDict):
-    __slots__ = ('_extras', )
-    _classes = {}
-
-    def __getattr__(self, attr):
-        try:
-            super(SemiStrictDict, self).__getattr__(attr)
-        except AttributeError:
-            try:
-                return self.__getattribute__('_extras')[attr]
-            except KeyError as e:
-                raise AttributeError(e)
-
-    def __setattr__(self, attr, value):
-        try:
-            super(SemiStrictDict, self).__setattr__(attr, value)
-        except AttributeError:
-            try:
-                self._extras[attr] = value
-            except AttributeError:
-                self._extras = {attr: value}
-
-    def __delattr__(self, attr):
-        try:
-            super(SemiStrictDict, self).__delattr__(attr)
-        except AttributeError:
-            try:
-                del self._extras[attr]
-            except KeyError as e:
-                raise AttributeError(e)
-
-    def __iter__(self):
-        try:
-            extras_iter = iter(self.__getattribute__('_extras'))
-        except AttributeError:
-            extras_iter = ()
-        return itertools.chain(super(SemiStrictDict, self).__iter__(), extras_iter)
+class LazyReference(DBRef):
+    __slots__ = ('_cached_doc', 'passthrough', 'document_type')
+
+    def fetch(self, force=False):
+        if not self._cached_doc or force:
+            self._cached_doc = self.document_type.objects.get(pk=self.pk)
+            if not self._cached_doc:
+                raise DoesNotExist('Trying to dereference unknown document %s' % (self))
+        return self._cached_doc
+
+    @property
+    def pk(self):
+        return self.id
+
+    def __init__(self, document_type, pk, cached_doc=None, passthrough=False):
+        self.document_type = document_type
+        self._cached_doc = cached_doc
+        self.passthrough = passthrough
+        super(LazyReference, self).__init__(self.document_type._get_collection_name(), pk)
+
+    def __getitem__(self, name):
+        if not self.passthrough:
+            raise KeyError()
+        document = self.fetch()
+        return document[name]
+
+    def __getattr__(self, name):
+        if not object.__getattribute__(self, 'passthrough'):
+            raise AttributeError()
+        document = self.fetch()
+        try:
+            return document[name]
+        except KeyError:
+            raise AttributeError()
+
+    def __repr__(self):
+        return "<LazyReference(%s, %r)>" % (self.document_type, self.pk)

View File

@@ -13,13 +13,14 @@ from mongoengine import signals
 from mongoengine.base.common import get_document
 from mongoengine.base.datastructures import (BaseDict, BaseList,
                                              EmbeddedDocumentList,
-                                             SemiStrictDict, StrictDict)
+                                             LazyReference,
+                                             StrictDict)
 from mongoengine.base.fields import ComplexBaseField
 from mongoengine.common import _import_class
 from mongoengine.errors import (FieldDoesNotExist, InvalidDocumentError,
                                 LookUpError, OperationError, ValidationError)
 
-__all__ = ('BaseDocument',)
+__all__ = ('BaseDocument', 'NON_FIELD_ERRORS')
 
 NON_FIELD_ERRORS = '__all__'
@@ -79,8 +80,7 @@ class BaseDocument(object):
         if self.STRICT and not self._dynamic:
             self._data = StrictDict.create(allowed_keys=self._fields_ordered)()
         else:
-            self._data = SemiStrictDict.create(
-                allowed_keys=self._fields_ordered)()
+            self._data = {}
 
         self._dynamic_fields = SON()
@@ -147,7 +147,7 @@ class BaseDocument(object):
         if not hasattr(self, name) and not name.startswith('_'):
             DynamicField = _import_class('DynamicField')
-            field = DynamicField(db_field=name)
+            field = DynamicField(db_field=name, null=True)
             field.name = name
             self._dynamic_fields[name] = field
             self._fields_ordered += (name,)
@@ -337,7 +337,7 @@ class BaseDocument(object):
                     value = field.generate()
                     self._data[field_name] = value
 
-                if value is not None:
+                if (value is not None) or (field.null):
                     if use_db_field:
                         data[field.db_field] = value
                     else:
@@ -489,7 +489,7 @@ class BaseDocument(object):
                 else:
                     data = getattr(data, part, None)
 
-                if hasattr(data, '_changed_fields'):
+                if not isinstance(data, LazyReference) and hasattr(data, '_changed_fields'):
                     if getattr(data, '_is_document', False):
                         continue

View File

@@ -28,7 +28,7 @@ _connections = {}
 _dbs = {}
 
-def register_connection(alias, name=None, host=None, port=None,
+def register_connection(alias, db=None, name=None, host=None, port=None,
                         read_preference=READ_PREFERENCE,
                         username=None, password=None,
                         authentication_source=None,
@@ -39,6 +39,7 @@ def register_connection(alias, name=None, host=None, port=None,
     :param alias: the name that will be used to refer to this connection
         throughout MongoEngine
     :param name: the name of the specific database to use
+    :param db: the name of the database to use, for compatibility with connect
     :param host: the host name of the :program:`mongod` instance to connect to
     :param port: the port that the :program:`mongod` instance is running on
     :param read_preference: The read preference for the collection
@@ -58,7 +59,7 @@ def register_connection(alias, name=None, host=None, port=None,
     .. versionchanged:: 0.10.6 - added mongomock support
     """
     conn_settings = {
-        'name': name or 'test',
+        'name': name or db or 'test',
         'host': host or 'localhost',
        'port': port or 27017,
        'read_preference': read_preference,
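
A minimal sketch (not part of this diff) of what the new `db` keyword enables; both calls below register the same database name, since `db` is simply tried after `name`:

    from mongoengine import register_connection

    register_connection('default', name='myapp')
    register_connection('default-compat', db='myapp')  # `db` mirrors connect()'s signature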
@@ -146,13 +147,14 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
             raise MongoEngineConnectionError(msg)
 
         def _clean_settings(settings_dict):
-            irrelevant_fields = set([
-                'name', 'username', 'password', 'authentication_source',
-                'authentication_mechanism'
-            ])
+            # set literal more efficient than calling set function
+            irrelevant_fields_set = {
+                'name', 'username', 'password',
+                'authentication_source', 'authentication_mechanism'
+            }
             return {
                 k: v for k, v in settings_dict.items()
-                if k not in irrelevant_fields
+                if k not in irrelevant_fields_set
             }
# Retrieve a copy of the connection settings associated with the requested # Retrieve a copy of the connection settings associated with the requested

View File

@@ -1,9 +1,11 @@
+from contextlib import contextmanager
+from pymongo.write_concern import WriteConcern
 from mongoengine.common import _import_class
 from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
 
 __all__ = ('switch_db', 'switch_collection', 'no_dereference',
-           'no_sub_classes', 'query_counter')
+           'no_sub_classes', 'query_counter', 'set_write_concern')
 
 class switch_db(object):
@@ -215,3 +217,10 @@ class query_counter(object):
         count = self.db.system.profile.find(ignore_query).count() - self.counter
         self.counter += 1
         return count
+
+
+@contextmanager
+def set_write_concern(collection, write_concerns):
+    combined_concerns = dict(collection.write_concern.document.items())
+    combined_concerns.update(write_concerns)
+    yield collection.with_options(write_concern=WriteConcern(**combined_concerns))
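
A minimal sketch (not part of this diff; `BlogPost` is a hypothetical document) of the new context manager. It yields a clone of the collection whose WriteConcern merges the collection's existing options with the overrides, leaving the original collection untouched:

    from mongoengine.context_managers import set_write_concern

    coll = BlogPost._get_collection()
    with set_write_concern(coll, {'w': 2, 'fsync': True}) as acked:
        acked.insert_one({'title': 'hello'})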

View File

@@ -3,6 +3,7 @@ import six
 from mongoengine.base import (BaseDict, BaseList, EmbeddedDocumentList,
                               TopLevelDocumentMetaclass, get_document)
+from mongoengine.base.datastructures import LazyReference
 from mongoengine.connection import get_db
 from mongoengine.document import Document, EmbeddedDocument
 from mongoengine.fields import DictField, ListField, MapField, ReferenceField
@@ -99,7 +100,10 @@ class DeReference(object):
             if isinstance(item, (Document, EmbeddedDocument)):
                 for field_name, field in item._fields.iteritems():
                     v = item._data.get(field_name, None)
-                    if isinstance(v, DBRef):
+                    if isinstance(v, LazyReference):
+                        # LazyReference inherits DBRef but should not be dereferenced here !
+                        continue
+                    elif isinstance(v, DBRef):
                         reference_map.setdefault(field.document_type, set()).add(v.id)
                     elif isinstance(v, (dict, SON)) and '_ref' in v:
                         reference_map.setdefault(get_document(v['_cls']), set()).add(v['_ref'].id)
@@ -110,6 +114,9 @@ class DeReference(object):
                     if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)):
                         key = field_cls
                     reference_map.setdefault(key, set()).update(refs)
+            elif isinstance(item, LazyReference):
+                # LazyReference inherits DBRef but should not be dereferenced here !
+                continue
             elif isinstance(item, DBRef):
                 reference_map.setdefault(item.collection, set()).add(item.id)
             elif isinstance(item, (dict, SON)) and '_ref' in item:
@@ -230,7 +237,7 @@ class DeReference(object):
                 elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
                     item_name = '%s.%s' % (name, k) if name else name
                     data[k] = self._attach_objects(v, depth - 1, instance=instance, name=item_name)
-                elif hasattr(v, 'id'):
+                elif isinstance(v, DBRef) and hasattr(v, 'id'):
                     data[k] = self.object_map.get((v.collection, v.id), v)
 
             if instance and name:

View File

@@ -280,6 +280,9 @@ class Document(BaseDocument):
         elif query[id_field] != self.pk:
             raise InvalidQueryError('Invalid document modify query: it must modify only this document.')
 
+        # Need to add shard key to query, or you get an error
+        query.update(self._object_key)
+
         updated = self._qs(**query).modify(new=True, **update)
         if updated is None:
             return False
@@ -576,7 +579,7 @@ class Document(BaseDocument):
         """Delete the :class:`~mongoengine.Document` from the database. This
         will only take effect if the document has been previously saved.
 
-        :parm signal_kwargs: (optional) kwargs dictionary to be passed to
+        :param signal_kwargs: (optional) kwargs dictionary to be passed to
             the signal calls.
         :param write_concern: Extra keyword arguments are passed down which
             will be used as options for the resultant
@@ -702,7 +705,6 @@ class Document(BaseDocument):
                 obj = obj[0]
             else:
                 raise self.DoesNotExist('Document does not exist')
-
         for field in obj._data:
             if not fields or field in fields:
                 try:
@@ -718,7 +720,9 @@ class Document(BaseDocument):
                     # i.e. obj.update(unset__field=1) followed by obj.reload()
                     delattr(self, field)
 
-        self._changed_fields = obj._changed_fields
+        self._changed_fields = list(
+            set(self._changed_fields) - set(fields)
+        ) if fields else obj._changed_fields
         self._created = False
 
         return self
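
A minimal sketch (not part of this diff; `Page` is the document from the docs guide) of the reload(fields) behaviour the hunk above adjusts:

    page = Page.objects.first()
    page.title = 'Draft title'     # 'title' is now tracked in page._changed_fields
    page.reload('date_modified')   # only 'date_modified' is refreshed from the database
    # 'title' stays marked as changed, so a later page.save() still persists it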
@@ -964,8 +968,16 @@ class Document(BaseDocument):
         """
         required = cls.list_indexes()
-        existing = [info['key']
-                    for info in cls._get_collection().index_information().values()]
+
+        existing = []
+        for info in cls._get_collection().index_information().values():
+            if '_fts' in info['key'][0]:
+                index_type = info['key'][0][1]
+                text_index_fields = info.get('weights').keys()
+                existing.append(
+                    [(key, index_type) for key in text_index_fields])
+            else:
+                existing.append(info['key'])
+
         missing = [index for index in required if index not in existing]
         extra = [index for index in existing if index not in required]
@@ -1010,6 +1022,7 @@ class DynamicDocument(Document):
         field_name = args[0]
         if field_name in self._dynamic_fields:
             setattr(self, field_name, None)
+            self._dynamic_fields[field_name].null = False
         else:
             super(DynamicDocument, self).__delattr__(*args, **kwargs)

View File

@@ -26,7 +26,9 @@ except ImportError:
     Int64 = long
 
 from mongoengine.base import (BaseDocument, BaseField, ComplexBaseField,
-                              GeoJsonBaseField, ObjectIdField, get_document)
+                              GeoJsonBaseField, LazyReference, ObjectIdField,
+                              get_document)
+from mongoengine.common import _import_class
 from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
 from mongoengine.document import Document, EmbeddedDocument
 from mongoengine.errors import DoesNotExist, InvalidQueryError, ValidationError
@@ -46,6 +48,7 @@ __all__ = (
     'GenericEmbeddedDocumentField', 'DynamicField', 'ListField',
     'SortedListField', 'EmbeddedDocumentListField', 'DictField',
     'MapField', 'ReferenceField', 'CachedReferenceField',
+    'LazyReferenceField', 'GenericLazyReferenceField',
     'GenericReferenceField', 'BinaryField', 'GridFSError', 'GridFSProxy',
     'FileField', 'ImageGridFsProxy', 'ImproperlyConfigured', 'ImageField',
     'GeoPointField', 'PointField', 'LineStringField', 'PolygonField',
@@ -611,6 +614,7 @@ class EmbeddedDocumentField(BaseField):
     """
 
     def __init__(self, document_type, **kwargs):
+        # XXX ValidationError raised outside of the "validate" method.
         if not (
                 isinstance(document_type, six.string_types) or
                 issubclass(document_type, EmbeddedDocument)
@@ -686,16 +690,28 @@ class GenericEmbeddedDocumentField(BaseField):
         return value
 
     def validate(self, value, clean=True):
+        if self.choices and isinstance(value, SON):
+            for choice in self.choices:
+                if value['_cls'] == choice._class_name:
+                    return True
+
         if not isinstance(value, EmbeddedDocument):
             self.error('Invalid embedded document instance provided to an '
                        'GenericEmbeddedDocumentField')
 
         value.validate(clean=clean)
 
+    def lookup_member(self, member_name):
+        if self.choices:
+            for choice in self.choices:
+                field = choice._fields.get(member_name)
+                if field:
+                    return field
+        return None
+
     def to_mongo(self, document, use_db_field=True, fields=None):
         if document is None:
             return None
         data = document.to_mongo(use_db_field, fields)
         if '_cls' not in data:
             data['_cls'] = document._class_name
@@ -779,6 +795,17 @@ class ListField(ComplexBaseField):
         kwargs.setdefault('default', lambda: [])
         super(ListField, self).__init__(**kwargs)
 
+    def __get__(self, instance, owner):
+        if instance is None:
+            # Document class being used rather than a document object
+            return self
+        value = instance._data.get(self.name)
+        LazyReferenceField = _import_class('LazyReferenceField')
+        GenericLazyReferenceField = _import_class('GenericLazyReferenceField')
+        if isinstance(self.field, (LazyReferenceField, GenericLazyReferenceField)) and value:
+            instance._data[self.name] = [self.field.build_lazyref(x) for x in value]
+        return super(ListField, self).__get__(instance, owner)
+
     def validate(self, value):
         """Make sure that a list of valid fields is being used."""
         if (not isinstance(value, (list, tuple, QuerySet)) or
@@ -893,8 +920,11 @@ class DictField(ComplexBaseField):
         self.field = field
         self._auto_dereference = False
         self.basecls = basecls or BaseField
+
+        # XXX ValidationError raised outside of the "validate" method.
         if not issubclass(self.basecls, BaseField):
             self.error('DictField only accepts dict values')
+
         kwargs.setdefault('default', lambda: {})
         super(DictField, self).__init__(*args, **kwargs)
@@ -943,6 +973,7 @@ class MapField(DictField):
     """
 
     def __init__(self, field=None, *args, **kwargs):
+        # XXX ValidationError raised outside of the "validate" method.
         if not isinstance(field, BaseField):
             self.error('Argument to MapField constructor must be a valid '
                        'field')
@@ -953,6 +984,15 @@ class ReferenceField(BaseField):
     """A reference to a document that will be automatically dereferenced on
     access (lazily).
 
+    Note this means you will get a database I/O access every time you access
+    this field. This is necessary because the field returns a :class:`~mongoengine.Document`
+    whose precise type can depend on the value of the `_cls` field present in the
+    document in database.
+    In short, using this type of field can lead to poor performance (especially
+    if you access this field only to retrieve its `pk` field, which is already
+    known before dereferencing). To solve this you should consider using the
+    :class:`~mongoengine.fields.LazyReferenceField`.
+
     Use the `reverse_delete_rule` to handle what should happen if the document
     the field is referencing is deleted. EmbeddedDocuments, DictFields and
     MapFields does not support reverse_delete_rule and an `InvalidDocumentError`
@@ -993,6 +1033,7 @@ class ReferenceField(BaseField):
         A reference to an abstract document type is always stored as a
         :class:`~pymongo.dbref.DBRef`, regardless of the value of `dbref`.
         """
+        # XXX ValidationError raised outside of the "validate" method.
         if (
                 not isinstance(document_type, six.string_types) and
                 not issubclass(document_type, Document)
@@ -1047,6 +1088,8 @@ class ReferenceField(BaseField):
         if isinstance(document, Document):
             # We need the id from the saved object to create the DBRef
             id_ = document.pk
+
+            # XXX ValidationError raised outside of the "validate" method.
             if id_ is None:
                 self.error('You can only reference documents once they have'
                            ' been saved to the database')
@@ -1086,19 +1129,21 @@ class ReferenceField(BaseField):
         return self.to_mongo(value)
 
     def validate(self, value):
-        if not isinstance(value, (self.document_type, DBRef, ObjectId)):
-            self.error('A ReferenceField only accepts DBRef, ObjectId or documents')
+        if not isinstance(value, (self.document_type, LazyReference, DBRef, ObjectId)):
+            self.error('A ReferenceField only accepts DBRef, LazyReference, ObjectId or documents')
 
         if isinstance(value, Document) and value.id is None:
             self.error('You can only reference documents once they have been '
                        'saved to the database')
 
-        if self.document_type._meta.get('abstract') and \
-                not isinstance(value, self.document_type):
+        if (
+            self.document_type._meta.get('abstract') and
+            not isinstance(value, self.document_type)
+        ):
             self.error(
                 '%s is not an instance of abstract reference type %s' % (
-                    self.document_type._class_name)
+                    self.document_type._class_name
+                )
             )
 
     def lookup_member(self, member_name):
@@ -1121,6 +1166,7 @@ class CachedReferenceField(BaseField):
         if fields is None:
             fields = []
 
+        # XXX ValidationError raised outside of the "validate" method.
         if (
                 not isinstance(document_type, six.string_types) and
                 not issubclass(document_type, Document)
@@ -1195,6 +1241,7 @@ class CachedReferenceField(BaseField):
         id_field_name = self.document_type._meta['id_field']
         id_field = self.document_type._fields[id_field_name]
 
+        # XXX ValidationError raised outside of the "validate" method.
         if isinstance(document, Document):
             # We need the id from the saved object to create the DBRef
             id_ = document.pk
@@ -1203,7 +1250,6 @@ class CachedReferenceField(BaseField):
                            ' been saved to the database')
         else:
             self.error('Only accept a document object')
-            # TODO: should raise here or will fail next statement
 
         value = SON((
             ('_id', id_field.to_mongo(id_)),
@@ -1221,16 +1267,20 @@ class CachedReferenceField(BaseField):
         if value is None:
             return None
 
+        # XXX ValidationError raised outside of the "validate" method.
         if isinstance(value, Document):
             if value.pk is None:
                 self.error('You can only reference documents once they have'
                            ' been saved to the database')
-            return {'_id': value.pk}
+            value_dict = {'_id': value.pk}
+            for field in self.fields:
+                value_dict.update({field: value[field]})
+
+            return value_dict
 
         raise NotImplementedError
 
     def validate(self, value):
         if not isinstance(value, self.document_type):
             self.error('A CachedReferenceField only accepts documents')
@@ -1263,6 +1313,12 @@ class GenericReferenceField(BaseField):
     """A reference to *any* :class:`~mongoengine.document.Document` subclass
     that will be automatically dereferenced on access (lazily).
 
+    Note this field works the same way as :class:`~mongoengine.document.ReferenceField`,
+    doing database I/O access the first time it is accessed (even if it's only to access
+    its ``pk`` or ``id`` field).
+    To solve this you should consider using the
+    :class:`~mongoengine.fields.GenericLazyReferenceField`.
+
     .. note ::
         * Any documents used as a generic reference must be registered in the
           document registry. Importing the model will automatically register
@@ -1285,6 +1341,8 @@ class GenericReferenceField(BaseField):
             elif isinstance(choice, type) and issubclass(choice, Document):
                 self.choices.append(choice._class_name)
             else:
+                # XXX ValidationError raised outside of the "validate"
+                # method.
                 self.error('Invalid choices provided: must be a list of'
                            'Document subclasses and/or six.string_typess')
@@ -1348,6 +1406,7 @@ class GenericReferenceField(BaseField):
             # We need the id from the saved object to create the DBRef
             id_ = document.id
             if id_ is None:
+                # XXX ValidationError raised outside of the "validate" method.
                 self.error('You can only reference documents once they have'
                            ' been saved to the database')
         else:
@@ -1465,6 +1524,9 @@ class GridFSProxy(object):
         else:
             return False
 
+    def __ne__(self, other):
+        return not self == other
+
     @property
     def fs(self):
         if not self._fs:
@@ -2138,3 +2200,201 @@ class MultiPolygonField(GeoJsonBaseField):
 
     .. versionadded:: 0.9
     """
     _type = 'MultiPolygon'
class LazyReferenceField(BaseField):
"""A really lazy reference to a document.
Unlike the :class:`~mongoengine.fields.ReferenceField` it will
**not** be automatically (lazily) dereferenced on access.
Instead, access will return a :class:`~mongoengine.base.LazyReference` class
instance, allowing access to `pk` or manual dereference by using
``fetch()`` method.
.. versionadded:: 0.15
"""
def __init__(self, document_type, passthrough=False, dbref=False,
reverse_delete_rule=DO_NOTHING, **kwargs):
"""Initialises the Reference Field.
:param dbref: Store the reference as :class:`~pymongo.dbref.DBRef`
or as the :class:`~pymongo.objectid.ObjectId`.id .
:param reverse_delete_rule: Determines what to do when the referring
object is deleted
:param passthrough: When trying to access unknown fields, the
:class:`~mongoengine.base.datastructure.LazyReference` instance will
automatically call `fetch()` and try to retrieve the field on the fetched
document. Note this only works for getting a field (not setting or deleting).
"""
# XXX ValidationError raised outside of the "validate" method.
if (
not isinstance(document_type, six.string_types) and
not issubclass(document_type, Document)
):
self.error('Argument to LazyReferenceField constructor must be a '
'document class or a string')
self.dbref = dbref
self.passthrough = passthrough
self.document_type_obj = document_type
self.reverse_delete_rule = reverse_delete_rule
super(LazyReferenceField, self).__init__(**kwargs)
@property
def document_type(self):
if isinstance(self.document_type_obj, six.string_types):
if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT:
self.document_type_obj = self.owner_document
else:
self.document_type_obj = get_document(self.document_type_obj)
return self.document_type_obj
def build_lazyref(self, value):
if isinstance(value, LazyReference):
if value.passthrough != self.passthrough:
value = LazyReference(value.document_type, value.pk, passthrough=self.passthrough)
elif value is not None:
if isinstance(value, self.document_type):
value = LazyReference(self.document_type, value.pk, passthrough=self.passthrough)
elif isinstance(value, DBRef):
value = LazyReference(self.document_type, value.id, passthrough=self.passthrough)
else:
# value is the primary key of the referenced document
value = LazyReference(self.document_type, value, passthrough=self.passthrough)
return value
def __get__(self, instance, owner):
"""Descriptor to allow lazy dereferencing."""
if instance is None:
# Document class being used rather than a document object
return self
value = self.build_lazyref(instance._data.get(self.name))
if value:
instance._data[self.name] = value
return super(LazyReferenceField, self).__get__(instance, owner)
def to_mongo(self, value):
if isinstance(value, LazyReference):
pk = value.pk
elif isinstance(value, self.document_type):
pk = value.pk
elif isinstance(value, DBRef):
pk = value.id
else:
# value is the primary key of the referenced document
pk = value
id_field_name = self.document_type._meta['id_field']
id_field = self.document_type._fields[id_field_name]
pk = id_field.to_mongo(pk)
if self.dbref:
return DBRef(self.document_type._get_collection_name(), pk)
else:
return pk
def validate(self, value):
if isinstance(value, LazyReference):
if value.collection != self.document_type._get_collection_name():
self.error('Reference must be on a `%s` document.' % self.document_type)
pk = value.pk
elif isinstance(value, self.document_type):
pk = value.pk
elif isinstance(value, DBRef):
# TODO: check collection ?
collection = self.document_type._get_collection_name()
if value.collection != collection:
self.error("DBRef on bad collection (must be on `%s`)" % collection)
pk = value.id
else:
# value is the primary key of the referenced document
id_field_name = self.document_type._meta['id_field']
id_field = getattr(self.document_type, id_field_name)
pk = value
try:
id_field.validate(pk)
except ValidationError:
self.error(
"value should be `{0}` document, LazyReference or DBRef on `{0}` "
"or `{0}`'s primary key (i.e. `{1}`)".format(
self.document_type.__name__, type(id_field).__name__))
if pk is None:
self.error('You can only reference documents once they have been '
'saved to the database')
def prepare_query_value(self, op, value):
if value is None:
return None
super(LazyReferenceField, self).prepare_query_value(op, value)
return self.to_mongo(value)
def lookup_member(self, member_name):
return self.document_type._fields.get(member_name)
class GenericLazyReferenceField(GenericReferenceField):
"""A reference to *any* :class:`~mongoengine.document.Document` subclass.
Unlike the :class:`~mongoengine.fields.GenericReferenceField` it will
**not** be automatically (lazily) dereferenced on access.
Instead, access will return a :class:`~mongoengine.base.LazyReference` class
instance, allowing access to `pk` or manual dereference by using
``fetch()`` method.
.. note ::
* Any documents used as a generic reference must be registered in the
document registry. Importing the model will automatically register
it.
* You can use the choices param to limit the acceptable Document types
.. versionadded:: 0.15
"""
def __init__(self, *args, **kwargs):
self.passthrough = kwargs.pop('passthrough', False)
super(GenericLazyReferenceField, self).__init__(*args, **kwargs)
def _validate_choices(self, value):
if isinstance(value, LazyReference):
value = value.document_type._class_name
super(GenericLazyReferenceField, self)._validate_choices(value)
def build_lazyref(self, value):
if isinstance(value, LazyReference):
if value.passthrough != self.passthrough:
value = LazyReference(value.document_type, value.pk, passthrough=self.passthrough)
elif value is not None:
if isinstance(value, (dict, SON)):
value = LazyReference(get_document(value['_cls']), value['_ref'].id, passthrough=self.passthrough)
elif isinstance(value, Document):
value = LazyReference(type(value), value.pk, passthrough=self.passthrough)
return value
def __get__(self, instance, owner):
if instance is None:
return self
value = self.build_lazyref(instance._data.get(self.name))
if value:
instance._data[self.name] = value
return super(GenericLazyReferenceField, self).__get__(instance, owner)
def validate(self, value):
if isinstance(value, LazyReference) and value.pk is None:
self.error('You can only reference documents once they have been'
' saved to the database')
return super(GenericLazyReferenceField, self).validate(value)
def to_mongo(self, document):
if document is None:
return None
if isinstance(document, LazyReference):
return SON((
('_cls', document.document_type._class_name),
('_ref', DBRef(document.document_type._get_collection_name(), document.pk))
))
else:
return super(GenericLazyReferenceField, self).to_mongo(document)
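
A minimal usage sketch (not part of this diff; `Org` and `Employee` are hypothetical documents) for the two new field types added above:

    class Org(Document):
        name = StringField()

    class Employee(Document):
        org = LazyReferenceField(Org, passthrough=True)
        last_seen_in = GenericLazyReferenceField()

    emp = Employee.objects.first()
    emp.org.pk          # no query: the LazyReference already knows the primary key
    emp.org.fetch()     # explicit dereference returns the Org document
    emp.org.name        # passthrough=True: unknown attributes trigger fetch() automatically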

View File

@@ -18,7 +18,7 @@ from mongoengine import signals
 from mongoengine.base import get_document
 from mongoengine.common import _import_class
 from mongoengine.connection import get_db
-from mongoengine.context_managers import switch_db
+from mongoengine.context_managers import set_write_concern, switch_db
 from mongoengine.errors import (InvalidQueryError, LookUpError,
                                 NotUniqueError, OperationError)
 from mongoengine.python_support import IS_PYMONGO_3
@@ -350,11 +350,24 @@ class BaseQuerySet(object):
                                        documents=docs, **signal_kwargs)
 
         raw = [doc.to_mongo() for doc in docs]
+
+        with set_write_concern(self._collection, write_concern) as collection:
+            insert_func = collection.insert_many
+            if return_one:
+                raw = raw[0]
+                insert_func = collection.insert_one
+
         try:
-            ids = self._collection.insert(raw, **write_concern)
+            inserted_result = insert_func(raw)
+            ids = return_one and [inserted_result.inserted_id] or inserted_result.inserted_ids
         except pymongo.errors.DuplicateKeyError as err:
             message = 'Could not save document (%s)'
             raise NotUniqueError(message % six.text_type(err))
+        except pymongo.errors.BulkWriteError as err:
+            # inserting documents that already have an _id field will
+            # give huge performance debt or raise
+            message = u'Document must not have _id value before bulk write (%s)'
+            raise NotUniqueError(message % six.text_type(err))
         except pymongo.errors.OperationFailure as err:
             message = 'Could not save document (%s)'
             if re.match('^E1100[01] duplicate key', six.text_type(err)):
@@ -368,7 +381,6 @@ class BaseQuerySet(object):
             signals.post_bulk_insert.send(
                 self._document, documents=docs, loaded=False, **signal_kwargs)
             return return_one and ids[0] or ids
-
         documents = self.in_bulk(ids)
         results = []
         for obj_id in ids:
@@ -486,8 +498,9 @@ class BaseQuerySet(object):
             ``save(..., write_concern={w: 2, fsync: True}, ...)`` will
             wait until at least two servers have recorded the write and
             will force an fsync on the primary server.
-        :param full_result: Return the full result rather than just the number
-            updated.
+        :param full_result: Return the full result dictionary rather than just the number
+            updated, e.g. return
+            ``{'n': 2, 'nModified': 2, 'ok': 1.0, 'updatedExisting': True}``.
         :param update: Django-style update keyword arguments
 
         .. versionadded:: 0.2
@@ -510,12 +523,15 @@ class BaseQuerySet(object):
             else:
                 update['$set'] = {'_cls': queryset._document._class_name}
         try:
-            result = queryset._collection.update(query, update, multi=multi,
-                                                 upsert=upsert, **write_concern)
+            with set_write_concern(queryset._collection, write_concern) as collection:
+                update_func = collection.update_one
+                if multi:
+                    update_func = collection.update_many
+                result = update_func(query, update, upsert=upsert)
             if full_result:
                 return result
-            elif result:
-                return result['n']
+            elif result.raw_result:
+                return result.raw_result['n']
         except pymongo.errors.DuplicateKeyError as err:
             raise NotUniqueError(u'Update failed (%s)' % six.text_type(err))
         except pymongo.errors.OperationFailure as err:
@@ -544,10 +560,10 @@ class BaseQuerySet(object):
                                     write_concern=write_concern,
                                     full_result=True, **update)
 
-        if atomic_update['updatedExisting']:
+        if atomic_update.raw_result['updatedExisting']:
             document = self.get()
         else:
-            document = self._document.objects.with_id(atomic_update['upserted'])
+            document = self._document.objects.with_id(atomic_update.upserted_id)
 
         return document
 
     def update_one(self, upsert=False, write_concern=None, **update):
@@ -1578,6 +1594,9 @@ class BaseQuerySet(object):
         if self._batch_size is not None:
             self._cursor_obj.batch_size(self._batch_size)
 
+        if self._comment is not None:
+            self._cursor_obj.comment(self._comment)
+
         return self._cursor_obj
 
     def __deepcopy__(self, memo):
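
A minimal sketch (not part of this diff; `BlogPost` is a hypothetical document) of what callers now get back from update() after the switch to update_one/update_many:

    n = BlogPost.objects(published=False).update(set__published=True)
    # -> int, taken from result.raw_result['n']

    res = BlogPost.objects(published=False).update(set__published=True, full_result=True)
    # -> a pymongo UpdateResult; the old raw dict is now available as res.raw_result
    print(res.raw_result.get('nModified'))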

View File

@@ -1,3 +1,5 @@
+import six
+
 from mongoengine.errors import OperationError
 from mongoengine.queryset.base import (BaseQuerySet, CASCADE, DENY, DO_NOTHING,
                                        NULLIFY, PULL)
@@ -90,7 +92,7 @@ class QuerySet(BaseQuerySet):
             # Raise StopIteration if we already established there were no more
             # docs in the db cursor.
             if not self._has_more:
-                raise StopIteration
+                return
 
             # Otherwise, populate more of the cache and repeat.
             if len(self._result_cache) <= pos:
@@ -112,7 +114,7 @@ class QuerySet(BaseQuerySet):
         # Pull in ITER_CHUNK_SIZE docs from the database and store them in
         # the result cache.
         try:
-            for _ in xrange(ITER_CHUNK_SIZE):
+            for _ in six.moves.range(ITER_CHUNK_SIZE):
                 self._result_cache.append(self.next())
         except StopIteration:
             # Getting this exception means there are no more docs in the
@@ -166,7 +168,7 @@ class QuerySetNoCache(BaseQuerySet):
             return '.. queryset mid-iteration ..'
 
         data = []
-        for _ in xrange(REPR_OUTPUT_SIZE + 1):
+        for _ in six.moves.range(REPR_OUTPUT_SIZE + 1):
             try:
                 data.append(self.next())
             except StopIteration:

View File

@ -101,21 +101,8 @@ def query(_doc_cls=None, **kwargs):
value = value['_id'] value = value['_id']
elif op in ('in', 'nin', 'all', 'near') and not isinstance(value, dict): elif op in ('in', 'nin', 'all', 'near') and not isinstance(value, dict):
# Raise an error if the in/nin/all/near param is not iterable. We need a # Raise an error if the in/nin/all/near param is not iterable.
# special check for BaseDocument, because - although it's iterable - using value = _prepare_query_for_iterable(field, op, value)
# it as such in the context of this method is most definitely a mistake.
BaseDocument = _import_class('BaseDocument')
if isinstance(value, BaseDocument):
raise TypeError("When using the `in`, `nin`, `all`, or "
"`near`-operators you can\'t use a "
"`Document`, you must wrap your object "
"in a list (object -> [object]).")
elif not hasattr(value, '__iter__'):
raise TypeError("The `in`, `nin`, `all`, or "
"`near`-operators must be applied to an "
"iterable (e.g. a list).")
else:
value = [field.prepare_query_value(op, v) for v in value]
# If we're querying a GenericReferenceField, we need to alter the # If we're querying a GenericReferenceField, we need to alter the
# key depending on the value: # key depending on the value:
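The refactored branch delegates to ``_prepare_query_for_iterable`` (added at the end of this file) but keeps the same contract: ``in``/``nin``/``all``/``near`` expect an iterable of values, never a bare document. A small illustrative sketch, where ``Author`` and ``Book`` are hypothetical models (the queries are left commented because they need a live connection):

    from mongoengine import Document, ReferenceField, StringField

    class Author(Document):
        name = StringField()

    class Book(Document):
        author = ReferenceField(Author)

    # author = Author.objects.first()
    # Book.objects(author__in=author)    # TypeError: wrap the object in a list
    # Book.objects(author__in=[author])  # correct: object -> [object]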
@ -284,7 +271,15 @@ def update(_doc_cls=None, **update):
if isinstance(field, GeoJsonBaseField): if isinstance(field, GeoJsonBaseField):
value = field.to_mongo(value) value = field.to_mongo(value)
if op in (None, 'set', 'push', 'pull'): if op == 'pull':
if field.required or value is not None:
if match == 'in' and not isinstance(value, dict):
value = _prepare_query_for_iterable(field, op, value)
else:
value = field.prepare_query_value(op, value)
elif op == 'push' and isinstance(value, (list, tuple, set)):
value = [field.prepare_query_value(op, v) for v in value]
elif op in (None, 'set', 'push'):
if field.required or value is not None: if field.required or value is not None:
value = field.prepare_query_value(op, value) value = field.prepare_query_value(op, value)
elif op in ('pushAll', 'pullAll'): elif op in ('pushAll', 'pullAll'):
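The new ``pull`` branch prepares each element of the iterable when the match is ``in``, so the emitted operator is ``$pull`` with ``$in`` (mirrored by the transform test added below). A runnable sketch with a hypothetical document; no connection is needed because ``transform.update`` only builds the operator document:

    from mongoengine import Document, ListField, StringField
    from mongoengine.queryset import transform

    class PullDoc(Document):
        foo = ListField(StringField())

    # Each element is run through prepare_query_value, then wrapped in $in.
    print(transform.update(PullDoc, pull__foo__in=['a', 'c']))
    # {'$pull': {'foo': {'$in': ['a', 'c']}}}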
@ -319,11 +314,17 @@ def update(_doc_cls=None, **update):
field_classes = [c.__class__ for c in cleaned_fields] field_classes = [c.__class__ for c in cleaned_fields]
field_classes.reverse() field_classes.reverse()
ListField = _import_class('ListField') ListField = _import_class('ListField')
if ListField in field_classes: EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField')
# Join all fields via dot notation to the last ListField if ListField in field_classes or EmbeddedDocumentListField in field_classes:
# Join all fields via dot notation to the last ListField or EmbeddedDocumentListField
# Then process as normal # Then process as normal
if ListField in field_classes:
_check_field = ListField
else:
_check_field = EmbeddedDocumentListField
last_listField = len( last_listField = len(
cleaned_fields) - field_classes.index(ListField) cleaned_fields) - field_classes.index(_check_field)
key = '.'.join(parts[:last_listField]) key = '.'.join(parts[:last_listField])
parts = parts[last_listField:] parts = parts[last_listField:]
parts.insert(0, key) parts.insert(0, key)
@ -333,10 +334,26 @@ def update(_doc_cls=None, **update):
value = {key: value} value = {key: value}
elif op == 'addToSet' and isinstance(value, list): elif op == 'addToSet' and isinstance(value, list):
value = {key: {'$each': value}} value = {key: {'$each': value}}
elif op in ('push', 'pushAll'):
if parts[-1].isdigit():
key = parts[0]
position = int(parts[-1])
# $position expects an iterable. If pushing a single value,
# wrap it in a list.
if not isinstance(value, (set, tuple, list)):
value = [value]
value = {key: {'$each': value, '$position': position}}
else:
if op == 'pushAll':
op = 'push' # convert to non-deprecated keyword
if not isinstance(value, (set, tuple, list)):
value = [value]
value = {key: {'$each': value}}
else:
value = {key: value}
else: else:
value = {key: value} value = {key: value}
key = '$' + op key = '$' + op
if key not in mongo_update: if key not in mongo_update:
mongo_update[key] = value mongo_update[key] = value
elif key in mongo_update and isinstance(mongo_update[key], dict): elif key in mongo_update and isinstance(mongo_update[key], dict):
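Per the branch above, a positional ``push`` folds the numeric part of the key into ``$position`` and wraps the pushed value(s) in ``$each``, while ``push_all`` is rewritten to the non-deprecated ``push`` + ``$each`` form. A runnable sketch with a hypothetical ``PostDoc`` document; the expected outputs follow the logic above and the behaviour exercised by the tests added later in this changeset:

    from mongoengine import Document, ListField, StringField
    from mongoengine.queryset import transform

    class PostDoc(Document):
        tags = ListField(StringField())

    print(transform.update(PostDoc, push__tags__0=['mongodb', 'python']))
    # {'$push': {'tags': {'$each': ['mongodb', 'python'], '$position': 0}}}

    print(transform.update(PostDoc, push_all__tags=['mongo', 'db']))
    # {'$push': {'tags': {'$each': ['mongo', 'db']}}}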
@ -425,3 +442,22 @@ def _infer_geometry(value):
raise InvalidQueryError('Invalid $geometry data. Can be either a ' raise InvalidQueryError('Invalid $geometry data. Can be either a '
'dictionary or (nested) lists of coordinate(s)') 'dictionary or (nested) lists of coordinate(s)')
def _prepare_query_for_iterable(field, op, value):
# We need a special check for BaseDocument, because - although it's iterable - using
# it as such in the context of this method is most definitely a mistake.
BaseDocument = _import_class('BaseDocument')
if isinstance(value, BaseDocument):
raise TypeError("When using the `in`, `nin`, `all`, or "
"`near`-operators you can\'t use a "
"`Document`, you must wrap your object "
"in a list (object -> [object]).")
if not hasattr(value, '__iter__'):
raise TypeError("The `in`, `nin`, `all`, or "
"`near`-operators must be applied to an "
"iterable (e.g. a list).")
return [field.prepare_query_value(op, v) for v in value]

View File

@ -1,11 +1,11 @@
[nosetests] [nosetests]
verbosity=2 verbosity=2
detailed-errors=1 detailed-errors=1
tests=tests #tests=tests
cover-package=mongoengine cover-package=mongoengine
[flake8] [flake8]
ignore=E501,F401,F403,F405,I201 ignore=E501,F401,F403,F405,I201,I202
exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests
max-complexity=47 max-complexity=47
application-import-names=mongoengine,tests application-import-names=mongoengine,tests

View File

@ -70,9 +70,9 @@ setup(
name='mongoengine', name='mongoengine',
version=VERSION, version=VERSION,
author='Harry Marr', author='Harry Marr',
author_email='harry.marr@{nospam}gmail.com', author_email='harry.marr@gmail.com',
maintainer="Ross Lawley", maintainer="Stefan Wojcik",
maintainer_email="ross.lawley@{nospam}gmail.com", maintainer_email="wojcikstefan@gmail.com",
url='http://mongoengine.org/', url='http://mongoengine.org/',
download_url='https://github.com/MongoEngine/mongoengine/tarball/master', download_url='https://github.com/MongoEngine/mongoengine/tarball/master',
license='MIT', license='MIT',

View File

@ -5,6 +5,7 @@ from mongoengine import *
from mongoengine.queryset import NULLIFY, PULL from mongoengine.queryset import NULLIFY, PULL
from mongoengine.connection import get_db from mongoengine.connection import get_db
from tests.utils import needs_mongodb_v26
__all__ = ("ClassMethodsTest", ) __all__ = ("ClassMethodsTest", )
@ -187,6 +188,26 @@ class ClassMethodsTest(unittest.TestCase):
self.assertEqual(BlogPostWithTags.compare_indexes(), { 'missing': [], 'extra': [] }) self.assertEqual(BlogPostWithTags.compare_indexes(), { 'missing': [], 'extra': [] })
self.assertEqual(BlogPostWithCustomField.compare_indexes(), { 'missing': [], 'extra': [] }) self.assertEqual(BlogPostWithCustomField.compare_indexes(), { 'missing': [], 'extra': [] })
@needs_mongodb_v26
def test_compare_indexes_for_text_indexes(self):
""" Ensure that compare_indexes behaves correctly for text indexes """
class Doc(Document):
a = StringField()
b = StringField()
meta = {'indexes': [
{'fields': ['$a', "$b"],
'default_language': 'english',
'weights': {'a': 10, 'b': 2}
}
]}
Doc.drop_collection()
Doc.ensure_indexes()
actual = Doc.compare_indexes()
expected = {'missing': [], 'extra': []}
self.assertEqual(actual, expected)
def test_list_indexes_inheritance(self): def test_list_indexes_inheritance(self):
""" ensure that all of the indexes are listed regardless of the super- """ ensure that all of the indexes are listed regardless of the super-
or sub-class that we call it from or sub-class that we call it from

View File

@ -22,6 +22,8 @@ from mongoengine.queryset import NULLIFY, Q
from mongoengine.context_managers import switch_db, query_counter from mongoengine.context_managers import switch_db, query_counter
from mongoengine import signals from mongoengine import signals
from tests.utils import needs_mongodb_v26
TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__),
'../fields/mongoengine.png') '../fields/mongoengine.png')
@ -474,6 +476,24 @@ class InstanceTest(unittest.TestCase):
doc.save() doc.save()
doc.reload() doc.reload()
def test_reload_with_changed_fields(self):
"""Ensures reloading will not affect changed fields"""
class User(Document):
name = StringField()
number = IntField()
User.drop_collection()
user = User(name="Bob", number=1).save()
user.name = "John"
user.number = 2
self.assertEqual(user._get_changed_fields(), ['name', 'number'])
user.reload('number')
self.assertEqual(user._get_changed_fields(), ['name'])
user.save()
user.reload()
self.assertEqual(user.name, "John")
def test_reload_referencing(self): def test_reload_referencing(self):
"""Ensures reloading updates weakrefs correctly.""" """Ensures reloading updates weakrefs correctly."""
class Embedded(EmbeddedDocument): class Embedded(EmbeddedDocument):
@ -519,7 +539,7 @@ class InstanceTest(unittest.TestCase):
doc.save() doc.save()
doc.dict_field['extra'] = 1 doc.dict_field['extra'] = 1
doc = doc.reload(10, 'list_field') doc = doc.reload(10, 'list_field')
self.assertEqual(doc._get_changed_fields(), []) self.assertEqual(doc._get_changed_fields(), ['dict_field.extra'])
self.assertEqual(len(doc.list_field), 5) self.assertEqual(len(doc.list_field), 5)
self.assertEqual(len(doc.dict_field), 3) self.assertEqual(len(doc.dict_field), 3)
self.assertEqual(len(doc.embedded_field.list_field), 4) self.assertEqual(len(doc.embedded_field.list_field), 4)
@ -826,6 +846,22 @@ class InstanceTest(unittest.TestCase):
self.assertDbEqual([dict(other_doc.to_mongo()), dict(doc.to_mongo())]) self.assertDbEqual([dict(other_doc.to_mongo()), dict(doc.to_mongo())])
@needs_mongodb_v26
def test_modify_with_positional_push(self):
class BlogPost(Document):
tags = ListField(StringField())
post = BlogPost.objects.create(tags=['python'])
self.assertEqual(post.tags, ['python'])
post.modify(push__tags__0=['code', 'mongo'])
self.assertEqual(post.tags, ['code', 'mongo', 'python'])
# Assert same order of the list items is maintained in the db
self.assertEqual(
BlogPost._get_collection().find_one({'_id': post.pk})['tags'],
['code', 'mongo', 'python']
)
def test_save(self): def test_save(self):
"""Ensure that a document may be saved in the database.""" """Ensure that a document may be saved in the database."""
@ -1323,6 +1359,23 @@ class InstanceTest(unittest.TestCase):
site = Site.objects.first() site = Site.objects.first()
self.assertEqual(site.page.log_message, "Error: Dummy message") self.assertEqual(site.page.log_message, "Error: Dummy message")
def test_update_list_field(self):
"""Test update on `ListField` with $pull + $in.
"""
class Doc(Document):
foo = ListField(StringField())
Doc.drop_collection()
doc = Doc(foo=['a', 'b', 'c'])
doc.save()
# Update
doc = Doc.objects.first()
doc.update(pull__foo__in=['a', 'c'])
doc = Doc.objects.first()
self.assertEqual(doc.foo, ['b'])
def test_embedded_update_db_field(self): def test_embedded_update_db_field(self):
"""Test update on `EmbeddedDocumentField` fields when db_field """Test update on `EmbeddedDocumentField` fields when db_field
is other than default. is other than default.
@ -1866,6 +1919,25 @@ class InstanceTest(unittest.TestCase):
author.delete() author.delete()
self.assertEqual(BlogPost.objects.count(), 0) self.assertEqual(BlogPost.objects.count(), 0)
def test_reverse_delete_rule_pull(self):
"""Ensure that a referenced document is also deleted with
pull.
"""
class Record(Document):
name = StringField()
children = ListField(ReferenceField('self', reverse_delete_rule=PULL))
Record.drop_collection()
parent_record = Record(name='parent').save()
child_record = Record(name='child').save()
parent_record.children.append(child_record)
parent_record.save()
child_record.delete()
self.assertEqual(Record.objects(name='parent').get().children, [])
def test_reverse_delete_rule_with_custom_id_field(self): def test_reverse_delete_rule_with_custom_id_field(self):
"""Ensure that a referenced document with custom primary key """Ensure that a referenced document with custom primary key
is also deleted upon deletion. is also deleted upon deletion.
@ -3149,6 +3221,33 @@ class InstanceTest(unittest.TestCase):
person.update(set__height=2.0) person.update(set__height=2.0)
@needs_mongodb_v26
def test_push_with_position(self):
"""Ensure that push with position works properly for an instance."""
class BlogPost(Document):
slug = StringField()
tags = ListField(StringField())
blog = BlogPost()
blog.slug = "ABC"
blog.tags = ["python"]
blog.save()
blog.update(push__tags__0=["mongodb", "code"])
blog.reload()
self.assertEqual(blog.tags, ['mongodb', 'code', 'python'])
def test_push_nested_list(self):
"""Ensure that push update works in nested list"""
class BlogPost(Document):
slug = StringField()
tags = ListField()
blog = BlogPost(slug="test").save()
blog.update(push__tags=["value1", 123])
blog.reload()
self.assertEqual(blog.tags, [["value1", 123]])
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()

View File

@ -26,7 +26,7 @@ except ImportError:
from mongoengine import * from mongoengine import *
from mongoengine.connection import get_db from mongoengine.connection import get_db
from mongoengine.base import (BaseDict, BaseField, EmbeddedDocumentList, from mongoengine.base import (BaseDict, BaseField, EmbeddedDocumentList,
_document_registry) _document_registry, LazyReference)
from tests.utils import MongoDBTestCase from tests.utils import MongoDBTestCase
@ -937,7 +937,9 @@ class FieldTest(MongoDBTestCase):
comments = ListField(EmbeddedDocumentField(Comment)) comments = ListField(EmbeddedDocumentField(Comment))
tags = ListField(StringField()) tags = ListField(StringField())
authors = ListField(ReferenceField(User)) authors = ListField(ReferenceField(User))
authors_as_lazy = ListField(LazyReferenceField(User))
generic = ListField(GenericReferenceField()) generic = ListField(GenericReferenceField())
generic_as_lazy = ListField(GenericLazyReferenceField())
access_list = ListField(choices=AccessLevelChoices, display_sep=', ') access_list = ListField(choices=AccessLevelChoices, display_sep=', ')
User.drop_collection() User.drop_collection()
@ -987,6 +989,15 @@ class FieldTest(MongoDBTestCase):
post.authors = [user] post.authors = [user]
post.validate() post.validate()
post.authors_as_lazy = [Comment()]
self.assertRaises(ValidationError, post.validate)
post.authors_as_lazy = [User()]
self.assertRaises(ValidationError, post.validate)
post.authors_as_lazy = [user]
post.validate()
post.generic = [1, 2] post.generic = [1, 2]
self.assertRaises(ValidationError, post.validate) self.assertRaises(ValidationError, post.validate)
@ -999,6 +1010,18 @@ class FieldTest(MongoDBTestCase):
post.generic = [user] post.generic = [user]
post.validate() post.validate()
post.generic_as_lazy = [1, 2]
self.assertRaises(ValidationError, post.validate)
post.generic_as_lazy = [User(), Comment()]
self.assertRaises(ValidationError, post.validate)
post.generic_as_lazy = [Comment()]
self.assertRaises(ValidationError, post.validate)
post.generic_as_lazy = [user]
post.validate()
def test_sorted_list_sorting(self): def test_sorted_list_sorting(self):
"""Ensure that a sorted list field properly sorts values. """Ensure that a sorted list field properly sorts values.
""" """
@ -4374,6 +4397,51 @@ class CachedReferenceFieldTest(MongoDBTestCase):
self.assertEqual(SocialData.objects(person__group=g2).count(), 1) self.assertEqual(SocialData.objects(person__group=g2).count(), 1)
self.assertEqual(SocialData.objects(person__group=g2).first(), s2) self.assertEqual(SocialData.objects(person__group=g2).first(), s2)
def test_cached_reference_field_push_with_fields(self):
class Product(Document):
name = StringField()
Product.drop_collection()
class Basket(Document):
products = ListField(CachedReferenceField(Product, fields=['name']))
Basket.drop_collection()
product1 = Product(name='abc').save()
product2 = Product(name='def').save()
basket = Basket(products=[product1]).save()
self.assertEqual(
Basket.objects._collection.find_one(),
{
'_id': basket.pk,
'products': [
{
'_id': product1.pk,
'name': product1.name
}
]
}
)
# push to list
basket.update(push__products=product2)
basket.reload()
self.assertEqual(
Basket.objects._collection.find_one(),
{
'_id': basket.pk,
'products': [
{
'_id': product1.pk,
'name': product1.name
},
{
'_id': product2.pk,
'name': product2.name
}
]
}
)
def test_cached_reference_field_update_all(self): def test_cached_reference_field_update_all(self):
class Person(Document): class Person(Document):
TYPES = ( TYPES = (
@ -4616,5 +4684,522 @@ class CachedReferenceFieldTest(MongoDBTestCase):
self.assertTrue(isinstance(ocorrence.animal, Animal)) self.assertTrue(isinstance(ocorrence.animal, Animal))
class LazyReferenceFieldTest(MongoDBTestCase):
def test_lazy_reference_config(self):
# Make sure LazyReferenceField only accepts a document class or a string
# with a document class name.
self.assertRaises(ValidationError, LazyReferenceField, EmbeddedDocument)
def test_lazy_reference_simple(self):
class Animal(Document):
name = StringField()
tag = StringField()
class Ocurrence(Document):
person = StringField()
animal = LazyReferenceField(Animal)
Animal.drop_collection()
Ocurrence.drop_collection()
animal = Animal(name="Leopard", tag="heavy").save()
Ocurrence(person="test", animal=animal).save()
p = Ocurrence.objects.get()
self.assertIsInstance(p.animal, LazyReference)
fetched_animal = p.animal.fetch()
self.assertEqual(fetched_animal, animal)
# `fetch` keeps a cache of the referenced document by default...
animal.tag = "not so heavy"
animal.save()
double_fetch = p.animal.fetch()
self.assertIs(fetched_animal, double_fetch)
self.assertEqual(double_fetch.tag, "heavy")
# ...unless specified otherwise
fetch_force = p.animal.fetch(force=True)
self.assertIsNot(fetch_force, fetched_animal)
self.assertEqual(fetch_force.tag, "not so heavy")
def test_lazy_reference_fetch_invalid_ref(self):
class Animal(Document):
name = StringField()
tag = StringField()
class Ocurrence(Document):
person = StringField()
animal = LazyReferenceField(Animal)
Animal.drop_collection()
Ocurrence.drop_collection()
animal = Animal(name="Leopard", tag="heavy").save()
Ocurrence(person="test", animal=animal).save()
animal.delete()
p = Ocurrence.objects.get()
self.assertIsInstance(p.animal, LazyReference)
with self.assertRaises(DoesNotExist):
p.animal.fetch()
def test_lazy_reference_set(self):
class Animal(Document):
meta = {'allow_inheritance': True}
name = StringField()
tag = StringField()
class Ocurrence(Document):
person = StringField()
animal = LazyReferenceField(Animal)
Animal.drop_collection()
Ocurrence.drop_collection()
class SubAnimal(Animal):
nick = StringField()
animal = Animal(name="Leopard", tag="heavy").save()
sub_animal = SubAnimal(nick='doggo', name='dog').save()
for ref in (
animal,
animal.pk,
DBRef(animal._get_collection_name(), animal.pk),
LazyReference(Animal, animal.pk),
sub_animal,
sub_animal.pk,
DBRef(sub_animal._get_collection_name(), sub_animal.pk),
LazyReference(SubAnimal, sub_animal.pk),
):
p = Ocurrence(person="test", animal=ref).save()
p.reload()
self.assertIsInstance(p.animal, LazyReference)
p.animal.fetch()
def test_lazy_reference_bad_set(self):
class Animal(Document):
name = StringField()
tag = StringField()
class Ocurrence(Document):
person = StringField()
animal = LazyReferenceField(Animal)
Animal.drop_collection()
Ocurrence.drop_collection()
class BadDoc(Document):
pass
animal = Animal(name="Leopard", tag="heavy").save()
baddoc = BadDoc().save()
for bad in (
42,
'foo',
baddoc,
DBRef(baddoc._get_collection_name(), animal.pk),
LazyReference(BadDoc, animal.pk)
):
with self.assertRaises(ValidationError):
p = Ocurrence(person="test", animal=bad).save()
def test_lazy_reference_query_conversion(self):
"""Ensure that LazyReferenceFields can be queried using objects and values
of the type of the primary key of the referenced object.
"""
class Member(Document):
user_num = IntField(primary_key=True)
class BlogPost(Document):
title = StringField()
author = LazyReferenceField(Member, dbref=False)
Member.drop_collection()
BlogPost.drop_collection()
m1 = Member(user_num=1)
m1.save()
m2 = Member(user_num=2)
m2.save()
post1 = BlogPost(title='post 1', author=m1)
post1.save()
post2 = BlogPost(title='post 2', author=m2)
post2.save()
post = BlogPost.objects(author=m1).first()
self.assertEqual(post.id, post1.id)
post = BlogPost.objects(author=m2).first()
self.assertEqual(post.id, post2.id)
# Same thing by passing a LazyReference instance
post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first()
self.assertEqual(post.id, post2.id)
def test_lazy_reference_query_conversion_dbref(self):
"""Ensure that LazyReferenceFields can be queried using objects and values
of the type of the primary key of the referenced object.
"""
class Member(Document):
user_num = IntField(primary_key=True)
class BlogPost(Document):
title = StringField()
author = LazyReferenceField(Member, dbref=True)
Member.drop_collection()
BlogPost.drop_collection()
m1 = Member(user_num=1)
m1.save()
m2 = Member(user_num=2)
m2.save()
post1 = BlogPost(title='post 1', author=m1)
post1.save()
post2 = BlogPost(title='post 2', author=m2)
post2.save()
post = BlogPost.objects(author=m1).first()
self.assertEqual(post.id, post1.id)
post = BlogPost.objects(author=m2).first()
self.assertEqual(post.id, post2.id)
# Same thing by passing a LazyReference instance
post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first()
self.assertEqual(post.id, post2.id)
def test_lazy_reference_passthrough(self):
class Animal(Document):
name = StringField()
tag = StringField()
class Ocurrence(Document):
animal = LazyReferenceField(Animal, passthrough=False)
animal_passthrough = LazyReferenceField(Animal, passthrough=True)
Animal.drop_collection()
Ocurrence.drop_collection()
animal = Animal(name="Leopard", tag="heavy").save()
Ocurrence(animal=animal, animal_passthrough=animal).save()
p = Ocurrence.objects.get()
self.assertIsInstance(p.animal, LazyReference)
with self.assertRaises(KeyError):
p.animal['name']
with self.assertRaises(AttributeError):
p.animal.name
self.assertEqual(p.animal.pk, animal.pk)
self.assertEqual(p.animal_passthrough.name, "Leopard")
self.assertEqual(p.animal_passthrough['name'], "Leopard")
# Should not be able to access referenced document's methods
with self.assertRaises(AttributeError):
p.animal.save
with self.assertRaises(KeyError):
p.animal['save']
def test_lazy_reference_not_set(self):
class Animal(Document):
name = StringField()
tag = StringField()
class Ocurrence(Document):
person = StringField()
animal = LazyReferenceField(Animal)
Animal.drop_collection()
Ocurrence.drop_collection()
Ocurrence(person='foo').save()
p = Ocurrence.objects.get()
self.assertIs(p.animal, None)
def test_lazy_reference_equality(self):
class Animal(Document):
name = StringField()
tag = StringField()
Animal.drop_collection()
animal = Animal(name="Leopard", tag="heavy").save()
animalref = LazyReference(Animal, animal.pk)
self.assertEqual(animal, animalref)
self.assertEqual(animalref, animal)
other_animalref = LazyReference(Animal, ObjectId("54495ad94c934721ede76f90"))
self.assertNotEqual(animal, other_animalref)
self.assertNotEqual(other_animalref, animal)
def test_lazy_reference_embedded(self):
class Animal(Document):
name = StringField()
tag = StringField()
class EmbeddedOcurrence(EmbeddedDocument):
in_list = ListField(LazyReferenceField(Animal))
direct = LazyReferenceField(Animal)
class Ocurrence(Document):
in_list = ListField(LazyReferenceField(Animal))
in_embedded = EmbeddedDocumentField(EmbeddedOcurrence)
direct = LazyReferenceField(Animal)
Animal.drop_collection()
Ocurrence.drop_collection()
animal1 = Animal('doggo').save()
animal2 = Animal('cheeta').save()
def check_fields_type(occ):
self.assertIsInstance(occ.direct, LazyReference)
for elem in occ.in_list:
self.assertIsInstance(elem, LazyReference)
self.assertIsInstance(occ.in_embedded.direct, LazyReference)
for elem in occ.in_embedded.in_list:
self.assertIsInstance(elem, LazyReference)
occ = Ocurrence(
in_list=[animal1, animal2],
in_embedded={'in_list': [animal1, animal2], 'direct': animal1},
direct=animal1
).save()
check_fields_type(occ)
occ.reload()
check_fields_type(occ)
occ.direct = animal1.id
occ.in_list = [animal1.id, animal2.id]
occ.in_embedded.direct = animal1.id
occ.in_embedded.in_list = [animal1.id, animal2.id]
check_fields_type(occ)
class GenericLazyReferenceFieldTest(MongoDBTestCase):
def test_generic_lazy_reference_simple(self):
class Animal(Document):
name = StringField()
tag = StringField()
class Ocurrence(Document):
person = StringField()
animal = GenericLazyReferenceField()
Animal.drop_collection()
Ocurrence.drop_collection()
animal = Animal(name="Leopard", tag="heavy").save()
Ocurrence(person="test", animal=animal).save()
p = Ocurrence.objects.get()
self.assertIsInstance(p.animal, LazyReference)
fetched_animal = p.animal.fetch()
self.assertEqual(fetched_animal, animal)
# `fetch` keeps a cache of the referenced document by default...
animal.tag = "not so heavy"
animal.save()
double_fetch = p.animal.fetch()
self.assertIs(fetched_animal, double_fetch)
self.assertEqual(double_fetch.tag, "heavy")
# ...unless specified otherwise
fetch_force = p.animal.fetch(force=True)
self.assertIsNot(fetch_force, fetched_animal)
self.assertEqual(fetch_force.tag, "not so heavy")
def test_generic_lazy_reference_choices(self):
class Animal(Document):
name = StringField()
class Vegetal(Document):
name = StringField()
class Mineral(Document):
name = StringField()
class Ocurrence(Document):
living_thing = GenericLazyReferenceField(choices=[Animal, Vegetal])
thing = GenericLazyReferenceField()
Animal.drop_collection()
Vegetal.drop_collection()
Mineral.drop_collection()
Ocurrence.drop_collection()
animal = Animal(name="Leopard").save()
vegetal = Vegetal(name="Oak").save()
mineral = Mineral(name="Granite").save()
occ_animal = Ocurrence(living_thing=animal, thing=animal).save()
occ_vegetal = Ocurrence(living_thing=vegetal, thing=vegetal).save()
with self.assertRaises(ValidationError):
Ocurrence(living_thing=mineral).save()
occ = Ocurrence.objects.get(living_thing=animal)
self.assertEqual(occ, occ_animal)
self.assertIsInstance(occ.thing, LazyReference)
self.assertIsInstance(occ.living_thing, LazyReference)
occ.thing = vegetal
occ.living_thing = vegetal
occ.save()
occ.thing = mineral
occ.living_thing = mineral
with self.assertRaises(ValidationError):
occ.save()
def test_generic_lazy_reference_set(self):
class Animal(Document):
meta = {'allow_inheritance': True}
name = StringField()
tag = StringField()
class Ocurrence(Document):
person = StringField()
animal = GenericLazyReferenceField()
Animal.drop_collection()
Ocurrence.drop_collection()
class SubAnimal(Animal):
nick = StringField()
animal = Animal(name="Leopard", tag="heavy").save()
sub_animal = SubAnimal(nick='doggo', name='dog').save()
for ref in (
animal,
LazyReference(Animal, animal.pk),
{'_cls': 'Animal', '_ref': DBRef(animal._get_collection_name(), animal.pk)},
sub_animal,
LazyReference(SubAnimal, sub_animal.pk),
{'_cls': 'SubAnimal', '_ref': DBRef(sub_animal._get_collection_name(), sub_animal.pk)},
):
p = Ocurrence(person="test", animal=ref).save()
p.reload()
self.assertIsInstance(p.animal, (LazyReference, Document))
p.animal.fetch()
def test_generic_lazy_reference_bad_set(self):
class Animal(Document):
name = StringField()
tag = StringField()
class Ocurrence(Document):
person = StringField()
animal = GenericLazyReferenceField(choices=['Animal'])
Animal.drop_collection()
Ocurrence.drop_collection()
class BadDoc(Document):
pass
animal = Animal(name="Leopard", tag="heavy").save()
baddoc = BadDoc().save()
for bad in (
42,
'foo',
baddoc,
LazyReference(BadDoc, animal.pk)
):
with self.assertRaises(ValidationError):
p = Ocurrence(person="test", animal=bad).save()
def test_generic_lazy_reference_query_conversion(self):
class Member(Document):
user_num = IntField(primary_key=True)
class BlogPost(Document):
title = StringField()
author = GenericLazyReferenceField()
Member.drop_collection()
BlogPost.drop_collection()
m1 = Member(user_num=1)
m1.save()
m2 = Member(user_num=2)
m2.save()
post1 = BlogPost(title='post 1', author=m1)
post1.save()
post2 = BlogPost(title='post 2', author=m2)
post2.save()
post = BlogPost.objects(author=m1).first()
self.assertEqual(post.id, post1.id)
post = BlogPost.objects(author=m2).first()
self.assertEqual(post.id, post2.id)
# Same thing by passing a LazyReference instance
post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first()
self.assertEqual(post.id, post2.id)
def test_generic_lazy_reference_not_set(self):
class Animal(Document):
name = StringField()
tag = StringField()
class Ocurrence(Document):
person = StringField()
animal = GenericLazyReferenceField()
Animal.drop_collection()
Ocurrence.drop_collection()
Ocurrence(person='foo').save()
p = Ocurrence.objects.get()
self.assertIs(p.animal, None)
def test_generic_lazy_reference_embedded(self):
class Animal(Document):
name = StringField()
tag = StringField()
class EmbeddedOcurrence(EmbeddedDocument):
in_list = ListField(GenericLazyReferenceField())
direct = GenericLazyReferenceField()
class Ocurrence(Document):
in_list = ListField(GenericLazyReferenceField())
in_embedded = EmbeddedDocumentField(EmbeddedOcurrence)
direct = GenericLazyReferenceField()
Animal.drop_collection()
Ocurrence.drop_collection()
animal1 = Animal('doggo').save()
animal2 = Animal('cheeta').save()
def check_fields_type(occ):
self.assertIsInstance(occ.direct, LazyReference)
for elem in occ.in_list:
self.assertIsInstance(elem, LazyReference)
self.assertIsInstance(occ.in_embedded.direct, LazyReference)
for elem in occ.in_embedded.in_list:
self.assertIsInstance(elem, LazyReference)
occ = Ocurrence(
in_list=[animal1, animal2],
in_embedded={'in_list': [animal1, animal2], 'direct': animal1},
direct=animal1
).save()
check_fields_type(occ)
occ.reload()
check_fields_type(occ)
animal1_ref = {'_cls': 'Animal', '_ref': DBRef(animal1._get_collection_name(), animal1.pk)}
animal2_ref = {'_cls': 'Animal', '_ref': DBRef(animal2._get_collection_name(), animal2.pk)}
occ.direct = animal1_ref
occ.in_list = [animal1_ref, animal2_ref]
occ.in_embedded.direct = animal1_ref
occ.in_embedded.in_list = [animal1_ref, animal2_ref]
check_fields_type(occ)
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()

View File

@ -510,6 +510,24 @@ class GeoQueriesTest(MongoDBTestCase):
roads = Road.objects.filter(poly__geo_intersects={"$geometry": polygon}).count() roads = Road.objects.filter(poly__geo_intersects={"$geometry": polygon}).count()
self.assertEqual(1, roads) self.assertEqual(1, roads)
def test_aspymongo_with_only(self):
"""Ensure as_pymongo works with only"""
class Place(Document):
location = PointField()
Place.drop_collection()
p = Place(location=[24.946861267089844, 60.16311983618494])
p.save()
qs = Place.objects().only('location')
self.assertDictEqual(
qs.as_pymongo()[0]['location'],
{u'type': u'Point',
u'coordinates': [
24.946861267089844,
60.16311983618494]
}
)
def test_2dsphere_point_sets_correctly(self): def test_2dsphere_point_sets_correctly(self):
class Location(Document): class Location(Document):
loc = PointField() loc = PointField()

View File

@ -1,6 +1,8 @@
import unittest import unittest
from mongoengine import connect, Document, IntField from mongoengine import connect, Document, IntField, StringField, ListField
from tests.utils import needs_mongodb_v26
__all__ = ("FindAndModifyTest",) __all__ = ("FindAndModifyTest",)
@ -94,6 +96,37 @@ class FindAndModifyTest(unittest.TestCase):
self.assertEqual(old_doc.to_mongo(), {"_id": 1}) self.assertEqual(old_doc.to_mongo(), {"_id": 1})
self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}]) self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}])
@needs_mongodb_v26
def test_modify_with_push(self):
class BlogPost(Document):
tags = ListField(StringField())
BlogPost.drop_collection()
blog = BlogPost.objects.create()
# Push a new tag via modify with new=False (default).
BlogPost(id=blog.id).modify(push__tags='code')
self.assertEqual(blog.tags, [])
blog.reload()
self.assertEqual(blog.tags, ['code'])
# Push a new tag via modify with new=True.
blog = BlogPost.objects(id=blog.id).modify(push__tags='java', new=True)
self.assertEqual(blog.tags, ['code', 'java'])
# Push a new tag at index 0 via the positional push syntax.
blog = BlogPost.objects(id=blog.id).modify(
push__tags__0='python',
new=True)
self.assertEqual(blog.tags, ['python', 'code', 'java'])
# Push multiple new tags at index 1 via the positional push syntax.
blog = BlogPost.objects(id=blog.id).modify(
push__tags__1=['go', 'rust'],
new=True)
self.assertEqual(blog.tags, ['python', 'go', 'rust', 'code', 'java'])
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()

View File

@ -9,6 +9,7 @@ from nose.plugins.skip import SkipTest
import pymongo import pymongo
from pymongo.errors import ConfigurationError from pymongo.errors import ConfigurationError
from pymongo.read_preferences import ReadPreference from pymongo.read_preferences import ReadPreference
from pymongo.results import UpdateResult
import six import six
from mongoengine import * from mongoengine import *
@ -589,6 +590,20 @@ class QuerySetTest(unittest.TestCase):
Scores.objects(id=scores.id).update(max__high_score=500) Scores.objects(id=scores.id).update(max__high_score=500)
self.assertEqual(Scores.objects.get(id=scores.id).high_score, 1000) self.assertEqual(Scores.objects.get(id=scores.id).high_score, 1000)
@needs_mongodb_v26
def test_update_multiple(self):
class Product(Document):
item = StringField()
price = FloatField()
product = Product.objects.create(item='ABC', price=10.99)
product = Product.objects.create(item='ABC', price=10.99)
Product.objects(id=product.id).update(mul__price=1.25)
self.assertEqual(Product.objects.get(id=product.id).price, 13.7375)
unknown_product = Product.objects.create(item='Unknown')
Product.objects(id=unknown_product.id).update(mul__price=100)
self.assertEqual(Product.objects.get(id=unknown_product.id).price, 0)
def test_updates_can_have_match_operators(self): def test_updates_can_have_match_operators(self):
class Comment(EmbeddedDocument): class Comment(EmbeddedDocument):
@ -656,14 +671,14 @@ class QuerySetTest(unittest.TestCase):
result = self.Person(name="Bob", age=25).update( result = self.Person(name="Bob", age=25).update(
upsert=True, full_result=True) upsert=True, full_result=True)
self.assertTrue(isinstance(result, dict)) self.assertTrue(isinstance(result, UpdateResult))
self.assertTrue("upserted" in result) self.assertTrue("upserted" in result.raw_result)
self.assertFalse(result["updatedExisting"]) self.assertFalse(result.raw_result["updatedExisting"])
bob = self.Person.objects.first() bob = self.Person.objects.first()
result = bob.update(set__age=30, full_result=True) result = bob.update(set__age=30, full_result=True)
self.assertTrue(isinstance(result, dict)) self.assertTrue(isinstance(result, UpdateResult))
self.assertTrue(result["updatedExisting"]) self.assertTrue(result.raw_result["updatedExisting"])
self.Person(name="Bob", age=20).save() self.Person(name="Bob", age=20).save()
result = self.Person.objects(name="Bob").update( result = self.Person.objects(name="Bob").update(
@ -830,9 +845,6 @@ class QuerySetTest(unittest.TestCase):
blogs.append(Blog(title="post %s" % i, posts=[post1, post2])) blogs.append(Blog(title="post %s" % i, posts=[post1, post2]))
Blog.objects.insert(blogs, load_bulk=False) Blog.objects.insert(blogs, load_bulk=False)
if mongodb_version < (2, 6):
self.assertEqual(q, 1)
else:
# profiling logs each doc now in the bulk op # profiling logs each doc now in the bulk op
self.assertEqual(q, 99) self.assertEqual(q, 99)
@ -843,11 +855,7 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual(q, 0) self.assertEqual(q, 0)
Blog.objects.insert(blogs) Blog.objects.insert(blogs)
if mongodb_version < (2, 6): self.assertEqual(q, 100) # 99 for insert 1 for fetch
self.assertEqual(q, 2) # 1 for insert, and 1 for in bulk fetch
else:
# 99 for insert, and 1 for in bulk fetch
self.assertEqual(q, 100)
Blog.drop_collection() Blog.drop_collection()
@ -912,10 +920,6 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual(Blog.objects.count(), 2) self.assertEqual(Blog.objects.count(), 2)
Blog.objects.insert([blog2, blog3],
write_concern={"w": 0, 'continue_on_error': True})
self.assertEqual(Blog.objects.count(), 3)
def test_get_changed_fields_query_count(self): def test_get_changed_fields_query_count(self):
"""Make sure we don't perform unnecessary db operations when """Make sure we don't perform unnecessary db operations when
none of document's fields were updated. none of document's fields were updated.
@ -1903,6 +1907,47 @@ class QuerySetTest(unittest.TestCase):
BlogPost.drop_collection() BlogPost.drop_collection()
@needs_mongodb_v26
def test_update_push_with_position(self):
"""Ensure that the 'push' update with position works properly.
"""
class BlogPost(Document):
slug = StringField()
tags = ListField(StringField())
BlogPost.drop_collection()
post = BlogPost.objects.create(slug="test")
BlogPost.objects.filter(id=post.id).update(push__tags="code")
BlogPost.objects.filter(id=post.id).update(push__tags__0=["mongodb", "python"])
post.reload()
self.assertEqual(post.tags, ['mongodb', 'python', 'code'])
BlogPost.objects.filter(id=post.id).update(set__tags__2="java")
post.reload()
self.assertEqual(post.tags, ['mongodb', 'python', 'java'])
# Test a positional push with a single (non-list) value.
BlogPost.objects.filter(id=post.id).update(push__tags__0='scala')
post.reload()
self.assertEqual(post.tags, ['scala', 'mongodb', 'python', 'java'])
def test_update_push_list_of_list(self):
"""Ensure that the 'push' update operation works in the list of list
"""
class BlogPost(Document):
slug = StringField()
tags = ListField()
BlogPost.drop_collection()
post = BlogPost(slug="test").save()
BlogPost.objects.filter(slug="test").update(push__tags=["value1", 123])
post.reload()
self.assertEqual(post.tags, [["value1", 123]])
def test_update_push_and_pull_add_to_set(self): def test_update_push_and_pull_add_to_set(self):
"""Ensure that the 'pull' update operation works correctly. """Ensure that the 'pull' update operation works correctly.
""" """
@ -2045,6 +2090,23 @@ class QuerySetTest(unittest.TestCase):
Site.objects(id=s.id).update_one( Site.objects(id=s.id).update_one(
pull_all__collaborators__helpful__user=['Ross']) pull_all__collaborators__helpful__user=['Ross'])
def test_pull_in_genericembedded_field(self):
class Foo(EmbeddedDocument):
name = StringField()
class Bar(Document):
foos = ListField(GenericEmbeddedDocumentField(
choices=[Foo, ]))
Bar.drop_collection()
foo = Foo(name="bar")
bar = Bar(foos=[foo]).save()
Bar.objects(id=bar.id).update(pull__foos=foo)
bar.reload()
self.assertEqual(len(bar.foos), 0)
def test_update_one_pop_generic_reference(self): def test_update_one_pop_generic_reference(self):
class BlogTag(Document): class BlogTag(Document):
@ -2138,6 +2200,24 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual(message.authors[1].name, "Ross") self.assertEqual(message.authors[1].name, "Ross")
self.assertEqual(message.authors[2].name, "Adam") self.assertEqual(message.authors[2].name, "Adam")
def test_set_generic_embedded_documents(self):
class Bar(EmbeddedDocument):
name = StringField()
class User(Document):
username = StringField()
bar = GenericEmbeddedDocumentField(choices=[Bar,])
User.drop_collection()
User(username='abc').save()
User.objects(username='abc').update(
set__bar=Bar(name='test'), upsert=True)
user = User.objects(username='abc').first()
self.assertEqual(user.bar.name, "test")
def test_reload_embedded_docs_instance(self): def test_reload_embedded_docs_instance(self):
class SubDoc(EmbeddedDocument): class SubDoc(EmbeddedDocument):
@ -2307,12 +2387,17 @@ class QuerySetTest(unittest.TestCase):
age = IntField() age = IntField()
with db_ops_tracker() as q: with db_ops_tracker() as q:
adult = (User.objects.filter(age__gte=18) adult1 = (User.objects.filter(age__gte=18)
.comment('looking for an adult') .comment('looking for an adult')
.first()) .first())
adult2 = (User.objects.comment('looking for an adult')
.filter(age__gte=18)
.first())
ops = q.get_ops() ops = q.get_ops()
self.assertEqual(len(ops), 1) self.assertEqual(len(ops), 2)
op = ops[0] for op in ops:
self.assertEqual(op['query']['$query'], {'age': {'$gte': 18}}) self.assertEqual(op['query']['$query'], {'age': {'$gte': 18}})
self.assertEqual(op['query']['$comment'], 'looking for an adult') self.assertEqual(op['query']['$comment'], 'looking for an adult')
@ -4764,6 +4849,30 @@ class QuerySetTest(unittest.TestCase):
for obj in C.objects.no_sub_classes(): for obj in C.objects.no_sub_classes():
self.assertEqual(obj.__class__, C) self.assertEqual(obj.__class__, C)
def test_query_generic_embedded_document(self):
"""Ensure that querying sub field on generic_embedded_field works
"""
class A(EmbeddedDocument):
a_name = StringField()
class B(EmbeddedDocument):
b_name = StringField()
class Doc(Document):
document = GenericEmbeddedDocumentField(choices=(A, B))
Doc.drop_collection()
Doc(document=A(a_name='A doc')).save()
Doc(document=B(b_name='B doc')).save()
# Filtering with __raw__ works as expected
self.assertEqual(Doc.objects(
__raw__={'document.a_name': 'A doc'}).count(), 1)
self.assertEqual(Doc.objects(
__raw__={'document.b_name': 'B doc'}).count(), 1)
self.assertEqual(Doc.objects(document__a_name='A doc').count(), 1)
self.assertEqual(Doc.objects(document__b_name='B doc').count(), 1)
def test_query_reference_to_custom_pk_doc(self): def test_query_reference_to_custom_pk_doc(self):
class A(Document): class A(Document):

View File

@ -1,5 +1,7 @@
import unittest import unittest
from bson.son import SON
from mongoengine import * from mongoengine import *
from mongoengine.queryset import Q, transform from mongoengine.queryset import Q, transform
@ -28,12 +30,16 @@ class TransformTest(unittest.TestCase):
{'name': {'$exists': True}}) {'name': {'$exists': True}})
def test_transform_update(self): def test_transform_update(self):
class LisDoc(Document):
foo = ListField(StringField())
class DicDoc(Document): class DicDoc(Document):
dictField = DictField() dictField = DictField()
class Doc(Document): class Doc(Document):
pass pass
LisDoc.drop_collection()
DicDoc.drop_collection() DicDoc.drop_collection()
Doc.drop_collection() Doc.drop_collection()
@ -51,6 +57,20 @@ class TransformTest(unittest.TestCase):
update = transform.update(DicDoc, pull__dictField__test=doc) update = transform.update(DicDoc, pull__dictField__test=doc)
self.assertTrue(isinstance(update["$pull"]["dictField"]["test"], dict)) self.assertTrue(isinstance(update["$pull"]["dictField"]["test"], dict))
update = transform.update(LisDoc, pull__foo__in=['a'])
self.assertEqual(update, {'$pull': {'foo': {'$in': ['a']}}})
def test_transform_update_push(self):
"""Ensure the differences in behvaior between 'push' and 'push_all'"""
class BlogPost(Document):
tags = ListField(StringField())
update = transform.update(BlogPost, push__tags=['mongo', 'db'])
self.assertEqual(update, {'$push': {'tags': ['mongo', 'db']}})
update = transform.update(BlogPost, push_all__tags=['mongo', 'db'])
self.assertEqual(update, {'$push': {'tags': {'$each': ['mongo', 'db']}}})
def test_query_field_name(self): def test_query_field_name(self):
"""Ensure that the correct field name is used when querying. """Ensure that the correct field name is used when querying.
""" """
@ -241,6 +261,30 @@ class TransformTest(unittest.TestCase):
with self.assertRaises(InvalidQueryError): with self.assertRaises(InvalidQueryError):
events.count() events.count()
def test_update_pull_for_list_fields(self):
"""
Test the pull operation in an update for an
EmbeddedDocumentListField nested inside an EmbeddedDocumentField
"""
class Word(EmbeddedDocument):
word = StringField()
index = IntField()
class SubDoc(EmbeddedDocument):
heading = ListField(StringField())
text = EmbeddedDocumentListField(Word)
class MainDoc(Document):
title = StringField()
content = EmbeddedDocumentField(SubDoc)
word = Word(word='abc', index=1)
update = transform.update(MainDoc, pull__content__text=word)
self.assertEqual(update, {'$pull': {'content.text': SON([('word', u'abc'), ('index', 1)])}})
update = transform.update(MainDoc, pull__content__heading='xyz')
self.assertEqual(update, {'$pull': {'content.heading': 'xyz'}})
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()

View File

@ -1,6 +1,6 @@
import unittest import unittest
from mongoengine.base.datastructures import StrictDict, SemiStrictDict from mongoengine.base.datastructures import StrictDict
class TestStrictDict(unittest.TestCase): class TestStrictDict(unittest.TestCase):
@ -76,44 +76,5 @@ class TestStrictDict(unittest.TestCase):
assert dict(**d) == {'a': 1, 'b': 2} assert dict(**d) == {'a': 1, 'b': 2}
class TestSemiSrictDict(TestStrictDict):
def strict_dict_class(self, *args, **kwargs):
return SemiStrictDict.create(*args, **kwargs)
def test_init_fails_on_nonexisting_attrs(self):
# disable irrelevant test
pass
def test_setattr_raises_on_nonexisting_attr(self):
# disable irrelevant test
pass
def test_setattr_getattr_nonexisting_attr_succeeds(self):
d = self.dtype()
d.x = 1
self.assertEqual(d.x, 1)
def test_init_succeeds_with_nonexisting_attrs(self):
d = self.dtype(a=1, b=1, c=1, x=2)
self.assertEqual((d.a, d.b, d.c, d.x), (1, 1, 1, 2))
def test_iter_with_nonexisting_attrs(self):
d = self.dtype(a=1, b=1, c=1, x=2)
self.assertEqual(list(d), ['a', 'b', 'c', 'x'])
def test_iteritems_with_nonexisting_attrs(self):
d = self.dtype(a=1, b=1, c=1, x=2)
self.assertEqual(list(d.iteritems()), [('a', 1), ('b', 1), ('c', 1), ('x', 2)])
def tets_cmp_with_strict_dicts(self):
d = self.dtype(a=1, b=1, c=1)
dd = StrictDict.create(("a", "b", "c"))(a=1, b=1, c=1)
self.assertEqual(d, dd)
def test_cmp_with_strict_dict_with_nonexisting_attrs(self):
d = self.dtype(a=1, b=1, c=1, x=2)
dd = StrictDict.create(("a", "b", "c", "x"))(a=1, b=1, c=1, x=2)
self.assertEqual(d, dd)
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()

View File

@ -1,13 +1,12 @@
[tox] [tox]
envlist = {py27,py35,pypy,pypy3}-{mg27,mg28,mg30} envlist = {py27,py35,pypy,pypy3}-{mg35,mg3x}
[testenv] [testenv]
commands = commands =
python setup.py nosetests {posargs} python setup.py nosetests {posargs}
deps = deps =
nose nose
mg27: PyMongo<2.8 mg35: PyMongo==3.5
mg28: PyMongo>=2.8,<2.9 mg3x: PyMongo>=3.0
mg30: PyMongo>=3.0
setenv = setenv =
PYTHON_EGG_CACHE = {envdir}/python-eggs PYTHON_EGG_CACHE = {envdir}/python-eggs