Compare commits
1 commit: revert-164 ... better-db- (SHA1 7195236a3b)
@@ -1,6 +1,5 @@
 #!/bin/bash
 
-sudo apt-get remove mongodb-org-server
 sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10
 
 if [ "$MONGODB" = "2.4" ]; then
@@ -14,7 +13,7 @@ elif [ "$MONGODB" = "2.6" ]; then
 sudo apt-get install mongodb-org-server=2.6.12
 # service should be started automatically
 elif [ "$MONGODB" = "3.0" ]; then
-echo "deb http://repo.mongodb.org/apt/ubuntu trusty/mongodb-org/3.0 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb.list
+echo "deb http://repo.mongodb.org/apt/ubuntu precise/mongodb-org/3.0 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb.list
 sudo apt-get update
 sudo apt-get install mongodb-org-server=3.0.14
 # service should be started automatically
@@ -22,6 +21,3 @@ else
 echo "Invalid MongoDB version, expected 2.4, 2.6, or 3.0."
 exit 1
 fi;
-
-mkdir db
-1>db/logs mongod --dbpath=db &
.travis.yml (11 changed lines)
@@ -15,8 +15,9 @@ language: python
 python:
 - 2.7
 - 3.5
-- 3.6
 - pypy
+- pypy3.3-5.2-alpha1
 
 
 env:
 - MONGODB=2.6 PYMONGO=2.7
@@ -40,15 +41,9 @@ matrix:
 env: MONGODB=2.4 PYMONGO=3.0
 - python: 3.5
 env: MONGODB=3.0 PYMONGO=3.0
-- python: 3.6
-env: MONGODB=2.4 PYMONGO=3.0
-- python: 3.6
-env: MONGODB=3.0 PYMONGO=3.0
 
 before_install:
 - bash .install_mongodb_on_travis.sh
-- sleep 15 # https://docs.travis-ci.com/user/database-setup/#MongoDB-does-not-immediately-accept-connections
-- mongo --eval 'db.version();'
 
 install:
 - sudo apt-get install python-dev python3-dev libopenjpeg-dev zlib1g-dev libjpeg-turbo8-dev
@@ -97,7 +92,7 @@ deploy:
 distributions: "sdist bdist_wheel"
 
 # only deploy on tagged commits (aka GitHub releases) and only for the
-# parent repo's builds running Python 2.7 along with PyMongo v3.0 (we run
+# parent repo's builds running Python 2.7 along with dev PyMongo (we run
 # Travis against many different Python and PyMongo versions and we don't
 # want the deploy to occur multiple times).
 on:
AUTHORS (2 changed lines)
@@ -243,5 +243,3 @@ that much better:
 * Victor Varvaryuk
 * Stanislav Kaledin (https://github.com/sallyruthstruik)
 * Dmitry Yantsen (https://github.com/mrTable)
-* Renjianxin (https://github.com/Davidrjx)
-* Erdenezul Batmunkh (https://github.com/erdenezul)
@@ -2,30 +2,9 @@
 Changelog
 =========
 
-dev
-===
-- Subfield resolve error in generic_emdedded_document query #1651 #1652
-- use each modifier only with $position #1673 #1675
-- Improve LazyReferenceField and GenericLazyReferenceField with nested fields #1704
-- Fix validation error instance in GenericEmbeddedDocumentField #1067
-
-Changes in 0.15.0
-=================
-- Add LazyReferenceField and GenericLazyReferenceField to address #1230
-
-Changes in 0.14.1
-=================
-- Removed SemiStrictDict and started using a regular dict for `BaseDocument._data` #1630
-- Added support for the `$position` param in the `$push` operator #1566
-- Fixed `DateTimeField` interpreting an empty string as today #1533
-- Added a missing `__ne__` method to the `GridFSProxy` class #1632
-- Fixed `BaseQuerySet._fields_to_db_fields` #1553
-
-Changes in 0.14.0
-=================
-- BREAKING CHANGE: Removed the `coerce_types` param from `QuerySet.as_pymongo` #1549
-- POTENTIAL BREAKING CHANGE: Made EmbeddedDocument not hashable by default #1528
-- Improved code quality #1531, #1540, #1541, #1547
-
+Development
+===========
+- (Fill this out as you fix issues and develop your features).
+
 Changes in 0.13.0
 =================
@@ -565,15 +565,6 @@ cannot use the `$` syntax in keyword arguments it has been mapped to `S`::
     >>> post.tags
     ['database', 'mongodb']
 
-From MongoDB version 2.6, push operator supports $position value which allows
-to push values with index.
-    >>> post = BlogPost(title="Test", tags=["mongo"])
-    >>> post.save()
-    >>> post.update(push__tags__0=["database", "code"])
-    >>> post.reload()
-    >>> post.tags
-    ['database', 'code', 'mongo']
-
 .. note::
     Currently only top level lists are handled, future versions of mongodb /
     pymongo plan to support nested positional operators. See `The $ positional
@@ -153,7 +153,7 @@ post. This works, but there is no real reason to be storing the comments
 separately from their associated posts, other than to work around the
 relational model. Using MongoDB we can store the comments as a list of
 *embedded documents* directly on a post document. An embedded document should
-be treated no differently than a regular document; it just doesn't have its own
+be treated no differently that a regular document; it just doesn't have its own
 collection in the database. Using MongoEngine, we can define the structure of
 embedded documents, along with utility methods, in exactly the same way we do
 with regular documents::
@@ -6,18 +6,6 @@ Development
 ***********
 (Fill this out whenever you introduce breaking changes to MongoEngine)
 
-0.14.0
-******
-This release includes a few bug fixes and a significant code cleanup. The most
-important change is that `QuerySet.as_pymongo` no longer supports a
-`coerce_types` mode. If you used it in the past, a) please let us know of your
-use case, b) you'll need to override `as_pymongo` to get the desired outcome.
-
-This release also makes the EmbeddedDocument not hashable by default. If you
-use embedded documents in sets or dictionaries, you might have to override
-`__hash__` and implement a hashing logic specific to your use case. See #1528
-for the reason behind this change.
-
 0.13.0
 ******
 This release adds Unicode support to the `EmailField` and changes its
@@ -23,7 +23,7 @@ __all__ = (list(document.__all__) + list(fields.__all__) +
            list(signals.__all__) + list(errors.__all__))
 
 
-VERSION = (0, 15, 0)
+VERSION = (0, 13, 0)
 
 
 def get_version():
@@ -15,7 +15,7 @@ __all__ = (
     'UPDATE_OPERATORS', '_document_registry', 'get_document',
 
     # datastructures
-    'BaseDict', 'BaseList', 'EmbeddedDocumentList', 'LazyReference',
+    'BaseDict', 'BaseList', 'EmbeddedDocumentList',
 
     # document
     'BaseDocument',
@@ -1,13 +1,12 @@
 import itertools
 import weakref
 
-from bson import DBRef
 import six
 
 from mongoengine.common import _import_class
 from mongoengine.errors import DoesNotExist, MultipleObjectsReturned
 
-__all__ = ('BaseDict', 'BaseList', 'EmbeddedDocumentList', 'LazyReference')
+__all__ = ('BaseDict', 'BaseList', 'EmbeddedDocumentList')
 
 
 class BaseDict(dict):
@@ -128,7 +127,7 @@ class BaseList(list):
         return value
 
     def __iter__(self):
-        for i in six.moves.range(self.__len__()):
+        for i in xrange(self.__len__()):
             yield self[i]
 
     def __setitem__(self, key, value, *args, **kwargs):
@@ -448,40 +447,40 @@ class StrictDict(object):
         return cls._classes[allowed_keys]
 
 
-class LazyReference(DBRef):
-    __slots__ = ('_cached_doc', 'passthrough', 'document_type')
-
-    def fetch(self, force=False):
-        if not self._cached_doc or force:
-            self._cached_doc = self.document_type.objects.get(pk=self.pk)
-            if not self._cached_doc:
-                raise DoesNotExist('Trying to dereference unknown document %s' % (self))
-        return self._cached_doc
-
-    @property
-    def pk(self):
-        return self.id
-
-    def __init__(self, document_type, pk, cached_doc=None, passthrough=False):
-        self.document_type = document_type
-        self._cached_doc = cached_doc
-        self.passthrough = passthrough
-        super(LazyReference, self).__init__(self.document_type._get_collection_name(), pk)
-
-    def __getitem__(self, name):
-        if not self.passthrough:
-            raise KeyError()
-        document = self.fetch()
-        return document[name]
-
-    def __getattr__(self, name):
-        if not object.__getattribute__(self, 'passthrough'):
-            raise AttributeError()
-        document = self.fetch()
-        try:
-            return document[name]
-        except KeyError:
-            raise AttributeError()
-
-    def __repr__(self):
-        return "<LazyReference(%s, %r)>" % (self.document_type, self.pk)
+class SemiStrictDict(StrictDict):
+    __slots__ = ('_extras', )
+    _classes = {}
+
+    def __getattr__(self, attr):
+        try:
+            super(SemiStrictDict, self).__getattr__(attr)
+        except AttributeError:
+            try:
+                return self.__getattribute__('_extras')[attr]
+            except KeyError as e:
+                raise AttributeError(e)
+
+    def __setattr__(self, attr, value):
+        try:
+            super(SemiStrictDict, self).__setattr__(attr, value)
+        except AttributeError:
+            try:
+                self._extras[attr] = value
+            except AttributeError:
+                self._extras = {attr: value}
+
+    def __delattr__(self, attr):
+        try:
+            super(SemiStrictDict, self).__delattr__(attr)
+        except AttributeError:
+            try:
+                del self._extras[attr]
+            except KeyError as e:
+                raise AttributeError(e)
+
+    def __iter__(self):
+        try:
+            extras_iter = iter(self.__getattribute__('_extras'))
+        except AttributeError:
+            extras_iter = ()
+        return itertools.chain(super(SemiStrictDict, self).__iter__(), extras_iter)
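For orientation, here is a minimal sketch (not part of the diff) of how the LazyReference class on the left-hand side of this hunk behaves; the Author document and database name are hypothetical, and the behaviour is inferred from the methods shown above.

from mongoengine import Document, StringField, connect
from mongoengine.base.datastructures import LazyReference

connect('example_db')  # hypothetical database name

class Author(Document):
    name = StringField()

author = Author(name='Ada').save()

# Constructing a LazyReference does no database I/O: it is a bson.DBRef
# subclass that only records the collection name and the primary key.
ref = LazyReference(Author, author.pk)
print(ref.pk)            # primary key, available without a query

# fetch() issues the query and caches the document; force=True re-fetches.
print(ref.fetch().name)  # 'Ada'

# With passthrough=True, item/attribute access falls back to fetch().
ref = LazyReference(Author, author.pk, passthrough=True)
print(ref['name'])       # 'Ada'
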
@@ -13,14 +13,13 @@ from mongoengine import signals
 from mongoengine.base.common import get_document
 from mongoengine.base.datastructures import (BaseDict, BaseList,
                                              EmbeddedDocumentList,
-                                             LazyReference,
-                                             StrictDict)
+                                             SemiStrictDict, StrictDict)
 from mongoengine.base.fields import ComplexBaseField
 from mongoengine.common import _import_class
 from mongoengine.errors import (FieldDoesNotExist, InvalidDocumentError,
                                 LookUpError, OperationError, ValidationError)
 
-__all__ = ('BaseDocument', 'NON_FIELD_ERRORS')
+__all__ = ('BaseDocument',)
 
 NON_FIELD_ERRORS = '__all__'
 
@@ -80,7 +79,8 @@ class BaseDocument(object):
         if self.STRICT and not self._dynamic:
             self._data = StrictDict.create(allowed_keys=self._fields_ordered)()
         else:
-            self._data = {}
+            self._data = SemiStrictDict.create(
+                allowed_keys=self._fields_ordered)()
 
         self._dynamic_fields = SON()
 
@@ -489,7 +489,7 @@ class BaseDocument(object):
             else:
                 data = getattr(data, part, None)
 
-            if not isinstance(data, LazyReference) and hasattr(data, '_changed_fields'):
+            if hasattr(data, '_changed_fields'):
                 if getattr(data, '_is_document', False):
                     continue
 
@@ -146,14 +146,13 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
         raise MongoEngineConnectionError(msg)
 
     def _clean_settings(settings_dict):
-        # set literal more efficient than calling set function
-        irrelevant_fields_set = {
-            'name', 'username', 'password',
-            'authentication_source', 'authentication_mechanism'
-        }
+        irrelevant_fields = set([
+            'name', 'username', 'password', 'authentication_source',
+            'authentication_mechanism'
+        ])
         return {
             k: v for k, v in settings_dict.items()
-            if k not in irrelevant_fields_set
+            if k not in irrelevant_fields
        }
 
     # Retrieve a copy of the connection settings associated with the requested
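To make the behaviour on both sides of this hunk concrete, a tiny standalone sketch of the filtering that _clean_settings performs (the settings values are hypothetical); the only difference between the two versions is the set-literal versus set([...]) syntax.

settings = {'name': 'my_db', 'username': 'u', 'password': 'p',
            'host': 'localhost', 'port': 27017}

irrelevant_fields_set = {
    'name', 'username', 'password',
    'authentication_source', 'authentication_mechanism'
}

# Keep only the keys that matter for building the pymongo connection.
cleaned = {k: v for k, v in settings.items() if k not in irrelevant_fields_set}
print(cleaned)  # {'host': 'localhost', 'port': 27017}
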
@@ -1,9 +1,9 @@
+from collections import OrderedDict
 from bson import DBRef, SON
 import six
 
 from mongoengine.base import (BaseDict, BaseList, EmbeddedDocumentList,
                               TopLevelDocumentMetaclass, get_document)
-from mongoengine.base.datastructures import LazyReference
 from mongoengine.connection import get_db
 from mongoengine.document import Document, EmbeddedDocument
 from mongoengine.fields import DictField, ListField, MapField, ReferenceField
@@ -100,10 +100,7 @@ class DeReference(object):
             if isinstance(item, (Document, EmbeddedDocument)):
                 for field_name, field in item._fields.iteritems():
                     v = item._data.get(field_name, None)
-                    if isinstance(v, LazyReference):
-                        # LazyReference inherits DBRef but should not be dereferenced here !
-                        continue
-                    elif isinstance(v, DBRef):
+                    if isinstance(v, DBRef):
                         reference_map.setdefault(field.document_type, set()).add(v.id)
                     elif isinstance(v, (dict, SON)) and '_ref' in v:
                         reference_map.setdefault(get_document(v['_cls']), set()).add(v['_ref'].id)
@@ -114,9 +111,6 @@ class DeReference(object):
                 if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)):
                     key = field_cls
                     reference_map.setdefault(key, set()).update(refs)
-            elif isinstance(item, LazyReference):
-                # LazyReference inherits DBRef but should not be dereferenced here !
-                continue
             elif isinstance(item, DBRef):
                 reference_map.setdefault(item.collection, set()).add(item.id)
             elif isinstance(item, (dict, SON)) and '_ref' in item:
@@ -208,6 +202,10 @@ class DeReference(object):
             as_tuple = isinstance(items, tuple)
             iterator = enumerate(items)
             data = []
+        elif isinstance(items, OrderedDict):
+            is_list = False
+            iterator = items.iteritems()
+            data = OrderedDict()
         else:
             is_list = False
             iterator = items.iteritems()
@@ -320,7 +320,7 @@ class Document(BaseDocument):
         :param save_condition: only perform save if matching record in db
             satisfies condition(s) (e.g. version number).
             Raises :class:`OperationError` if the conditions are not satisfied
-        :param signal_kwargs: (optional) kwargs dictionary to be passed to
+        :parm signal_kwargs: (optional) kwargs dictionary to be passed to
             the signal calls.
 
         .. versionchanged:: 0.5
@@ -6,6 +6,7 @@ import socket
 import time
 import uuid
 import warnings
+from collections import Mapping
 from operator import itemgetter
 
 from bson import Binary, DBRef, ObjectId, SON
@@ -26,9 +27,7 @@ except ImportError:
     Int64 = long
 
 from mongoengine.base import (BaseDocument, BaseField, ComplexBaseField,
-                              GeoJsonBaseField, LazyReference, ObjectIdField,
-                              get_document)
-from mongoengine.common import _import_class
+                              GeoJsonBaseField, ObjectIdField, get_document)
 from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
 from mongoengine.document import Document, EmbeddedDocument
 from mongoengine.errors import DoesNotExist, InvalidQueryError, ValidationError
@@ -48,7 +47,6 @@ __all__ = (
     'GenericEmbeddedDocumentField', 'DynamicField', 'ListField',
     'SortedListField', 'EmbeddedDocumentListField', 'DictField',
     'MapField', 'ReferenceField', 'CachedReferenceField',
-    'LazyReferenceField', 'GenericLazyReferenceField',
     'GenericReferenceField', 'BinaryField', 'GridFSError', 'GridFSProxy',
     'FileField', 'ImageGridFsProxy', 'ImproperlyConfigured', 'ImageField',
     'GeoPointField', 'PointField', 'LineStringField', 'PolygonField',
@@ -486,10 +484,6 @@ class DateTimeField(BaseField):
         if not isinstance(value, six.string_types):
             return None
 
-        value = value.strip()
-        if not value:
-            return None
-
         # Attempt to parse a datetime:
         if dateutil:
             try:
@@ -689,28 +683,16 @@ class GenericEmbeddedDocumentField(BaseField):
         return value
 
     def validate(self, value, clean=True):
-        if self.choices and isinstance(value, SON):
-            for choice in self.choices:
-                if value['_cls'] == choice._class_name:
-                    return True
-
         if not isinstance(value, EmbeddedDocument):
             self.error('Invalid embedded document instance provided to an '
                        'GenericEmbeddedDocumentField')
 
         value.validate(clean=clean)
 
-    def lookup_member(self, member_name):
-        if self.choices:
-            for choice in self.choices:
-                field = choice._fields.get(member_name)
-                if field:
-                    return field
-        return None
-
     def to_mongo(self, document, use_db_field=True, fields=None):
         if document is None:
             return None
 
         data = document.to_mongo(use_db_field, fields)
         if '_cls' not in data:
             data['_cls'] = document._class_name
@@ -723,6 +705,14 @@ class DynamicField(BaseField):
 
     Used by :class:`~mongoengine.DynamicDocument` to handle dynamic data"""
 
+    def __init__(self, container_class=dict, *args, **kwargs):
+        self._container_cls = container_class
+        if not issubclass(self._container_cls, Mapping):
+            self.error('The class that is specified in `container_class` parameter '
+                       'must be a subclass of `dict`.')
+
+        super(DynamicField, self).__init__(*args, **kwargs)
+
     def to_mongo(self, value, use_db_field=True, fields=None):
         """Convert a Python type to a MongoDB compatible type.
         """
@@ -748,7 +738,7 @@ class DynamicField(BaseField):
             is_list = True
             value = {k: v for k, v in enumerate(value)}
 
-        data = {}
+        data = self._container_cls()
         for k, v in value.iteritems():
             data[k] = self.to_mongo(v, use_db_field, fields)
 
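A short sketch (not from the diff) of the container_class option that the right-hand side adds to DynamicField above; OrderedDict is just an illustrative choice, and the option only changes the mapping type that to_mongo() builds.

from collections import OrderedDict
from mongoengine.fields import DynamicField

# container_class must be a Mapping subclass, otherwise the field raises an error.
field = DynamicField(container_class=OrderedDict)

converted = field.to_mongo({'b': 2, 'a': 1})
print(type(converted))  # OrderedDict on this side of the diff, plain dict by default
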
@@ -794,17 +784,6 @@ class ListField(ComplexBaseField):
         kwargs.setdefault('default', lambda: [])
         super(ListField, self).__init__(**kwargs)
 
-    def __get__(self, instance, owner):
-        if instance is None:
-            # Document class being used rather than a document object
-            return self
-        value = instance._data.get(self.name)
-        LazyReferenceField = _import_class('LazyReferenceField')
-        GenericLazyReferenceField = _import_class('GenericLazyReferenceField')
-        if isinstance(self.field, (LazyReferenceField, GenericLazyReferenceField)) and value:
-            instance._data[self.name] = [self.field.build_lazyref(x) for x in value]
-        return super(ListField, self).__get__(instance, owner)
-
     def validate(self, value):
         """Make sure that a list of valid fields is being used."""
         if (not isinstance(value, (list, tuple, QuerySet)) or
@@ -979,15 +958,6 @@ class ReferenceField(BaseField):
     """A reference to a document that will be automatically dereferenced on
     access (lazily).
 
-    Note this means you will get a database I/O access everytime you access
-    this field. This is necessary because the field returns a :class:`~mongoengine.Document`
-    which precise type can depend of the value of the `_cls` field present in the
-    document in database.
-    In short, using this type of field can lead to poor performances (especially
-    if you access this field only to retrieve it `pk` field which is already
-    known before dereference). To solve this you should consider using the
-    :class:`~mongoengine.fields.LazyReferenceField`.
-
     Use the `reverse_delete_rule` to handle what should happen if the document
     the field is referencing is deleted. EmbeddedDocuments, DictFields and
     MapFields does not support reverse_delete_rule and an `InvalidDocumentError`
@@ -1122,8 +1092,8 @@ class ReferenceField(BaseField):
 
     def validate(self, value):
 
-        if not isinstance(value, (self.document_type, LazyReference, DBRef, ObjectId)):
-            self.error('A ReferenceField only accepts DBRef, LazyReference, ObjectId or documents')
+        if not isinstance(value, (self.document_type, DBRef, ObjectId)):
+            self.error('A ReferenceField only accepts DBRef, ObjectId or documents')
 
         if isinstance(value, Document) and value.id is None:
             self.error('You can only reference documents once they have been '
@@ -1298,12 +1268,6 @@ class GenericReferenceField(BaseField):
     """A reference to *any* :class:`~mongoengine.document.Document` subclass
     that will be automatically dereferenced on access (lazily).
 
-    Note this field works the same way as :class:`~mongoengine.document.ReferenceField`,
-    doing database I/O access the first time it is accessed (even if it's to access
-    it ``pk`` or ``id`` field).
-    To solve this you should consider using the
-    :class:`~mongoengine.fields.GenericLazyReferenceField`.
-
     .. note ::
         * Any documents used as a generic reference must be registered in the
           document registry. Importing the model will automatically register
@@ -1506,9 +1470,6 @@ class GridFSProxy(object):
         else:
             return False
 
-    def __ne__(self, other):
-        return not self == other
-
     @property
     def fs(self):
         if not self._fs:
@@ -2182,195 +2143,3 @@ class MultiPolygonField(GeoJsonBaseField):
     .. versionadded:: 0.9
     """
     _type = 'MultiPolygon'
-
-
-class LazyReferenceField(BaseField):
-    """A really lazy reference to a document.
-    Unlike the :class:`~mongoengine.fields.ReferenceField` it must be manually
-    dereferenced using it ``fetch()`` method.
-
-    .. versionadded:: 0.15
-    """
-
-    def __init__(self, document_type, passthrough=False, dbref=False,
-                 reverse_delete_rule=DO_NOTHING, **kwargs):
-        """Initialises the Reference Field.
-
-        :param dbref: Store the reference as :class:`~pymongo.dbref.DBRef`
-          or as the :class:`~pymongo.objectid.ObjectId`.id .
-        :param reverse_delete_rule: Determines what to do when the referring
-          object is deleted
-        :param passthrough: When trying to access unknown fields, the
-          :class:`~mongoengine.base.datastructure.LazyReference` instance will
-          automatically call `fetch()` and try to retrive the field on the fetched
-          document. Note this only work getting field (not setting or deleting).
-        """
-        if (
-            not isinstance(document_type, six.string_types) and
-            not issubclass(document_type, Document)
-        ):
-            self.error('Argument to LazyReferenceField constructor must be a '
-                       'document class or a string')
-
-        self.dbref = dbref
-        self.passthrough = passthrough
-        self.document_type_obj = document_type
-        self.reverse_delete_rule = reverse_delete_rule
-        super(LazyReferenceField, self).__init__(**kwargs)
-
-    @property
-    def document_type(self):
-        if isinstance(self.document_type_obj, six.string_types):
-            if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT:
-                self.document_type_obj = self.owner_document
-            else:
-                self.document_type_obj = get_document(self.document_type_obj)
-        return self.document_type_obj
-
-    def build_lazyref(self, value):
-        if isinstance(value, LazyReference):
-            if value.passthrough != self.passthrough:
-                value = LazyReference(value.document_type, value.pk, passthrough=self.passthrough)
-        elif value is not None:
-            if isinstance(value, self.document_type):
-                value = LazyReference(self.document_type, value.pk, passthrough=self.passthrough)
-            elif isinstance(value, DBRef):
-                value = LazyReference(self.document_type, value.id, passthrough=self.passthrough)
-            else:
-                # value is the primary key of the referenced document
-                value = LazyReference(self.document_type, value, passthrough=self.passthrough)
-        return value
-
-    def __get__(self, instance, owner):
-        """Descriptor to allow lazy dereferencing."""
-        if instance is None:
-            # Document class being used rather than a document object
-            return self
-
-        value = self.build_lazyref(instance._data.get(self.name))
-        if value:
-            instance._data[self.name] = value
-
-        return super(LazyReferenceField, self).__get__(instance, owner)
-
-    def to_mongo(self, value):
-        if isinstance(value, LazyReference):
-            pk = value.pk
-        elif isinstance(value, self.document_type):
-            pk = value.pk
-        elif isinstance(value, DBRef):
-            pk = value.id
-        else:
-            # value is the primary key of the referenced document
-            pk = value
-        id_field_name = self.document_type._meta['id_field']
-        id_field = self.document_type._fields[id_field_name]
-        pk = id_field.to_mongo(pk)
-        if self.dbref:
-            return DBRef(self.document_type._get_collection_name(), pk)
-        else:
-            return pk
-
-    def validate(self, value):
-        if isinstance(value, LazyReference):
-            if value.collection != self.document_type._get_collection_name():
-                self.error('Reference must be on a `%s` document.' % self.document_type)
-            pk = value.pk
-        elif isinstance(value, self.document_type):
-            pk = value.pk
-        elif isinstance(value, DBRef):
-            # TODO: check collection ?
-            collection = self.document_type._get_collection_name()
-            if value.collection != collection:
-                self.error("DBRef on bad collection (must be on `%s`)" % collection)
-            pk = value.id
-        else:
-            # value is the primary key of the referenced document
-            id_field_name = self.document_type._meta['id_field']
-            id_field = getattr(self.document_type, id_field_name)
-            pk = value
-            try:
-                id_field.validate(pk)
-            except ValidationError:
-                self.error(
-                    "value should be `{0}` document, LazyReference or DBRef on `{0}` "
-                    "or `{0}`'s primary key (i.e. `{1}`)".format(
-                        self.document_type.__name__, type(id_field).__name__))
-
-        if pk is None:
-            self.error('You can only reference documents once they have been '
-                       'saved to the database')
-
-    def prepare_query_value(self, op, value):
-        if value is None:
-            return None
-        super(LazyReferenceField, self).prepare_query_value(op, value)
-        return self.to_mongo(value)
-
-    def lookup_member(self, member_name):
-        return self.document_type._fields.get(member_name)
-
-
-class GenericLazyReferenceField(GenericReferenceField):
-    """A reference to *any* :class:`~mongoengine.document.Document` subclass
-    that will be automatically dereferenced on access (lazily).
-    Unlike the :class:`~mongoengine.fields.GenericReferenceField` it must be
-    manually dereferenced using it ``fetch()`` method.
-
-    .. note ::
-        * Any documents used as a generic reference must be registered in the
-          document registry. Importing the model will automatically register
-          it.
-
-        * You can use the choices param to limit the acceptable Document types
-
-    .. versionadded:: 0.15
-    """
-
-    def __init__(self, *args, **kwargs):
-        self.passthrough = kwargs.pop('passthrough', False)
-        super(GenericLazyReferenceField, self).__init__(*args, **kwargs)
-
-    def _validate_choices(self, value):
-        if isinstance(value, LazyReference):
-            value = value.document_type._class_name
-        super(GenericLazyReferenceField, self)._validate_choices(value)
-
-    def build_lazyref(self, value):
-        if isinstance(value, LazyReference):
-            if value.passthrough != self.passthrough:
-                value = LazyReference(value.document_type, value.pk, passthrough=self.passthrough)
-        elif value is not None:
-            if isinstance(value, (dict, SON)):
-                value = LazyReference(get_document(value['_cls']), value['_ref'].id, passthrough=self.passthrough)
-            elif isinstance(value, Document):
-                value = LazyReference(type(value), value.pk, passthrough=self.passthrough)
-        return value
-
-    def __get__(self, instance, owner):
-        if instance is None:
-            return self
-
-        value = self.build_lazyref(instance._data.get(self.name))
-        if value:
-            instance._data[self.name] = value
-
-        return super(GenericLazyReferenceField, self).__get__(instance, owner)
-
-    def validate(self, value):
-        if isinstance(value, LazyReference) and value.pk is None:
-            self.error('You can only reference documents once they have been'
-                       ' saved to the database')
-        return super(GenericLazyReferenceField, self).validate(value)
-
-    def to_mongo(self, document):
-        if document is None:
-            return None
-
-        if isinstance(document, LazyReference):
-            return SON((
-                ('_cls', document.document_type._class_name),
-                ('_ref', DBRef(document.document_type._get_collection_name(), document.pk))
-            ))
-        else:
-            return super(GenericLazyReferenceField, self).to_mongo(document)
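To make the removed classes concrete, a small usage sketch (not from the diff) of LazyReferenceField as it exists on the left-hand, 0.15 side of this hunk; the documents and database name are hypothetical.

from mongoengine import Document, StringField, connect
from mongoengine.fields import LazyReferenceField

connect('example_db')  # hypothetical database name

class Author(Document):
    name = StringField()

class Book(Document):
    title = StringField()
    # Unlike ReferenceField, reading book.author does not query the database;
    # it returns a LazyReference holding the collection name and primary key.
    author = LazyReferenceField(Author)

author = Author(name='Ada').save()
book = Book(title='Notes', author=author).save()

book = Book.objects.get(pk=book.pk)
ref = book.author        # LazyReference, no dereferencing yet
print(ref.pk)            # cheap access to the primary key
print(ref.fetch().name)  # explicit query only when the document is needed
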
@@ -67,6 +67,7 @@ class BaseQuerySet(object):
         self._scalar = []
         self._none = False
         self._as_pymongo = False
+        self._as_pymongo_coerce = False
         self._search_text = None
 
         # If inheritance is allowed, only return instances and instances of
@@ -727,12 +728,11 @@ class BaseQuerySet(object):
                 '%s is not a subclass of BaseQuerySet' % new_qs.__name__)
 
         copy_props = ('_mongo_query', '_initial_query', '_none', '_query_obj',
-                      '_where_clause', '_loaded_fields', '_ordering',
-                      '_snapshot', '_timeout', '_class_check', '_slave_okay',
-                      '_read_preference', '_iter', '_scalar', '_as_pymongo',
+                      '_where_clause', '_loaded_fields', '_ordering', '_snapshot',
+                      '_timeout', '_class_check', '_slave_okay', '_read_preference',
+                      '_iter', '_scalar', '_as_pymongo', '_as_pymongo_coerce',
                       '_limit', '_skip', '_hint', '_auto_dereference',
-                      '_search_text', 'only_fields', '_max_time_ms',
-                      '_comment')
+                      '_search_text', 'only_fields', '_max_time_ms', '_comment')
 
         for prop in copy_props:
             val = getattr(self, prop)
@@ -939,8 +939,7 @@ class BaseQuerySet(object):
 
             posts = BlogPost.objects(...).fields(slice__comments=5)
 
-        :param kwargs: A set of keyword arguments identifying what to
-            include, exclude, or slice.
+        :param kwargs: A set keywors arguments identifying what to include.
 
         .. versionadded:: 0.5
         """
@@ -1129,15 +1128,16 @@ class BaseQuerySet(object):
         """An alias for scalar"""
         return self.scalar(*fields)
 
-    def as_pymongo(self):
+    def as_pymongo(self, coerce_types=False):
         """Instead of returning Document instances, return raw values from
         pymongo.
 
-        This method is particularly useful if you don't need dereferencing
-        and care primarily about the speed of data retrieval.
+        :param coerce_types: Field types (if applicable) would be use to
+            coerce types.
         """
         queryset = self.clone()
         queryset._as_pymongo = True
+        queryset._as_pymongo_coerce = coerce_types
         return queryset
 
     def max_time_ms(self, ms):
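For context, a brief sketch (with a hypothetical BlogPost document) contrasting the two as_pymongo signatures that appear in the hunk above; only one of the two calls is valid on each side of the diff.

from mongoengine import Document, StringField, connect

connect('example_db')  # hypothetical database name

class BlogPost(Document):
    title = StringField()

BlogPost(title='Test').save()

# Left-hand (0.15) side: no arguments; returns the raw pymongo documents,
# with MongoEngine-internal keys such as _cls stripped out.
raw = BlogPost.objects.only('title').as_pymongo()

# Right-hand (0.13) side: an extra coerce_types flag that runs each value
# through the matching field's to_python() before returning it.
coerced = BlogPost.objects.only('title').as_pymongo(coerce_types=True)
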
@@ -1722,33 +1722,25 @@ class BaseQuerySet(object):
         return frequencies
 
     def _fields_to_dbfields(self, fields):
-        """Translate fields' paths to their db equivalents."""
+        """Translate fields paths to its db equivalents"""
+        ret = []
         subclasses = []
-        if self._document._meta['allow_inheritance']:
+        document = self._document
+        if document._meta['allow_inheritance']:
             subclasses = [get_document(x)
-                          for x in self._document._subclasses][1:]
+                          for x in document._subclasses][1:]
 
-        db_field_paths = []
         for field in fields:
-            field_parts = field.split('.')
             try:
-                field = '.'.join(
-                    f if isinstance(f, six.string_types) else f.db_field
-                    for f in self._document._lookup_field(field_parts)
-                )
-                db_field_paths.append(field)
+                field = '.'.join(f.db_field for f in
+                                 document._lookup_field(field.split('.')))
+                ret.append(field)
             except LookUpError as err:
                 found = False
-
-                # If a field path wasn't found on the main document, go
-                # through its subclasses and see if it exists on any of them.
                 for subdoc in subclasses:
                     try:
-                        subfield = '.'.join(
-                            f if isinstance(f, six.string_types) else f.db_field
-                            for f in subdoc._lookup_field(field_parts)
-                        )
-                        db_field_paths.append(subfield)
+                        subfield = '.'.join(f.db_field for f in
+                                            subdoc._lookup_field(field.split('.')))
+                        ret.append(subfield)
                         found = True
                         break
                     except LookUpError:
@@ -1756,8 +1748,7 @@ class BaseQuerySet(object):
 
                 if not found:
                     raise err
-        return db_field_paths
+        return ret
 
     def _get_order_by(self, keys):
         """Given a list of MongoEngine-style sort keys, return a list
@@ -1808,25 +1799,59 @@ class BaseQuerySet(object):
 
         return tuple(data)
 
-    def _get_as_pymongo(self, doc):
-        """Clean up a PyMongo doc, removing fields that were only fetched
-        for the sake of MongoEngine's implementation, and return it.
-        """
-        # Always remove _cls as a MongoEngine's implementation detail.
-        if '_cls' in doc:
-            del doc['_cls']
-
-        # If the _id was not included in a .only or was excluded in a .exclude,
-        # remove it from the doc (we always fetch it so that we can properly
-        # construct documents).
-        fields = self._loaded_fields
-        if fields and '_id' in doc and (
-                (fields.value == QueryFieldList.ONLY and '_id' not in fields.fields) or
-                (fields.value == QueryFieldList.EXCLUDE and '_id' in fields.fields)
-        ):
-            del doc['_id']
-
-        return doc
+    def _get_as_pymongo(self, row):
+        # Extract which fields paths we should follow if .fields(...) was
+        # used. If not, handle all fields.
+        if not getattr(self, '__as_pymongo_fields', None):
+            self.__as_pymongo_fields = []
+
+            for field in self._loaded_fields.fields - set(['_cls']):
+                self.__as_pymongo_fields.append(field)
+                while '.' in field:
+                    field, _ = field.rsplit('.', 1)
+                    self.__as_pymongo_fields.append(field)
+
+        all_fields = not self.__as_pymongo_fields
+
+        def clean(data, path=None):
+            path = path or ''
+
+            if isinstance(data, dict):
+                new_data = {}
+                for key, value in data.iteritems():
+                    new_path = '%s.%s' % (path, key) if path else key
+
+                    if all_fields:
+                        include_field = True
+                    elif self._loaded_fields.value == QueryFieldList.ONLY:
+                        include_field = new_path in self.__as_pymongo_fields
+                    else:
+                        include_field = new_path not in self.__as_pymongo_fields
+
+                    if include_field:
+                        new_data[key] = clean(value, path=new_path)
+                data = new_data
+            elif isinstance(data, list):
+                data = [clean(d, path=path) for d in data]
+            else:
+                if self._as_pymongo_coerce:
+                    # If we need to coerce types, we need to determine the
+                    # type of this field and use the corresponding
+                    # .to_python(...)
+                    EmbeddedDocumentField = _import_class('EmbeddedDocumentField')
+
+                    obj = self._document
+                    for chunk in path.split('.'):
+                        obj = getattr(obj, chunk, None)
+                        if obj is None:
+                            break
+                        elif isinstance(obj, EmbeddedDocumentField):
+                            obj = obj.document_type
+                    if obj and data is not None:
+                        data = obj.to_python(data)
+            return data
+
+        return clean(row)
 
     def _sub_js_fields(self, code):
         """When fields are specified with [~fieldname] syntax, where
@@ -1,5 +1,3 @@
-import six
-
 from mongoengine.errors import OperationError
 from mongoengine.queryset.base import (BaseQuerySet, CASCADE, DENY, DO_NOTHING,
                                        NULLIFY, PULL)
@@ -114,7 +112,7 @@ class QuerySet(BaseQuerySet):
         # Pull in ITER_CHUNK_SIZE docs from the database and store them in
         # the result cache.
         try:
-            for _ in six.moves.range(ITER_CHUNK_SIZE):
+            for _ in xrange(ITER_CHUNK_SIZE):
                 self._result_cache.append(self.next())
         except StopIteration:
             # Getting this exception means there are no more docs in the
@@ -168,7 +166,7 @@ class QuerySetNoCache(BaseQuerySet):
             return '.. queryset mid-iteration ..'
 
         data = []
-        for _ in six.moves.range(REPR_OUTPUT_SIZE + 1):
+        for _ in xrange(REPR_OUTPUT_SIZE + 1):
             try:
                 data.append(self.next())
             except StopIteration:
@@ -284,9 +284,7 @@ def update(_doc_cls=None, **update):
             if isinstance(field, GeoJsonBaseField):
                 value = field.to_mongo(value)
 
-            if op == 'push' and isinstance(value, (list, tuple, set)):
-                value = [field.prepare_query_value(op, v) for v in value]
-            elif op in (None, 'set', 'push', 'pull'):
+            if op in (None, 'set', 'push', 'pull'):
                 if field.required or value is not None:
                     value = field.prepare_query_value(op, value)
             elif op in ('pushAll', 'pullAll'):
@@ -335,20 +333,10 @@ def update(_doc_cls=None, **update):
             value = {key: value}
         elif op == 'addToSet' and isinstance(value, list):
             value = {key: {'$each': value}}
-        elif op == 'push':
-            if parts[-1].isdigit():
-                key = parts[0]
-                position = int(parts[-1])
-                # $position expects an iterable. If pushing a single value,
-                # wrap it in a list.
-                if not isinstance(value, (set, tuple, list)):
-                    value = [value]
-                value = {key: {'$each': value, '$position': position}}
-            else:
-                value = {key: value}
         else:
             value = {key: value}
         key = '$' + op
 
         if key not in mongo_update:
             mongo_update[key] = value
         elif key in mongo_update and isinstance(mongo_update[key], dict):
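As a sketch of what the removed elif op == 'push': branch produced: a push__<field>__<index> keyword (as used in the docs and tests elsewhere in this diff) is rewritten into a $push update using $each and $position, roughly as follows (the values are illustrative).

# MongoEngine call on the 0.15 side of the diff:
#     post.update(push__tags__0=['database', 'code'])
# The removed branch turned that into this raw MongoDB update document:
mongo_update = {
    '$push': {
        'tags': {
            '$each': ['database', 'code'],  # the values to insert
            '$position': 0,                 # index parsed from the trailing __0
        }
    }
}
# A single non-list value is first wrapped in a list, because $position
# requires $each to be given an iterable.
print(mongo_update)
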
@@ -1,11 +1,11 @@
 [nosetests]
 verbosity=2
 detailed-errors=1
-#tests=tests
+tests=tests
 cover-package=mongoengine
 
 [flake8]
-ignore=E501,F401,F403,F405,I201,I202
+ignore=E501,F401,F403,F405,I201
 exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests
 max-complexity=47
 application-import-names=mongoengine,tests
setup.py (6 changed lines)
@@ -70,9 +70,9 @@ setup(
     name='mongoengine',
     version=VERSION,
     author='Harry Marr',
-    author_email='harry.marr@gmail.com',
-    maintainer="Stefan Wojcik",
-    maintainer_email="wojcikstefan@gmail.com",
+    author_email='harry.marr@{nospam}gmail.com',
+    maintainer="Ross Lawley",
+    maintainer_email="ross.lawley@{nospam}gmail.com",
     url='http://mongoengine.org/',
     download_url='https://github.com/MongoEngine/mongoengine/tarball/master',
     license='MIT',
@@ -22,8 +22,6 @@ from mongoengine.queryset import NULLIFY, Q
 from mongoengine.context_managers import switch_db, query_counter
 from mongoengine import signals
 
-from tests.utils import needs_mongodb_v26
-
 TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__),
                                '../fields/mongoengine.png')
 
@@ -828,22 +826,6 @@ class InstanceTest(unittest.TestCase):
 
         self.assertDbEqual([dict(other_doc.to_mongo()), dict(doc.to_mongo())])
 
-    @needs_mongodb_v26
-    def test_modify_with_positional_push(self):
-        class BlogPost(Document):
-            tags = ListField(StringField())
-
-        post = BlogPost.objects.create(tags=['python'])
-        self.assertEqual(post.tags, ['python'])
-        post.modify(push__tags__0=['code', 'mongo'])
-        self.assertEqual(post.tags, ['code', 'mongo', 'python'])
-
-        # Assert same order of the list items is maintained in the db
-        self.assertEqual(
-            BlogPost._get_collection().find_one({'_id': post.pk})['tags'],
-            ['code', 'mongo', 'python']
-        )
-
     def test_save(self):
         """Ensure that a document may be saved in the database."""
 
@@ -3167,33 +3149,6 @@ class InstanceTest(unittest.TestCase):
 
         person.update(set__height=2.0)
 
-    @needs_mongodb_v26
-    def test_push_with_position(self):
-        """Ensure that push with position works properly for an instance."""
-        class BlogPost(Document):
-            slug = StringField()
-            tags = ListField(StringField())
-
-        blog = BlogPost()
-        blog.slug = "ABC"
-        blog.tags = ["python"]
-        blog.save()
-
-        blog.update(push__tags__0=["mongodb", "code"])
-        blog.reload()
-        self.assertEqual(blog.tags, ['mongodb', 'code', 'python'])
-
-    def test_push_nested_list(self):
-        """Ensure that push update works in nested list"""
-        class BlogPost(Document):
-            slug = StringField()
-            tags = ListField()
-
-        blog = BlogPost(slug="test").save()
-        blog.update(push__tags=["value1", 123])
-        blog.reload()
-        self.assertEqual(blog.tags, [["value1", 123]])
-
 
 if __name__ == '__main__':
     unittest.main()
@@ -5,9 +5,11 @@ import uuid
|
|||||||
import math
|
import math
|
||||||
import itertools
|
import itertools
|
||||||
import re
|
import re
|
||||||
|
import pymongo
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
from nose.plugins.skip import SkipTest
|
from nose.plugins.skip import SkipTest
|
||||||
|
from collections import OrderedDict
|
||||||
import six
|
import six
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -26,37 +28,18 @@ except ImportError:
from mongoengine import *
from mongoengine.connection import get_db
from mongoengine.base import (BaseDict, BaseField, EmbeddedDocumentList,
                              _document_registry, LazyReference)
                              _document_registry, TopLevelDocumentMetaclass)

from tests.utils import MongoDBTestCase
from tests.utils import MongoDBTestCase, MONGO_TEST_DB
from mongoengine.python_support import IS_PYMONGO_3
if IS_PYMONGO_3:
    from bson import CodecOptions

__all__ = ("FieldTest", "EmbeddedDocumentListFieldTestCase")


class FieldTest(MongoDBTestCase):

    def test_datetime_from_empty_string(self):
        """
        Ensure an exception is raised when trying to
        cast an empty string to datetime.
        """
        class MyDoc(Document):
            dt = DateTimeField()

        md = MyDoc(dt='')
        self.assertRaises(ValidationError, md.save)

    def test_datetime_from_whitespace_string(self):
        """
        Ensure an exception is raised when trying to
        cast a whitespace-only string to datetime.
        """
        class MyDoc(Document):
            dt = DateTimeField()

        md = MyDoc(dt=' ')
        self.assertRaises(ValidationError, md.save)

    def test_default_values_nothing_set(self):
        """Ensure that default field values are used when creating
        a document.
@@ -931,9 +914,7 @@ class FieldTest(MongoDBTestCase):
            comments = ListField(EmbeddedDocumentField(Comment))
            tags = ListField(StringField())
            authors = ListField(ReferenceField(User))
            authors_as_lazy = ListField(LazyReferenceField(User))
            generic = ListField(GenericReferenceField())
            generic_as_lazy = ListField(GenericLazyReferenceField())

        User.drop_collection()
        BlogPost.drop_collection()
@@ -971,15 +952,6 @@ class FieldTest(MongoDBTestCase):
        post.authors = [user]
        post.validate()

        post.authors_as_lazy = [Comment()]
        self.assertRaises(ValidationError, post.validate)

        post.authors_as_lazy = [User()]
        self.assertRaises(ValidationError, post.validate)

        post.authors_as_lazy = [user]
        post.validate()

        post.generic = [1, 2]
        self.assertRaises(ValidationError, post.validate)

@@ -992,18 +964,6 @@ class FieldTest(MongoDBTestCase):
        post.generic = [user]
        post.validate()

        post.generic_as_lazy = [1, 2]
        self.assertRaises(ValidationError, post.validate)

        post.generic_as_lazy = [User(), Comment()]
        self.assertRaises(ValidationError, post.validate)

        post.generic_as_lazy = [Comment()]
        self.assertRaises(ValidationError, post.validate)

        post.generic_as_lazy = [user]
        post.validate()

    def test_sorted_list_sorting(self):
        """Ensure that a sorted list field properly sorts values.
        """
@@ -4228,6 +4188,67 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase):
|
|||||||
self.assertTrue(hasattr(CustomData.c_field, 'custom_data'))
|
self.assertTrue(hasattr(CustomData.c_field, 'custom_data'))
|
||||||
self.assertEqual(custom_data['a'], CustomData.c_field.custom_data['a'])
|
self.assertEqual(custom_data['a'], CustomData.c_field.custom_data['a'])
|
||||||
|
|
||||||
|
def test_dynamicfield_with_container_class(self):
|
||||||
|
"""
|
||||||
|
Tests that object can be stored in order by DynamicField class
|
||||||
|
with container_class parameter.
|
||||||
|
"""
|
||||||
|
raw_data = [('d', 1), ('c', 2), ('b', 3), ('a', 4)]
|
||||||
|
|
||||||
|
class Doc(Document):
|
||||||
|
ordered_data = DynamicField(container_class=OrderedDict)
|
||||||
|
unordered_data = DynamicField()
|
||||||
|
|
||||||
|
Doc.drop_collection()
|
||||||
|
|
||||||
|
doc = Doc(ordered_data=OrderedDict(raw_data), unordered_data=dict(raw_data)).save()
|
||||||
|
|
||||||
|
# checks that the data is in order
|
||||||
|
self.assertEqual(type(doc.ordered_data), OrderedDict)
|
||||||
|
self.assertEqual(type(doc.unordered_data), dict)
|
||||||
|
self.assertEqual(','.join(doc.ordered_data.keys()), 'd,c,b,a')
|
||||||
|
|
||||||
|
# checks that the data is stored to the database in order
|
||||||
|
pymongo_db = pymongo.MongoClient()[MONGO_TEST_DB]
|
||||||
|
if IS_PYMONGO_3:
|
||||||
|
codec_option = CodecOptions(document_class=OrderedDict)
|
||||||
|
db_doc = pymongo_db.doc.with_options(codec_options=codec_option).find_one()
|
||||||
|
else:
|
||||||
|
db_doc = pymongo_db.doc.find_one(as_class=OrderedDict)
|
||||||
|
|
||||||
|
self.assertEqual(','.join(doc.ordered_data.keys()), 'd,c,b,a')
|
||||||
|
|
||||||
|
def test_dynamicfield_with_wrong_container_class(self):
|
||||||
|
with self.assertRaises(ValidationError):
|
||||||
|
class DocWithInvalidField:
|
||||||
|
data = DynamicField(container_class=list)
|
||||||
|
|
||||||
|
def test_dynamicfield_with_wrong_container_class_and_reload_docuemnt(self):
|
||||||
|
# This is because 'codec_options' is supported on pymongo3 or later
|
||||||
|
if IS_PYMONGO_3:
|
||||||
|
class OrderedDocument(Document):
|
||||||
|
my_metaclass = TopLevelDocumentMetaclass
|
||||||
|
__metaclass__ = TopLevelDocumentMetaclass
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def _get_collection(cls):
|
||||||
|
collection = super(OrderedDocument, cls)._get_collection()
|
||||||
|
opts = CodecOptions(document_class=OrderedDict)
|
||||||
|
|
||||||
|
return collection.with_options(codec_options=opts)
|
||||||
|
|
||||||
|
raw_data = [('d', 1), ('c', 2), ('b', 3), ('a', 4)]
|
||||||
|
|
||||||
|
class Doc(OrderedDocument):
|
||||||
|
data = DynamicField(container_class=OrderedDict)
|
||||||
|
|
||||||
|
Doc.drop_collection()
|
||||||
|
|
||||||
|
doc = Doc(data=OrderedDict(raw_data)).save()
|
||||||
|
doc.reload()
|
||||||
|
|
||||||
|
self.assertEqual(type(doc.data), OrderedDict)
|
||||||
|
self.assertEqual(','.join(doc.data.keys()), 'd,c,b,a')
|
||||||
|
|
||||||
class CachedReferenceFieldTest(MongoDBTestCase):
|
class CachedReferenceFieldTest(MongoDBTestCase):
|
||||||
|
|
||||||
@@ -4621,522 +4642,5 @@ class CachedReferenceFieldTest(MongoDBTestCase):
|
|||||||
self.assertTrue(isinstance(ocorrence.animal, Animal))
|
self.assertTrue(isinstance(ocorrence.animal, Animal))
|
||||||
|
|
||||||
|
|
||||||
class LazyReferenceFieldTest(MongoDBTestCase):
|
|
||||||
def test_lazy_reference_config(self):
|
|
||||||
# Make sure ReferenceField only accepts a document class or a string
|
|
||||||
# with a document class name.
|
|
||||||
self.assertRaises(ValidationError, LazyReferenceField, EmbeddedDocument)
|
|
||||||
|
|
||||||
def test_lazy_reference_simple(self):
|
|
||||||
class Animal(Document):
|
|
||||||
name = StringField()
|
|
||||||
tag = StringField()
|
|
||||||
|
|
||||||
class Ocurrence(Document):
|
|
||||||
person = StringField()
|
|
||||||
animal = LazyReferenceField(Animal)
|
|
||||||
|
|
||||||
Animal.drop_collection()
|
|
||||||
Ocurrence.drop_collection()
|
|
||||||
|
|
||||||
animal = Animal(name="Leopard", tag="heavy").save()
|
|
||||||
Ocurrence(person="test", animal=animal).save()
|
|
||||||
p = Ocurrence.objects.get()
|
|
||||||
self.assertIsInstance(p.animal, LazyReference)
|
|
||||||
fetched_animal = p.animal.fetch()
|
|
||||||
self.assertEqual(fetched_animal, animal)
|
|
||||||
# `fetch` keep cache on referenced document by default...
|
|
||||||
animal.tag = "not so heavy"
|
|
||||||
animal.save()
|
|
||||||
double_fetch = p.animal.fetch()
|
|
||||||
self.assertIs(fetched_animal, double_fetch)
|
|
||||||
self.assertEqual(double_fetch.tag, "heavy")
|
|
||||||
# ...unless specified otherwise
|
|
||||||
fetch_force = p.animal.fetch(force=True)
|
|
||||||
self.assertIsNot(fetch_force, fetched_animal)
|
|
||||||
self.assertEqual(fetch_force.tag, "not so heavy")
|
|
||||||
|
|
||||||
def test_lazy_reference_fetch_invalid_ref(self):
|
|
||||||
class Animal(Document):
|
|
||||||
name = StringField()
|
|
||||||
tag = StringField()
|
|
||||||
|
|
||||||
class Ocurrence(Document):
|
|
||||||
person = StringField()
|
|
||||||
animal = LazyReferenceField(Animal)
|
|
||||||
|
|
||||||
Animal.drop_collection()
|
|
||||||
Ocurrence.drop_collection()
|
|
||||||
|
|
||||||
animal = Animal(name="Leopard", tag="heavy").save()
|
|
||||||
Ocurrence(person="test", animal=animal).save()
|
|
||||||
animal.delete()
|
|
||||||
p = Ocurrence.objects.get()
|
|
||||||
self.assertIsInstance(p.animal, LazyReference)
|
|
||||||
with self.assertRaises(DoesNotExist):
|
|
||||||
p.animal.fetch()
|
|
||||||
|
|
||||||
def test_lazy_reference_set(self):
|
|
||||||
class Animal(Document):
|
|
||||||
meta = {'allow_inheritance': True}
|
|
||||||
|
|
||||||
name = StringField()
|
|
||||||
tag = StringField()
|
|
||||||
|
|
||||||
class Ocurrence(Document):
|
|
||||||
person = StringField()
|
|
||||||
animal = LazyReferenceField(Animal)
|
|
||||||
|
|
||||||
Animal.drop_collection()
|
|
||||||
Ocurrence.drop_collection()
|
|
||||||
|
|
||||||
class SubAnimal(Animal):
|
|
||||||
nick = StringField()
|
|
||||||
|
|
||||||
animal = Animal(name="Leopard", tag="heavy").save()
|
|
||||||
sub_animal = SubAnimal(nick='doggo', name='dog').save()
|
|
||||||
for ref in (
|
|
||||||
animal,
|
|
||||||
animal.pk,
|
|
||||||
DBRef(animal._get_collection_name(), animal.pk),
|
|
||||||
LazyReference(Animal, animal.pk),
|
|
||||||
|
|
||||||
sub_animal,
|
|
||||||
sub_animal.pk,
|
|
||||||
DBRef(sub_animal._get_collection_name(), sub_animal.pk),
|
|
||||||
LazyReference(SubAnimal, sub_animal.pk),
|
|
||||||
):
|
|
||||||
p = Ocurrence(person="test", animal=ref).save()
|
|
||||||
p.reload()
|
|
||||||
self.assertIsInstance(p.animal, LazyReference)
|
|
||||||
p.animal.fetch()
|
|
||||||
|
|
||||||
def test_lazy_reference_bad_set(self):
|
|
||||||
class Animal(Document):
|
|
||||||
name = StringField()
|
|
||||||
tag = StringField()
|
|
||||||
|
|
||||||
class Ocurrence(Document):
|
|
||||||
person = StringField()
|
|
||||||
animal = LazyReferenceField(Animal)
|
|
||||||
|
|
||||||
Animal.drop_collection()
|
|
||||||
Ocurrence.drop_collection()
|
|
||||||
|
|
||||||
class BadDoc(Document):
|
|
||||||
pass
|
|
||||||
|
|
||||||
animal = Animal(name="Leopard", tag="heavy").save()
|
|
||||||
baddoc = BadDoc().save()
|
|
||||||
for bad in (
|
|
||||||
42,
|
|
||||||
'foo',
|
|
||||||
baddoc,
|
|
||||||
DBRef(baddoc._get_collection_name(), animal.pk),
|
|
||||||
LazyReference(BadDoc, animal.pk)
|
|
||||||
):
|
|
||||||
with self.assertRaises(ValidationError):
|
|
||||||
p = Ocurrence(person="test", animal=bad).save()
|
|
||||||
|
|
||||||
def test_lazy_reference_query_conversion(self):
|
|
||||||
"""Ensure that LazyReferenceFields can be queried using objects and values
|
|
||||||
of the type of the primary key of the referenced object.
|
|
||||||
"""
|
|
||||||
class Member(Document):
|
|
||||||
user_num = IntField(primary_key=True)
|
|
||||||
|
|
||||||
class BlogPost(Document):
|
|
||||||
title = StringField()
|
|
||||||
author = LazyReferenceField(Member, dbref=False)
|
|
||||||
|
|
||||||
Member.drop_collection()
|
|
||||||
BlogPost.drop_collection()
|
|
||||||
|
|
||||||
m1 = Member(user_num=1)
|
|
||||||
m1.save()
|
|
||||||
m2 = Member(user_num=2)
|
|
||||||
m2.save()
|
|
||||||
|
|
||||||
post1 = BlogPost(title='post 1', author=m1)
|
|
||||||
post1.save()
|
|
||||||
|
|
||||||
post2 = BlogPost(title='post 2', author=m2)
|
|
||||||
post2.save()
|
|
||||||
|
|
||||||
post = BlogPost.objects(author=m1).first()
|
|
||||||
self.assertEqual(post.id, post1.id)
|
|
||||||
|
|
||||||
post = BlogPost.objects(author=m2).first()
|
|
||||||
self.assertEqual(post.id, post2.id)
|
|
||||||
|
|
||||||
# Same thing by passing a LazyReference instance
|
|
||||||
post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first()
|
|
||||||
self.assertEqual(post.id, post2.id)
|
|
||||||
|
|
||||||
def test_lazy_reference_query_conversion_dbref(self):
|
|
||||||
"""Ensure that LazyReferenceFields can be queried using objects and values
|
|
||||||
of the type of the primary key of the referenced object.
|
|
||||||
"""
|
|
||||||
class Member(Document):
|
|
||||||
user_num = IntField(primary_key=True)
|
|
||||||
|
|
||||||
class BlogPost(Document):
|
|
||||||
title = StringField()
|
|
||||||
author = LazyReferenceField(Member, dbref=True)
|
|
||||||
|
|
||||||
Member.drop_collection()
|
|
||||||
BlogPost.drop_collection()
|
|
||||||
|
|
||||||
m1 = Member(user_num=1)
|
|
||||||
m1.save()
|
|
||||||
m2 = Member(user_num=2)
|
|
||||||
m2.save()
|
|
||||||
|
|
||||||
post1 = BlogPost(title='post 1', author=m1)
|
|
||||||
post1.save()
|
|
||||||
|
|
||||||
post2 = BlogPost(title='post 2', author=m2)
|
|
||||||
post2.save()
|
|
||||||
|
|
||||||
post = BlogPost.objects(author=m1).first()
|
|
||||||
self.assertEqual(post.id, post1.id)
|
|
||||||
|
|
||||||
post = BlogPost.objects(author=m2).first()
|
|
||||||
self.assertEqual(post.id, post2.id)
|
|
||||||
|
|
||||||
# Same thing by passing a LazyReference instance
|
|
||||||
post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first()
|
|
||||||
self.assertEqual(post.id, post2.id)
|
|
||||||
|
|
||||||
def test_lazy_reference_passthrough(self):
|
|
||||||
class Animal(Document):
|
|
||||||
name = StringField()
|
|
||||||
tag = StringField()
|
|
||||||
|
|
||||||
class Ocurrence(Document):
|
|
||||||
animal = LazyReferenceField(Animal, passthrough=False)
|
|
||||||
animal_passthrough = LazyReferenceField(Animal, passthrough=True)
|
|
||||||
|
|
||||||
Animal.drop_collection()
|
|
||||||
Ocurrence.drop_collection()
|
|
||||||
|
|
||||||
animal = Animal(name="Leopard", tag="heavy").save()
|
|
||||||
Ocurrence(animal=animal, animal_passthrough=animal).save()
|
|
||||||
p = Ocurrence.objects.get()
|
|
||||||
self.assertIsInstance(p.animal, LazyReference)
|
|
||||||
with self.assertRaises(KeyError):
|
|
||||||
p.animal['name']
|
|
||||||
with self.assertRaises(AttributeError):
|
|
||||||
p.animal.name
|
|
||||||
self.assertEqual(p.animal.pk, animal.pk)
|
|
||||||
|
|
||||||
self.assertEqual(p.animal_passthrough.name, "Leopard")
|
|
||||||
self.assertEqual(p.animal_passthrough['name'], "Leopard")
|
|
||||||
|
|
||||||
# Should not be able to access referenced document's methods
|
|
||||||
with self.assertRaises(AttributeError):
|
|
||||||
p.animal.save
|
|
||||||
with self.assertRaises(KeyError):
|
|
||||||
p.animal['save']
|
|
||||||
|
|
||||||
def test_lazy_reference_not_set(self):
|
|
||||||
class Animal(Document):
|
|
||||||
name = StringField()
|
|
||||||
tag = StringField()
|
|
||||||
|
|
||||||
class Ocurrence(Document):
|
|
||||||
person = StringField()
|
|
||||||
animal = LazyReferenceField(Animal)
|
|
||||||
|
|
||||||
Animal.drop_collection()
|
|
||||||
Ocurrence.drop_collection()
|
|
||||||
|
|
||||||
Ocurrence(person='foo').save()
|
|
||||||
p = Ocurrence.objects.get()
|
|
||||||
self.assertIs(p.animal, None)
|
|
||||||
|
|
||||||
def test_lazy_reference_equality(self):
|
|
||||||
class Animal(Document):
|
|
||||||
name = StringField()
|
|
||||||
tag = StringField()
|
|
||||||
|
|
||||||
Animal.drop_collection()
|
|
||||||
|
|
||||||
animal = Animal(name="Leopard", tag="heavy").save()
|
|
||||||
animalref = LazyReference(Animal, animal.pk)
|
|
||||||
self.assertEqual(animal, animalref)
|
|
||||||
self.assertEqual(animalref, animal)
|
|
||||||
|
|
||||||
other_animalref = LazyReference(Animal, ObjectId("54495ad94c934721ede76f90"))
|
|
||||||
self.assertNotEqual(animal, other_animalref)
|
|
||||||
self.assertNotEqual(other_animalref, animal)
|
|
||||||
|
|
||||||
def test_lazy_reference_embedded(self):
|
|
||||||
class Animal(Document):
|
|
||||||
name = StringField()
|
|
||||||
tag = StringField()
|
|
||||||
|
|
||||||
class EmbeddedOcurrence(EmbeddedDocument):
|
|
||||||
in_list = ListField(LazyReferenceField(Animal))
|
|
||||||
direct = LazyReferenceField(Animal)
|
|
||||||
|
|
||||||
class Ocurrence(Document):
|
|
||||||
in_list = ListField(LazyReferenceField(Animal))
|
|
||||||
in_embedded = EmbeddedDocumentField(EmbeddedOcurrence)
|
|
||||||
direct = LazyReferenceField(Animal)
|
|
||||||
|
|
||||||
Animal.drop_collection()
|
|
||||||
Ocurrence.drop_collection()
|
|
||||||
|
|
||||||
animal1 = Animal('doggo').save()
|
|
||||||
animal2 = Animal('cheeta').save()
|
|
||||||
|
|
||||||
def check_fields_type(occ):
|
|
||||||
self.assertIsInstance(occ.direct, LazyReference)
|
|
||||||
for elem in occ.in_list:
|
|
||||||
self.assertIsInstance(elem, LazyReference)
|
|
||||||
self.assertIsInstance(occ.in_embedded.direct, LazyReference)
|
|
||||||
for elem in occ.in_embedded.in_list:
|
|
||||||
self.assertIsInstance(elem, LazyReference)
|
|
||||||
|
|
||||||
occ = Ocurrence(
|
|
||||||
in_list=[animal1, animal2],
|
|
||||||
in_embedded={'in_list': [animal1, animal2], 'direct': animal1},
|
|
||||||
direct=animal1
|
|
||||||
).save()
|
|
||||||
check_fields_type(occ)
|
|
||||||
occ.reload()
|
|
||||||
check_fields_type(occ)
|
|
||||||
occ.direct = animal1.id
|
|
||||||
occ.in_list = [animal1.id, animal2.id]
|
|
||||||
occ.in_embedded.direct = animal1.id
|
|
||||||
occ.in_embedded.in_list = [animal1.id, animal2.id]
|
|
||||||
check_fields_type(occ)
|
|
||||||
|
|
||||||
|
|
||||||
class GenericLazyReferenceFieldTest(MongoDBTestCase):
|
|
||||||
def test_generic_lazy_reference_simple(self):
|
|
||||||
class Animal(Document):
|
|
||||||
name = StringField()
|
|
||||||
tag = StringField()
|
|
||||||
|
|
||||||
class Ocurrence(Document):
|
|
||||||
person = StringField()
|
|
||||||
animal = GenericLazyReferenceField()
|
|
||||||
|
|
||||||
Animal.drop_collection()
|
|
||||||
Ocurrence.drop_collection()
|
|
||||||
|
|
||||||
animal = Animal(name="Leopard", tag="heavy").save()
|
|
||||||
Ocurrence(person="test", animal=animal).save()
|
|
||||||
p = Ocurrence.objects.get()
|
|
||||||
self.assertIsInstance(p.animal, LazyReference)
|
|
||||||
fetched_animal = p.animal.fetch()
|
|
||||||
self.assertEqual(fetched_animal, animal)
|
|
||||||
# `fetch` keep cache on referenced document by default...
|
|
||||||
animal.tag = "not so heavy"
|
|
||||||
animal.save()
|
|
||||||
double_fetch = p.animal.fetch()
|
|
||||||
self.assertIs(fetched_animal, double_fetch)
|
|
||||||
self.assertEqual(double_fetch.tag, "heavy")
|
|
||||||
# ...unless specified otherwise
|
|
||||||
fetch_force = p.animal.fetch(force=True)
|
|
||||||
self.assertIsNot(fetch_force, fetched_animal)
|
|
||||||
self.assertEqual(fetch_force.tag, "not so heavy")
|
|
||||||
|
|
||||||
def test_generic_lazy_reference_choices(self):
|
|
||||||
class Animal(Document):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
class Vegetal(Document):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
class Mineral(Document):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
class Ocurrence(Document):
|
|
||||||
living_thing = GenericLazyReferenceField(choices=[Animal, Vegetal])
|
|
||||||
thing = GenericLazyReferenceField()
|
|
||||||
|
|
||||||
Animal.drop_collection()
|
|
||||||
Vegetal.drop_collection()
|
|
||||||
Mineral.drop_collection()
|
|
||||||
Ocurrence.drop_collection()
|
|
||||||
|
|
||||||
animal = Animal(name="Leopard").save()
|
|
||||||
vegetal = Vegetal(name="Oak").save()
|
|
||||||
mineral = Mineral(name="Granite").save()
|
|
||||||
|
|
||||||
occ_animal = Ocurrence(living_thing=animal, thing=animal).save()
|
|
||||||
occ_vegetal = Ocurrence(living_thing=vegetal, thing=vegetal).save()
|
|
||||||
with self.assertRaises(ValidationError):
|
|
||||||
Ocurrence(living_thing=mineral).save()
|
|
||||||
|
|
||||||
occ = Ocurrence.objects.get(living_thing=animal)
|
|
||||||
self.assertEqual(occ, occ_animal)
|
|
||||||
self.assertIsInstance(occ.thing, LazyReference)
|
|
||||||
self.assertIsInstance(occ.living_thing, LazyReference)
|
|
||||||
|
|
||||||
occ.thing = vegetal
|
|
||||||
occ.living_thing = vegetal
|
|
||||||
occ.save()
|
|
||||||
|
|
||||||
occ.thing = mineral
|
|
||||||
occ.living_thing = mineral
|
|
||||||
with self.assertRaises(ValidationError):
|
|
||||||
occ.save()
|
|
||||||
|
|
||||||
def test_generic_lazy_reference_set(self):
|
|
||||||
class Animal(Document):
|
|
||||||
meta = {'allow_inheritance': True}
|
|
||||||
|
|
||||||
name = StringField()
|
|
||||||
tag = StringField()
|
|
||||||
|
|
||||||
class Ocurrence(Document):
|
|
||||||
person = StringField()
|
|
||||||
animal = GenericLazyReferenceField()
|
|
||||||
|
|
||||||
Animal.drop_collection()
|
|
||||||
Ocurrence.drop_collection()
|
|
||||||
|
|
||||||
class SubAnimal(Animal):
|
|
||||||
nick = StringField()
|
|
||||||
|
|
||||||
animal = Animal(name="Leopard", tag="heavy").save()
|
|
||||||
sub_animal = SubAnimal(nick='doggo', name='dog').save()
|
|
||||||
for ref in (
|
|
||||||
animal,
|
|
||||||
LazyReference(Animal, animal.pk),
|
|
||||||
{'_cls': 'Animal', '_ref': DBRef(animal._get_collection_name(), animal.pk)},
|
|
||||||
|
|
||||||
sub_animal,
|
|
||||||
LazyReference(SubAnimal, sub_animal.pk),
|
|
||||||
{'_cls': 'SubAnimal', '_ref': DBRef(sub_animal._get_collection_name(), sub_animal.pk)},
|
|
||||||
):
|
|
||||||
p = Ocurrence(person="test", animal=ref).save()
|
|
||||||
p.reload()
|
|
||||||
self.assertIsInstance(p.animal, (LazyReference, Document))
|
|
||||||
p.animal.fetch()
|
|
||||||
|
|
||||||
def test_generic_lazy_reference_bad_set(self):
|
|
||||||
class Animal(Document):
|
|
||||||
name = StringField()
|
|
||||||
tag = StringField()
|
|
||||||
|
|
||||||
class Ocurrence(Document):
|
|
||||||
person = StringField()
|
|
||||||
animal = GenericLazyReferenceField(choices=['Animal'])
|
|
||||||
|
|
||||||
Animal.drop_collection()
|
|
||||||
Ocurrence.drop_collection()
|
|
||||||
|
|
||||||
class BadDoc(Document):
|
|
||||||
pass
|
|
||||||
|
|
||||||
animal = Animal(name="Leopard", tag="heavy").save()
|
|
||||||
baddoc = BadDoc().save()
|
|
||||||
for bad in (
|
|
||||||
42,
|
|
||||||
'foo',
|
|
||||||
baddoc,
|
|
||||||
LazyReference(BadDoc, animal.pk)
|
|
||||||
):
|
|
||||||
with self.assertRaises(ValidationError):
|
|
||||||
p = Ocurrence(person="test", animal=bad).save()
|
|
||||||
|
|
||||||
def test_generic_lazy_reference_query_conversion(self):
|
|
||||||
class Member(Document):
|
|
||||||
user_num = IntField(primary_key=True)
|
|
||||||
|
|
||||||
class BlogPost(Document):
|
|
||||||
title = StringField()
|
|
||||||
author = GenericLazyReferenceField()
|
|
||||||
|
|
||||||
Member.drop_collection()
|
|
||||||
BlogPost.drop_collection()
|
|
||||||
|
|
||||||
m1 = Member(user_num=1)
|
|
||||||
m1.save()
|
|
||||||
m2 = Member(user_num=2)
|
|
||||||
m2.save()
|
|
||||||
|
|
||||||
post1 = BlogPost(title='post 1', author=m1)
|
|
||||||
post1.save()
|
|
||||||
|
|
||||||
post2 = BlogPost(title='post 2', author=m2)
|
|
||||||
post2.save()
|
|
||||||
|
|
||||||
post = BlogPost.objects(author=m1).first()
|
|
||||||
self.assertEqual(post.id, post1.id)
|
|
||||||
|
|
||||||
post = BlogPost.objects(author=m2).first()
|
|
||||||
self.assertEqual(post.id, post2.id)
|
|
||||||
|
|
||||||
# Same thing by passing a LazyReference instance
|
|
||||||
post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first()
|
|
||||||
self.assertEqual(post.id, post2.id)
|
|
||||||
|
|
||||||
def test_generic_lazy_reference_not_set(self):
|
|
||||||
class Animal(Document):
|
|
||||||
name = StringField()
|
|
||||||
tag = StringField()
|
|
||||||
|
|
||||||
class Ocurrence(Document):
|
|
||||||
person = StringField()
|
|
||||||
animal = GenericLazyReferenceField()
|
|
||||||
|
|
||||||
Animal.drop_collection()
|
|
||||||
Ocurrence.drop_collection()
|
|
||||||
|
|
||||||
Ocurrence(person='foo').save()
|
|
||||||
p = Ocurrence.objects.get()
|
|
||||||
self.assertIs(p.animal, None)
|
|
||||||
|
|
||||||
def test_generic_lazy_reference_embedded(self):
|
|
||||||
class Animal(Document):
|
|
||||||
name = StringField()
|
|
||||||
tag = StringField()
|
|
||||||
|
|
||||||
class EmbeddedOcurrence(EmbeddedDocument):
|
|
||||||
in_list = ListField(GenericLazyReferenceField())
|
|
||||||
direct = GenericLazyReferenceField()
|
|
||||||
|
|
||||||
class Ocurrence(Document):
|
|
||||||
in_list = ListField(GenericLazyReferenceField())
|
|
||||||
in_embedded = EmbeddedDocumentField(EmbeddedOcurrence)
|
|
||||||
direct = GenericLazyReferenceField()
|
|
||||||
|
|
||||||
Animal.drop_collection()
|
|
||||||
Ocurrence.drop_collection()
|
|
||||||
|
|
||||||
animal1 = Animal('doggo').save()
|
|
||||||
animal2 = Animal('cheeta').save()
|
|
||||||
|
|
||||||
def check_fields_type(occ):
|
|
||||||
self.assertIsInstance(occ.direct, LazyReference)
|
|
||||||
for elem in occ.in_list:
|
|
||||||
self.assertIsInstance(elem, LazyReference)
|
|
||||||
self.assertIsInstance(occ.in_embedded.direct, LazyReference)
|
|
||||||
for elem in occ.in_embedded.in_list:
|
|
||||||
self.assertIsInstance(elem, LazyReference)
|
|
||||||
|
|
||||||
occ = Ocurrence(
|
|
||||||
in_list=[animal1, animal2],
|
|
||||||
in_embedded={'in_list': [animal1, animal2], 'direct': animal1},
|
|
||||||
direct=animal1
|
|
||||||
).save()
|
|
||||||
check_fields_type(occ)
|
|
||||||
occ.reload()
|
|
||||||
check_fields_type(occ)
|
|
||||||
animal1_ref = {'_cls': 'Animal', '_ref': DBRef(animal1._get_collection_name(), animal1.pk)}
|
|
||||||
animal2_ref = {'_cls': 'Animal', '_ref': DBRef(animal2._get_collection_name(), animal2.pk)}
|
|
||||||
occ.direct = animal1_ref
|
|
||||||
occ.in_list = [animal1_ref, animal2_ref]
|
|
||||||
occ.in_embedded.direct = animal1_ref
|
|
||||||
occ.in_embedded.in_list = [animal1_ref, animal2_ref]
|
|
||||||
check_fields_type(occ)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
unittest.main()
|
unittest.main()
|
||||||
|
@@ -197,18 +197,14 @@ class OnlyExcludeAllTest(unittest.TestCase):
            title = StringField()
            text = StringField()

        class VariousData(EmbeddedDocument):
            some = BooleanField()

        class BlogPost(Document):
            content = StringField()
            author = EmbeddedDocumentField(User)
            comments = ListField(EmbeddedDocumentField(Comment))
            various = MapField(field=EmbeddedDocumentField(VariousData))

        BlogPost.drop_collection()

        post = BlogPost(content='Had a good coffee today...', various={'test_dynamic':{'some': True}})
        post = BlogPost(content='Had a good coffee today...')
        post.author = User(name='Test User')
        post.comments = [Comment(title='I aggree', text='Great post!'), Comment(title='Coffee', text='I hate coffee')]
        post.save()
@@ -219,9 +215,6 @@ class OnlyExcludeAllTest(unittest.TestCase):
        self.assertEqual(obj.author.name, 'Test User')
        self.assertEqual(obj.comments, [])

        obj = BlogPost.objects.only('various.test_dynamic.some').get()
        self.assertEqual(obj.various["test_dynamic"].some, True)

        obj = BlogPost.objects.only('content', 'comments.title',).get()
        self.assertEqual(obj.content, 'Had a good coffee today...')
        self.assertEqual(obj.author, None)
@@ -510,24 +510,6 @@ class GeoQueriesTest(MongoDBTestCase):
        roads = Road.objects.filter(poly__geo_intersects={"$geometry": polygon}).count()
        self.assertEqual(1, roads)

    def test_aspymongo_with_only(self):
        """Ensure as_pymongo works with only"""
        class Place(Document):
            location = PointField()

        Place.drop_collection()
        p = Place(location=[24.946861267089844, 60.16311983618494])
        p.save()
        qs = Place.objects().only('location')
        self.assertDictEqual(
            qs.as_pymongo()[0]['location'],
            {u'type': u'Point',
             u'coordinates': [
                 24.946861267089844,
                 60.16311983618494]
             }
        )

    def test_2dsphere_point_sets_correctly(self):
        class Location(Document):
            loc = PointField()
@@ -1,8 +1,6 @@
import unittest

from mongoengine import connect, Document, IntField, StringField, ListField
from mongoengine import connect, Document, IntField

from tests.utils import needs_mongodb_v26

__all__ = ("FindAndModifyTest",)

@@ -96,37 +94,6 @@ class FindAndModifyTest(unittest.TestCase):
        self.assertEqual(old_doc.to_mongo(), {"_id": 1})
        self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}])

    @needs_mongodb_v26
    def test_modify_with_push(self):
        class BlogPost(Document):
            tags = ListField(StringField())

        BlogPost.drop_collection()

        blog = BlogPost.objects.create()

        # Push a new tag via modify with new=False (default).
        BlogPost(id=blog.id).modify(push__tags='code')
        self.assertEqual(blog.tags, [])
        blog.reload()
        self.assertEqual(blog.tags, ['code'])

        # Push a new tag via modify with new=True.
        blog = BlogPost.objects(id=blog.id).modify(push__tags='java', new=True)
        self.assertEqual(blog.tags, ['code', 'java'])

        # Push a new tag with a positional argument.
        blog = BlogPost.objects(id=blog.id).modify(
            push__tags__0='python',
            new=True)
        self.assertEqual(blog.tags, ['python', 'code', 'java'])

        # Push multiple new tags with a positional argument.
        blog = BlogPost.objects(id=blog.id).modify(
            push__tags__1=['go', 'rust'],
            new=True)
        self.assertEqual(blog.tags, ['python', 'go', 'rust', 'code', 'java'])


if __name__ == '__main__':
    unittest.main()
@@ -1903,47 +1903,6 @@ class QuerySetTest(unittest.TestCase):

        BlogPost.drop_collection()

    @needs_mongodb_v26
    def test_update_push_with_position(self):
        """Ensure that the 'push' update with position works properly.
        """
        class BlogPost(Document):
            slug = StringField()
            tags = ListField(StringField())

        BlogPost.drop_collection()

        post = BlogPost.objects.create(slug="test")

        BlogPost.objects.filter(id=post.id).update(push__tags="code")
        BlogPost.objects.filter(id=post.id).update(push__tags__0=["mongodb", "python"])
        post.reload()
        self.assertEqual(post.tags, ['mongodb', 'python', 'code'])

        BlogPost.objects.filter(id=post.id).update(set__tags__2="java")
        post.reload()
        self.assertEqual(post.tags, ['mongodb', 'python', 'java'])

        #test push with singular value
        BlogPost.objects.filter(id=post.id).update(push__tags__0='scala')
        post.reload()
        self.assertEqual(post.tags, ['scala', 'mongodb', 'python', 'java'])

    def test_update_push_list_of_list(self):
        """Ensure that the 'push' update operation works in the list of list
        """
        class BlogPost(Document):
            slug = StringField()
            tags = ListField()

        BlogPost.drop_collection()

        post = BlogPost(slug="test").save()

        BlogPost.objects.filter(slug="test").update(push__tags=["value1", 123])
        post.reload()
        self.assertEqual(post.tags, [["value1", 123]])

    def test_update_push_and_pull_add_to_set(self):
        """Ensure that the 'pull' update operation works correctly.
        """
@@ -2086,23 +2045,6 @@ class QuerySetTest(unittest.TestCase):
        Site.objects(id=s.id).update_one(
            pull_all__collaborators__helpful__user=['Ross'])

    def test_pull_in_genericembedded_field(self):

        class Foo(EmbeddedDocument):
            name = StringField()

        class Bar(Document):
            foos = ListField(GenericEmbeddedDocumentField(
                choices=[Foo, ]))

        Bar.drop_collection()

        foo = Foo(name="bar")
        bar = Bar(foos=[foo]).save()
        Bar.objects(id=bar.id).update(pull__foos=foo)
        bar.reload()
        self.assertEqual(len(bar.foos), 0)

    def test_update_one_pop_generic_reference(self):

        class BlogTag(Document):
@@ -2196,24 +2138,6 @@ class QuerySetTest(unittest.TestCase):
        self.assertEqual(message.authors[1].name, "Ross")
        self.assertEqual(message.authors[2].name, "Adam")

    def test_set_generic_embedded_documents(self):

        class Bar(EmbeddedDocument):
            name = StringField()

        class User(Document):
            username = StringField()
            bar = GenericEmbeddedDocumentField(choices=[Bar,])

        User.drop_collection()

        User(username='abc').save()
        User.objects(username='abc').update(
            set__bar=Bar(name='test'), upsert=True)

        user = User.objects(username='abc').first()
        self.assertEqual(user.bar.name, "test")

    def test_reload_embedded_docs_instance(self):

        class SubDoc(EmbeddedDocument):
@@ -4123,35 +4047,6 @@ class QuerySetTest(unittest.TestCase):
|
|||||||
plist = list(Person.objects.scalar('name', 'state'))
|
plist = list(Person.objects.scalar('name', 'state'))
|
||||||
self.assertEqual(plist, [(u'Wilson JR', s1)])
|
self.assertEqual(plist, [(u'Wilson JR', s1)])
|
||||||
|
|
||||||
def test_generic_reference_field_with_only_and_as_pymongo(self):
|
|
||||||
class TestPerson(Document):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
class TestActivity(Document):
|
|
||||||
name = StringField()
|
|
||||||
owner = GenericReferenceField()
|
|
||||||
|
|
||||||
TestPerson.drop_collection()
|
|
||||||
TestActivity.drop_collection()
|
|
||||||
|
|
||||||
person = TestPerson(name='owner')
|
|
||||||
person.save()
|
|
||||||
|
|
||||||
a1 = TestActivity(name='a1', owner=person)
|
|
||||||
a1.save()
|
|
||||||
|
|
||||||
activity = TestActivity.objects(owner=person).scalar('id', 'owner').no_dereference().first()
|
|
||||||
self.assertEqual(activity[0], a1.pk)
|
|
||||||
self.assertEqual(activity[1]['_ref'], DBRef('test_person', person.pk))
|
|
||||||
|
|
||||||
activity = TestActivity.objects(owner=person).only('id', 'owner')[0]
|
|
||||||
self.assertEqual(activity.pk, a1.pk)
|
|
||||||
self.assertEqual(activity.owner, person)
|
|
||||||
|
|
||||||
activity = TestActivity.objects(owner=person).only('id', 'owner').as_pymongo().first()
|
|
||||||
self.assertEqual(activity['_id'], a1.pk)
|
|
||||||
self.assertTrue(activity['owner']['_ref'], DBRef('test_person', person.pk))
|
|
||||||
|
|
||||||
def test_scalar_db_field(self):
|
def test_scalar_db_field(self):
|
||||||
|
|
||||||
class TestDoc(Document):
|
class TestDoc(Document):
|
||||||
@@ -4497,44 +4392,21 @@ class QuerySetTest(unittest.TestCase):
|
|||||||
self.assertEqual(doc_objects, Doc.objects.from_json(json_data))
|
self.assertEqual(doc_objects, Doc.objects.from_json(json_data))
|
||||||
|
|
||||||
def test_as_pymongo(self):
|
def test_as_pymongo(self):
|
||||||
from decimal import Decimal
|
|
||||||
|
|
||||||
class LastLogin(EmbeddedDocument):
|
from decimal import Decimal
|
||||||
location = StringField()
|
|
||||||
ip = StringField()
|
|
||||||
|
|
||||||
class User(Document):
|
class User(Document):
|
||||||
id = ObjectIdField('_id')
|
id = ObjectIdField('_id')
|
||||||
name = StringField()
|
name = StringField()
|
||||||
age = IntField()
|
age = IntField()
|
||||||
price = DecimalField()
|
price = DecimalField()
|
||||||
last_login = EmbeddedDocumentField(LastLogin)
|
|
||||||
|
|
||||||
User.drop_collection()
|
User.drop_collection()
|
||||||
|
User(name="Bob Dole", age=89, price=Decimal('1.11')).save()
|
||||||
User.objects.create(name="Bob Dole", age=89, price=Decimal('1.11'))
|
User(name="Barack Obama", age=51, price=Decimal('2.22')).save()
|
||||||
User.objects.create(
|
|
||||||
name="Barack Obama",
|
|
||||||
age=51,
|
|
||||||
price=Decimal('2.22'),
|
|
||||||
last_login=LastLogin(
|
|
||||||
location='White House',
|
|
||||||
ip='104.107.108.116'
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
results = User.objects.as_pymongo()
|
|
||||||
self.assertEqual(
|
|
||||||
set(results[0].keys()),
|
|
||||||
set(['_id', 'name', 'age', 'price'])
|
|
||||||
)
|
|
||||||
self.assertEqual(
|
|
||||||
set(results[1].keys()),
|
|
||||||
set(['_id', 'name', 'age', 'price', 'last_login'])
|
|
||||||
)
|
|
||||||
|
|
||||||
results = User.objects.only('id', 'name').as_pymongo()
|
results = User.objects.only('id', 'name').as_pymongo()
|
||||||
self.assertEqual(set(results[0].keys()), set(['_id', 'name']))
|
self.assertEqual(sorted(results[0].keys()), sorted(['_id', 'name']))
|
||||||
|
|
||||||
users = User.objects.only('name', 'price').as_pymongo()
|
users = User.objects.only('name', 'price').as_pymongo()
|
||||||
results = list(users)
|
results = list(users)
|
||||||
@@ -4545,20 +4417,16 @@ class QuerySetTest(unittest.TestCase):
|
|||||||
self.assertEqual(results[1]['name'], 'Barack Obama')
|
self.assertEqual(results[1]['name'], 'Barack Obama')
|
||||||
self.assertEqual(results[1]['price'], 2.22)
|
self.assertEqual(results[1]['price'], 2.22)
|
||||||
|
|
||||||
users = User.objects.only('name', 'last_login').as_pymongo()
|
# Test coerce_types
|
||||||
|
users = User.objects.only(
|
||||||
|
'name', 'price').as_pymongo(coerce_types=True)
|
||||||
results = list(users)
|
results = list(users)
|
||||||
self.assertTrue(isinstance(results[0], dict))
|
self.assertTrue(isinstance(results[0], dict))
|
||||||
self.assertTrue(isinstance(results[1], dict))
|
self.assertTrue(isinstance(results[1], dict))
|
||||||
self.assertEqual(results[0], {
|
self.assertEqual(results[0]['name'], 'Bob Dole')
|
||||||
'name': 'Bob Dole'
|
self.assertEqual(results[0]['price'], Decimal('1.11'))
|
||||||
})
|
self.assertEqual(results[1]['name'], 'Barack Obama')
|
||||||
self.assertEqual(results[1], {
|
self.assertEqual(results[1]['price'], Decimal('2.22'))
|
||||||
'name': 'Barack Obama',
|
|
||||||
'last_login': {
|
|
||||||
'location': 'White House',
|
|
||||||
'ip': '104.107.108.116'
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
def test_as_pymongo_json_limit_fields(self):
|
def test_as_pymongo_json_limit_fields(self):
|
||||||
|
|
||||||
@@ -4722,6 +4590,7 @@ class QuerySetTest(unittest.TestCase):

    def test_no_cache(self):
        """Ensure you can add meta data to file"""

        class Noddy(Document):
            fields = DictField()

@@ -4739,19 +4608,15 @@ class QuerySetTest(unittest.TestCase):

        self.assertEqual(len(list(docs)), 100)

        # Can't directly get a length of a no-cache queryset.
        with self.assertRaises(TypeError):
            len(docs)

        # Another iteration over the queryset should result in another db op.
        with query_counter() as q:
            self.assertEqual(q, 0)
            list(docs)
            self.assertEqual(q, 1)

        # ... and another one to double-check.
        with query_counter() as q:
            list(docs)
            self.assertEqual(q, 1)
            self.assertEqual(q, 2)

    def test_nested_queryset_iterator(self):
        # Try iterating the same queryset twice, nested.
@@ -4840,30 +4705,6 @@ class QuerySetTest(unittest.TestCase):
        for obj in C.objects.no_sub_classes():
            self.assertEqual(obj.__class__, C)

    def test_query_generic_embedded_document(self):
        """Ensure that querying sub field on generic_embedded_field works
        """
        class A(EmbeddedDocument):
            a_name = StringField()

        class B(EmbeddedDocument):
            b_name = StringField()

        class Doc(Document):
            document = GenericEmbeddedDocumentField(choices=(A, B))

        Doc.drop_collection()
        Doc(document=A(a_name='A doc')).save()
        Doc(document=B(b_name='B doc')).save()

        # Using raw in filter working fine
        self.assertEqual(Doc.objects(
            __raw__={'document.a_name': 'A doc'}).count(), 1)
        self.assertEqual(Doc.objects(
            __raw__={'document.b_name': 'B doc'}).count(), 1)
        self.assertEqual(Doc.objects(document__a_name='A doc').count(), 1)
        self.assertEqual(Doc.objects(document__b_name='B doc').count(), 1)

    def test_query_reference_to_custom_pk_doc(self):

        class A(Document):
|
@@ -1,6 +1,6 @@
import unittest

from mongoengine.base.datastructures import StrictDict
from mongoengine.base.datastructures import StrictDict, SemiStrictDict


class TestStrictDict(unittest.TestCase):
@@ -76,5 +76,44 @@ class TestStrictDict(unittest.TestCase):
|
|||||||
assert dict(**d) == {'a': 1, 'b': 2}
|
assert dict(**d) == {'a': 1, 'b': 2}
|
||||||
|
|
||||||
|
|
||||||
|
class TestSemiSrictDict(TestStrictDict):
|
||||||
|
def strict_dict_class(self, *args, **kwargs):
|
||||||
|
return SemiStrictDict.create(*args, **kwargs)
|
||||||
|
|
||||||
|
def test_init_fails_on_nonexisting_attrs(self):
|
||||||
|
# disable irrelevant test
|
||||||
|
pass
|
||||||
|
|
||||||
|
def test_setattr_raises_on_nonexisting_attr(self):
|
||||||
|
# disable irrelevant test
|
||||||
|
pass
|
||||||
|
|
||||||
|
def test_setattr_getattr_nonexisting_attr_succeeds(self):
|
||||||
|
d = self.dtype()
|
||||||
|
d.x = 1
|
||||||
|
self.assertEqual(d.x, 1)
|
||||||
|
|
||||||
|
def test_init_succeeds_with_nonexisting_attrs(self):
|
||||||
|
d = self.dtype(a=1, b=1, c=1, x=2)
|
||||||
|
self.assertEqual((d.a, d.b, d.c, d.x), (1, 1, 1, 2))
|
||||||
|
|
||||||
|
def test_iter_with_nonexisting_attrs(self):
|
||||||
|
d = self.dtype(a=1, b=1, c=1, x=2)
|
||||||
|
self.assertEqual(list(d), ['a', 'b', 'c', 'x'])
|
||||||
|
|
||||||
|
def test_iteritems_with_nonexisting_attrs(self):
|
||||||
|
d = self.dtype(a=1, b=1, c=1, x=2)
|
||||||
|
self.assertEqual(list(d.iteritems()), [('a', 1), ('b', 1), ('c', 1), ('x', 2)])
|
||||||
|
|
||||||
|
def tets_cmp_with_strict_dicts(self):
|
||||||
|
d = self.dtype(a=1, b=1, c=1)
|
||||||
|
dd = StrictDict.create(("a", "b", "c"))(a=1, b=1, c=1)
|
||||||
|
self.assertEqual(d, dd)
|
||||||
|
|
||||||
|
def test_cmp_with_strict_dict_with_nonexisting_attrs(self):
|
||||||
|
d = self.dtype(a=1, b=1, c=1, x=2)
|
||||||
|
dd = StrictDict.create(("a", "b", "c", "x"))(a=1, b=1, c=1, x=2)
|
||||||
|
self.assertEqual(d, dd)
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
unittest.main()
|
unittest.main()
|
||||||
|
@@ -2,10 +2,15 @@
import unittest

from bson import DBRef, ObjectId
from collections import OrderedDict

from mongoengine import *
from mongoengine.connection import get_db
from mongoengine.context_managers import query_counter
from mongoengine.python_support import IS_PYMONGO_3
from mongoengine.base import TopLevelDocumentMetaclass
if IS_PYMONGO_3:
    from bson import CodecOptions


class FieldTest(unittest.TestCase):
@@ -1287,5 +1292,70 @@ class FieldTest(unittest.TestCase):
|
|||||||
|
|
||||||
self.assertEqual(q, 2)
|
self.assertEqual(q, 2)
|
||||||
|
|
||||||
|
def test_dynamic_field_dereference(self):
|
||||||
|
class Merchandise(Document):
|
||||||
|
name = StringField()
|
||||||
|
price = IntField()
|
||||||
|
|
||||||
|
class Store(Document):
|
||||||
|
merchandises = DynamicField()
|
||||||
|
|
||||||
|
Merchandise.drop_collection()
|
||||||
|
Store.drop_collection()
|
||||||
|
|
||||||
|
merchandises = {
|
||||||
|
'#1': Merchandise(name='foo', price=100).save(),
|
||||||
|
'#2': Merchandise(name='bar', price=120).save(),
|
||||||
|
'#3': Merchandise(name='baz', price=110).save(),
|
||||||
|
}
|
||||||
|
Store(merchandises=merchandises).save()
|
||||||
|
|
||||||
|
store = Store.objects().first()
|
||||||
|
for obj in store.merchandises.values():
|
||||||
|
self.assertFalse(isinstance(obj, Merchandise))
|
||||||
|
|
||||||
|
store.select_related()
|
||||||
|
for obj in store.merchandises.values():
|
||||||
|
self.assertTrue(isinstance(obj, Merchandise))
|
||||||
|
|
||||||
|
def test_dynamic_field_dereference_with_ordering_guarantee_on_pymongo3(self):
|
||||||
|
# This is because 'codec_options' is supported on pymongo3 or later
|
||||||
|
if IS_PYMONGO_3:
|
||||||
|
class OrderedDocument(Document):
|
||||||
|
my_metaclass = TopLevelDocumentMetaclass
|
||||||
|
__metaclass__ = TopLevelDocumentMetaclass
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def _get_collection(cls):
|
||||||
|
collection = super(OrderedDocument, cls)._get_collection()
|
||||||
|
opts = CodecOptions(document_class=OrderedDict)
|
||||||
|
|
||||||
|
return collection.with_options(codec_options=opts)
|
||||||
|
|
||||||
|
class Merchandise(Document):
|
||||||
|
name = StringField()
|
||||||
|
price = IntField()
|
||||||
|
|
||||||
|
class Store(OrderedDocument):
|
||||||
|
merchandises = DynamicField(container_class=OrderedDict)
|
||||||
|
|
||||||
|
Merchandise.drop_collection()
|
||||||
|
Store.drop_collection()
|
||||||
|
|
||||||
|
merchandises = OrderedDict()
|
||||||
|
merchandises['#1'] = Merchandise(name='foo', price=100).save()
|
||||||
|
merchandises['#2'] = Merchandise(name='bar', price=120).save()
|
||||||
|
merchandises['#3'] = Merchandise(name='baz', price=110).save()
|
||||||
|
|
||||||
|
Store(merchandises=merchandises).save()
|
||||||
|
|
||||||
|
store = Store.objects().first()
|
||||||
|
|
||||||
|
store.select_related()
|
||||||
|
|
||||||
|
# confirms that the load data order is same with the one at storing
|
||||||
|
self.assertTrue(type(store.merchandises), OrderedDict)
|
||||||
|
self.assertEqual(','.join(store.merchandises.keys()), '#1,#2,#3')
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
unittest.main()
|
unittest.main()
|
||||||