Merge branch 'master' into limit_behaviour

commit e79ea7a2cf

.install_mongodb_on_travis.sh
@@ -1,5 +1,6 @@
#!/bin/bash

+sudo apt-get remove mongodb-org-server
sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10

if [ "$MONGODB" = "2.4" ]; then
@@ -13,7 +14,7 @@ elif [ "$MONGODB" = "2.6" ]; then
  sudo apt-get install mongodb-org-server=2.6.12
  # service should be started automatically
elif [ "$MONGODB" = "3.0" ]; then
-  echo "deb http://repo.mongodb.org/apt/ubuntu precise/mongodb-org/3.0 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb.list
+  echo "deb http://repo.mongodb.org/apt/ubuntu trusty/mongodb-org/3.0 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb.list
  sudo apt-get update
  sudo apt-get install mongodb-org-server=3.0.14
  # service should be started automatically
@@ -21,3 +22,6 @@ else
  echo "Invalid MongoDB version, expected 2.4, 2.6, or 3.0."
  exit 1
fi;
+
+mkdir db
+1>db/logs mongod --dbpath=db &
.travis.yml (27 lines changed)
@@ -15,12 +15,11 @@ language: python
python:
- 2.7
- 3.5
- 3.6
- pypy

env:
- MONGODB=2.6 PYMONGO=2.7
- MONGODB=2.6 PYMONGO=2.8
- MONGODB=2.6 PYMONGO=3.0
- MONGODB=2.6 PYMONGO=3.x

matrix:
  # Finish the build as soon as one job fails
@@ -28,20 +27,22 @@ matrix:

  include:
  - python: 2.7
-    env: MONGODB=2.4 PYMONGO=2.7
+    env: MONGODB=2.4 PYMONGO=3.5
  - python: 2.7
    env: MONGODB=2.4 PYMONGO=3.0
  - python: 2.7
-    env: MONGODB=3.0 PYMONGO=3.0
+    env: MONGODB=3.0 PYMONGO=3.x
  - python: 3.5
-    env: MONGODB=2.4 PYMONGO=2.7
+    env: MONGODB=2.4 PYMONGO=3.5
  - python: 3.5
    env: MONGODB=2.4 PYMONGO=3.0
  - python: 3.5
-    env: MONGODB=3.0 PYMONGO=3.0
+    env: MONGODB=3.0 PYMONGO=3.x
  - python: 3.6
    env: MONGODB=2.4 PYMONGO=3.5
  - python: 3.6
    env: MONGODB=3.0 PYMONGO=3.x

before_install:
- bash .install_mongodb_on_travis.sh
- sleep 15 # https://docs.travis-ci.com/user/database-setup/#MongoDB-does-not-immediately-accept-connections
- mongo --eval 'db.version();'

install:
- sudo apt-get install python-dev python3-dev libopenjpeg-dev zlib1g-dev libjpeg-turbo8-dev
@@ -90,11 +91,11 @@ deploy:
  distributions: "sdist bdist_wheel"

  # only deploy on tagged commits (aka GitHub releases) and only for the
-  # parent repo's builds running Python 2.7 along with dev PyMongo (we run
+  # parent repo's builds running Python 2.7 along with PyMongo v3.x (we run
  # Travis against many different Python and PyMongo versions and we don't
  # want the deploy to occur multiple times).
  on:
    tags: true
    repo: MongoEngine/mongoengine
-    condition: "$PYMONGO = 3.0"
+    condition: "$PYMONGO = 3.x"
    python: 2.7
AUTHORS (4 lines changed)
@@ -244,4 +244,6 @@ that much better:
* Stanislav Kaledin (https://github.com/sallyruthstruik)
* Dmitry Yantsen (https://github.com/mrTable)
* Renjianxin (https://github.com/Davidrjx)
* Erdenezul Batmunkh (https://github.com/erdenezul)
+* Andy Yankovsky (https://github.com/werat)
+* Bastien Gérard (https://github.com/bagerard)
@@ -22,8 +22,11 @@ Supported Interpreters

MongoEngine supports CPython 2.7 and newer. Language
features not supported by all interpreters can not be used.
-Please also ensure that your code is properly converted by
-`2to3 <http://docs.python.org/library/2to3.html>`_ for Python 3 support.
+The codebase is written in python 2 so you must be using python 2
+when developing new features. Compatibility of the library with Python 3
+relies on the 2to3 package that gets executed as part of the installation
+build. You should ensure that your code is properly converted by
+`2to3 <http://docs.python.org/library/2to3.html>`_.

Style Guide
-----------
@@ -87,7 +87,9 @@ Fields
.. autoclass:: mongoengine.fields.DictField
.. autoclass:: mongoengine.fields.MapField
.. autoclass:: mongoengine.fields.ReferenceField
+.. autoclass:: mongoengine.fields.LazyReferenceField
.. autoclass:: mongoengine.fields.GenericReferenceField
+.. autoclass:: mongoengine.fields.GenericLazyReferenceField
.. autoclass:: mongoengine.fields.CachedReferenceField
.. autoclass:: mongoengine.fields.BinaryField
.. autoclass:: mongoengine.fields.FileField
@@ -6,6 +6,36 @@ Development
===========
- QuerySet limit function behaviour: Passing 0 as parameter will return all the documents in the cursor #1611
- (Fill this out as you fix issues and develop your features).

+Changes in 0.15.4
+=================
+- Added `DateField` #513
+
+Changes in 0.15.3
+=================
+- Subfield resolve error in generic_embedded_document query #1651 #1652
+- use each modifier only with $position #1673 #1675
+- Improve LazyReferenceField and GenericLazyReferenceField with nested fields #1704
+- Fix validation error instance in GenericEmbeddedDocumentField #1067
+- Update cached fields when fields argument is given #1712
+- Add a db parameter to register_connection for compatibility with connect
+- Use insert_one, insert_many in Document.insert #1491
+- Use new update_one, update_many on document/queryset update #1491
+- Fix reload(fields) affect changed fields #1371
+- Fix Read-only access to database fails when trying to create indexes #1338
+
+Changes in 0.15.0
+=================
+- Add LazyReferenceField and GenericLazyReferenceField to address #1230
+
+Changes in 0.14.1
+=================
+- Removed SemiStrictDict and started using a regular dict for `BaseDocument._data` #1630
+- Added support for the `$position` param in the `$push` operator #1566
+- Fixed `DateTimeField` interpreting an empty string as today #1533
+- Added a missing `__ne__` method to the `GridFSProxy` class #1632
+- Fixed `BaseQuerySet._fields_to_db_fields` #1553
+
Changes in 0.14.0
=================
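A minimal sketch of the limit-behaviour entry above, assuming this branch's
semantics and a hypothetical `Page` document::

    from mongoengine import Document, StringField, connect

    class Page(Document):
        title = StringField()

    connect('limit_demo')
    Page.drop_collection()
    for i in range(5):
        Page(title='page-%d' % i).save()

    assert Page.objects.limit(2).count(with_limit_and_skip=True) == 2
    # With this branch, limit(0) applies no limit: every document comes back
    assert len(list(Page.objects.limit(0))) == 5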
@@ -18,10 +18,10 @@ provide the :attr:`host` and :attr:`port` arguments to

    connect('project1', host='192.168.1.35', port=12345)

-If the database requires authentication, :attr:`username` and :attr:`password`
-arguments should be provided::
+If the database requires authentication, :attr:`username`, :attr:`password`
+and :attr:`authentication_source` arguments should be provided::

-    connect('project1', username='webapp', password='pwd123')
+    connect('project1', username='webapp', password='pwd123', authentication_source='admin')

URI style connections are also supported -- just supply the URI as
the :attr:`host` to
@@ -22,7 +22,7 @@ objects** as class attributes to the document class::

    class Page(Document):
        title = StringField(max_length=200, required=True)
-        date_modified = DateTimeField(default=datetime.datetime.now)
+        date_modified = DateTimeField(default=datetime.datetime.utcnow)

As BSON (the binary format for storing data in mongodb) is order dependent,
documents are serialized based on their field order.
@@ -80,6 +80,7 @@ are as follows:
* :class:`~mongoengine.fields.FloatField`
* :class:`~mongoengine.fields.GenericEmbeddedDocumentField`
* :class:`~mongoengine.fields.GenericReferenceField`
+* :class:`~mongoengine.fields.GenericLazyReferenceField`
* :class:`~mongoengine.fields.GeoPointField`
* :class:`~mongoengine.fields.ImageField`
* :class:`~mongoengine.fields.IntField`
@@ -87,6 +88,7 @@ are as follows:
* :class:`~mongoengine.fields.MapField`
* :class:`~mongoengine.fields.ObjectIdField`
* :class:`~mongoengine.fields.ReferenceField`
+* :class:`~mongoengine.fields.LazyReferenceField`
* :class:`~mongoengine.fields.SequenceField`
* :class:`~mongoengine.fields.SortedListField`
* :class:`~mongoengine.fields.StringField`
@@ -224,7 +226,7 @@ store; in this situation a :class:`~mongoengine.fields.DictField` is appropriate
        user = ReferenceField(User)
        answers = DictField()

-    survey_response = SurveyResponse(date=datetime.now(), user=request.user)
+    survey_response = SurveyResponse(date=datetime.utcnow(), user=request.user)
    response_form = ResponseForm(request.POST)
    survey_response.answers = response_form.cleaned_data()
    survey_response.save()
@@ -511,6 +513,9 @@ If a dictionary is passed then the following options are available:
    Allows you to automatically expire data from a collection by setting the
    time in seconds to expire the field.

+:attr:`name` (Optional)
+    Allows you to specify a name for the index
+
.. note::

    Inheritance adds extra fields indices see: :ref:`document-inheritance`.
@@ -524,14 +529,15 @@ There are a few top level defaults for all indexes that can be set::

        title = StringField()
        rating = StringField()
        meta = {
-            'index_options': {},
+            'index_opts': {},
            'index_background': True,
+            'index_cls': False,
+            'auto_create_index': True,
            'index_drop_dups': True,
-            'index_cls': False
        }


-:attr:`index_options` (Optional)
+:attr:`index_opts` (Optional)
    Set any default index options - see the `full options list <http://docs.mongodb.org/manual/reference/method/db.collection.ensureIndex/#db.collection.ensureIndex>`_

:attr:`index_background` (Optional)
@@ -540,6 +546,12 @@ There are a few top level defaults for all indexes that can be set::
:attr:`index_cls` (Optional)
    A way to turn off a specific index for _cls.

+:attr:`auto_create_index` (Optional)
+    When this is True (default), MongoEngine will ensure that the correct
+    indexes exist in MongoDB each time a command is run. This can be disabled
+    in systems where indexes are managed separately. Disabling this will improve
+    performance.
+
:attr:`index_drop_dups` (Optional)
    Set the default value for if an index should drop duplicates

@@ -618,7 +630,7 @@ collection after a given period. See the official
documentation for more information. A common usecase might be session data::

    class Session(Document):
-        created = DateTimeField(default=datetime.now)
+        created = DateTimeField(default=datetime.utcnow)
        meta = {
            'indexes': [
                {'fields': ['created'], 'expireAfterSeconds': 3600}
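A short sketch of the `auto_create_index` option documented above, for
deployments where indexes are managed out-of-band (hypothetical model)::

    import datetime
    from mongoengine import Document, DateTimeField

    class Event(Document):
        created = DateTimeField(default=datetime.datetime.utcnow)
        meta = {
            'auto_create_index': False,  # don't ensure indexes on every command
            'indexes': [
                {'fields': ['created'], 'expireAfterSeconds': 3600},
            ],
        }

    # Indexes are then created explicitly, e.g. in a deploy/migration step:
    Event.ensure_indexes()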
@@ -57,7 +57,8 @@ document values for example::

    def clean(self):
        """Ensures that only published essays have a `pub_date` and
-        automatically sets the pub_date if published and not set"""
+        automatically sets `pub_date` if essay is published and `pub_date`
+        is not set"""
        if self.status == 'Draft' and self.pub_date is not None:
            msg = 'Draft entries should not have a publication date.'
            raise ValidationError(msg)
@@ -53,7 +53,8 @@ Deletion

Deleting stored files is achieved with the :func:`delete` method::

-    marmot.photo.delete()
+    marmot.photo.delete()  # Deletes the GridFS document
+    marmot.save()  # Saves the GridFS reference (being None) contained in the marmot instance

.. warning::

@@ -71,4 +72,5 @@ Files can be replaced with the :func:`replace` method. This works just like
the :func:`put` method so even metadata can (and should) be replaced::

    another_marmot = open('another_marmot.png', 'rb')
-    marmot.photo.replace(another_marmot, content_type='image/png')
+    marmot.photo.replace(another_marmot, content_type='image/png')  # Replaces the GridFS document
+    marmot.save()  # Replaces the GridFS reference contained in marmot instance
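The full flow implied by the two hunks above, as a sketch (reusing the docs'
`marmot` instance with its `photo` FileField)::

    marmot.photo.put(open('marmot.png', 'rb'), content_type='image/png')
    marmot.save()          # persist the GridFS reference on the document

    marmot.photo.delete()  # delete the GridFS document...
    marmot.save()          # ...and persist the now-empty reference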
@@ -43,10 +43,10 @@ Available signals include:
  has taken place but before saving.

`post_save`
-  Called within :meth:`~mongoengine.Document.save` after all actions
-  (validation, insert/update, cascades, clearing dirty flags) have completed
-  successfully. Passed the additional boolean keyword argument `created` to
-  indicate if the save was an insert or an update.
+  Called within :meth:`~mongoengine.Document.save` after most actions
+  (validation, insert/update, and cascades, but not clearing dirty flags) have
+  completed successfully. Passed the additional boolean keyword argument
+  `created` to indicate if the save was an insert or an update.

`pre_delete`
  Called within :meth:`~mongoengine.Document.delete` prior to
@@ -113,6 +113,10 @@ handlers within your subclass::

    signals.pre_save.connect(Author.pre_save, sender=Author)
    signals.post_save.connect(Author.post_save, sender=Author)

+.. warning::
+
+   Note that EmbeddedDocument only supports pre/post_init signals. pre/post_save, etc should be attached to Document's class only. Attaching pre_save to an EmbeddedDocument is ignored silently.
+
Finally, you can also use this small decorator to quickly create a number of
signals and attach them to your :class:`~mongoengine.Document` or
:class:`~mongoengine.EmbeddedDocument` subclasses as class decorators::
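A sketch of a `post_save` handler using the `created` flag described above
(reusing the docs' `Author` class)::

    from mongoengine import signals

    def notify_saved(sender, document, **kwargs):
        # `created` is True for inserts, False for updates
        if kwargs.get('created'):
            print('inserted %s' % document)
        else:
            print('updated %s' % document)

    signals.post_save.connect(notify_saved, sender=Author)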
@@ -48,4 +48,4 @@ Ordering by text score

::

-    objects = News.objects.search('mongo').order_by('$text_score')
+    objects = News.objects.search_text('mongo').order_by('$text_score')
@@ -86,7 +86,7 @@ of them stand out as particularly intuitive solutions.
Posts
^^^^^

-Happily mongoDB *isn't* a relational database, so we're not going to do it that
+Happily MongoDB *isn't* a relational database, so we're not going to do it that
way. As it turns out, we can use MongoDB's schemaless nature to provide us with
a much nicer solution. We will store all of the posts in *one collection* and
each post type will only store the fields it needs. If we later want to add
@@ -153,7 +153,7 @@ post. This works, but there is no real reason to be storing the comments
separately from their associated posts, other than to work around the
relational model. Using MongoDB we can store the comments as a list of
*embedded documents* directly on a post document. An embedded document should
-be treated no differently that a regular document; it just doesn't have its own
+be treated no differently than a regular document; it just doesn't have its own
collection in the database. Using MongoEngine, we can define the structure of
embedded documents, along with utility methods, in exactly the same way we do
with regular documents::
@@ -23,7 +23,7 @@ __all__ = (list(document.__all__) + list(fields.__all__) +
           list(signals.__all__) + list(errors.__all__))


-VERSION = (0, 14, 0)
+VERSION = (0, 15, 3)


def get_version():
@@ -15,7 +15,7 @@ __all__ = (
    'UPDATE_OPERATORS', '_document_registry', 'get_document',

    # datastructures
-    'BaseDict', 'BaseList', 'EmbeddedDocumentList',
+    'BaseDict', 'BaseList', 'EmbeddedDocumentList', 'LazyReference',

    # document
    'BaseDocument',
@@ -3,9 +3,10 @@ from mongoengine.errors import NotRegistered

__all__ = ('UPDATE_OPERATORS', 'get_document', '_document_registry')


-UPDATE_OPERATORS = set(['set', 'unset', 'inc', 'dec', 'pop', 'push',
-                        'push_all', 'pull', 'pull_all', 'add_to_set',
-                        'set_on_insert', 'min', 'max', 'rename'])
+UPDATE_OPERATORS = set(['set', 'unset', 'inc', 'dec', 'mul',
+                        'pop', 'push', 'push_all', 'pull',
+                        'pull_all', 'add_to_set', 'set_on_insert',
+                        'min', 'max', 'rename'])


_document_registry = {}
@@ -1,12 +1,13 @@
import itertools
import weakref

+from bson import DBRef
import six

from mongoengine.common import _import_class
from mongoengine.errors import DoesNotExist, MultipleObjectsReturned

-__all__ = ('BaseDict', 'BaseList', 'EmbeddedDocumentList')
+__all__ = ('BaseDict', 'BaseList', 'EmbeddedDocumentList', 'LazyReference')


class BaseDict(dict):
@@ -127,8 +128,8 @@ class BaseList(list):
        return value

    def __iter__(self):
-        for i in xrange(self.__len__()):
-            yield self[i]
+        for v in super(BaseList, self).__iter__():
+            yield v

    def __setitem__(self, key, value, *args, **kwargs):
        if isinstance(key, slice):
@@ -137,7 +138,7 @@ class BaseList(list):
            self._mark_as_changed(key)
        return super(BaseList, self).__setitem__(key, value)

-    def __delitem__(self, key, *args, **kwargs):
+    def __delitem__(self, key):
        self._mark_as_changed()
        return super(BaseList, self).__delitem__(key)

@@ -186,7 +187,7 @@ class BaseList(list):
        self._mark_as_changed()
        return super(BaseList, self).remove(*args, **kwargs)

-    def reverse(self, *args, **kwargs):
+    def reverse(self):
        self._mark_as_changed()
        return super(BaseList, self).reverse()

@@ -233,6 +234,9 @@ class EmbeddedDocumentList(BaseList):
        Filters the list by only including embedded documents with the
        given keyword arguments.

+        This method only supports simple comparison (e.g: .filter(name='John Doe'))
+        and does not support operators like __gte, __lte, __icontains like queryset.filter does
+
        :param kwargs: The keyword arguments corresponding to the fields to
            filter on. *Multiple arguments are treated as if they are ANDed
            together.*
@@ -350,7 +354,8 @@ class EmbeddedDocumentList(BaseList):

    def update(self, **update):
        """
-        Updates the embedded documents with the given update values.
+        Updates the embedded documents with the given replacement values. This
+        function does not support mongoDB update operators such as ``inc__``.

        .. note::
            The embedded document changes are not automatically saved
@@ -447,40 +452,40 @@ class StrictDict(object):
        return cls._classes[allowed_keys]


-class SemiStrictDict(StrictDict):
-    __slots__ = ('_extras', )
-    _classes = {}
-
-    def __getattr__(self, attr):
-        try:
-            super(SemiStrictDict, self).__getattr__(attr)
-        except AttributeError:
-            try:
-                return self.__getattribute__('_extras')[attr]
-            except KeyError as e:
-                raise AttributeError(e)
-
-    def __setattr__(self, attr, value):
-        try:
-            super(SemiStrictDict, self).__setattr__(attr, value)
-        except AttributeError:
-            try:
-                self._extras[attr] = value
-            except AttributeError:
-                self._extras = {attr: value}
-
-    def __delattr__(self, attr):
-        try:
-            super(SemiStrictDict, self).__delattr__(attr)
-        except AttributeError:
-            try:
-                del self._extras[attr]
-            except KeyError as e:
-                raise AttributeError(e)
-
-    def __iter__(self):
-        try:
-            extras_iter = iter(self.__getattribute__('_extras'))
-        except AttributeError:
-            extras_iter = ()
-        return itertools.chain(super(SemiStrictDict, self).__iter__(), extras_iter)
+class LazyReference(DBRef):
+    __slots__ = ('_cached_doc', 'passthrough', 'document_type')
+
+    def fetch(self, force=False):
+        if not self._cached_doc or force:
+            self._cached_doc = self.document_type.objects.get(pk=self.pk)
+            if not self._cached_doc:
+                raise DoesNotExist('Trying to dereference unknown document %s' % (self))
+        return self._cached_doc
+
+    @property
+    def pk(self):
+        return self.id
+
+    def __init__(self, document_type, pk, cached_doc=None, passthrough=False):
+        self.document_type = document_type
+        self._cached_doc = cached_doc
+        self.passthrough = passthrough
+        super(LazyReference, self).__init__(self.document_type._get_collection_name(), pk)
+
+    def __getitem__(self, name):
+        if not self.passthrough:
+            raise KeyError()
+        document = self.fetch()
+        return document[name]
+
+    def __getattr__(self, name):
+        if not object.__getattribute__(self, 'passthrough'):
+            raise AttributeError()
+        document = self.fetch()
+        try:
+            return document[name]
+        except KeyError:
+            raise AttributeError()
+
+    def __repr__(self):
+        return "<LazyReference(%s, %r)>" % (self.document_type, self.pk)
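A usage sketch for the new LazyReference class above, as surfaced by
LazyReferenceField (hypothetical models)::

    from mongoengine import Document, StringField, LazyReferenceField

    class Author(Document):
        name = StringField()

    class Book(Document):
        author = LazyReferenceField(Author)

    book = Book.objects.first()
    ref = book.author      # a LazyReference; no query has run yet
    ref.pk                 # the referenced id, still no query
    author = ref.fetch()   # one query; the result is cached on the reference

    # With LazyReferenceField(Author, passthrough=True), attribute access
    # such as book.author.name would fetch (and cache) transparently.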
@@ -13,13 +13,14 @@ from mongoengine import signals
from mongoengine.base.common import get_document
from mongoengine.base.datastructures import (BaseDict, BaseList,
                                             EmbeddedDocumentList,
-                                             SemiStrictDict, StrictDict)
+                                             LazyReference,
+                                             StrictDict)
from mongoengine.base.fields import ComplexBaseField
from mongoengine.common import _import_class
from mongoengine.errors import (FieldDoesNotExist, InvalidDocumentError,
                                LookUpError, OperationError, ValidationError)

-__all__ = ('BaseDocument',)
+__all__ = ('BaseDocument', 'NON_FIELD_ERRORS')

NON_FIELD_ERRORS = '__all__'

@@ -79,8 +80,7 @@ class BaseDocument(object):
        if self.STRICT and not self._dynamic:
            self._data = StrictDict.create(allowed_keys=self._fields_ordered)()
        else:
-            self._data = SemiStrictDict.create(
-                allowed_keys=self._fields_ordered)()
+            self._data = {}

        self._dynamic_fields = SON()

@@ -100,13 +100,11 @@ class BaseDocument(object):
            for key, value in values.iteritems():
                if key in self._fields or key == '_id':
                    setattr(self, key, value)
-                elif self._dynamic:
+                else:
                    dynamic_data[key] = value
        else:
            FileField = _import_class('FileField')
            for key, value in values.iteritems():
                if key == '__auto_convert':
                    continue
                key = self._reverse_db_field_map.get(key, key)
                if key in self._fields or key in ('id', 'pk', '_cls'):
                    if __auto_convert and value is not None:
@@ -147,7 +145,7 @@ class BaseDocument(object):

        if not hasattr(self, name) and not name.startswith('_'):
            DynamicField = _import_class('DynamicField')
-            field = DynamicField(db_field=name)
+            field = DynamicField(db_field=name, null=True)
            field.name = name
            self._dynamic_fields[name] = field
            self._fields_ordered += (name,)
@@ -337,7 +335,7 @@ class BaseDocument(object):
                    value = field.generate()
                    self._data[field_name] = value

-            if value is not None:
+            if (value is not None) or (field.null):
                if use_db_field:
                    data[field.db_field] = value
                else:
@@ -406,7 +404,15 @@ class BaseDocument(object):

    @classmethod
    def from_json(cls, json_data, created=False):
-        """Converts json data to an unsaved document instance"""
+        """Converts json data to a Document instance
+
+        :param json_data: The json data to load into the Document
+        :param created: If True, the document will be considered as a brand new document
+            If False and an id is provided, it will consider that the data being
+            loaded corresponds to what's already in the database (This has an impact of subsequent call to .save())
+            If False and no id is provided, it will consider the data as a new document
+            (default ``False``)
+        """
        return cls._from_son(json_util.loads(json_data), created=created)

    def __expand_dynamic_values(self, name, value):
@@ -489,7 +495,7 @@ class BaseDocument(object):
            else:
                data = getattr(data, part, None)

-            if hasattr(data, '_changed_fields'):
+            if not isinstance(data, LazyReference) and hasattr(data, '_changed_fields'):
                if getattr(data, '_is_document', False):
                    continue

@@ -1080,5 +1086,11 @@ class BaseDocument(object):
        """Return the display value for a choice field"""
        value = getattr(self, field.name)
        if field.choices and isinstance(field.choices[0], (list, tuple)):
-            return dict(field.choices).get(value, value)
+            if value is None:
+                return None
+            sep = getattr(field, 'display_sep', ' ')
+            values = value if field.__class__.__name__ in ('ListField', 'SortedListField') else [value]
+            return sep.join([
+                dict(field.choices).get(val, val)
+                for val in values or []])
        return value
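The relaxed choices check in the hunk below now accepts list/tuple values as
well as scalars; a sketch of what that enables (hypothetical model)::

    from mongoengine import Document, ListField, StringField

    class Shirt(Document):
        # choices declared on the list field itself, so the whole list value
        # is checked against the choice set
        tags = ListField(StringField(), choices=('sale', 'new'))

    Shirt(tags=['sale', 'new']).validate()  # every element must be a choice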
@@ -55,7 +55,7 @@ class BaseField(object):
        field. Generally this is deprecated in favour of the
        `FIELD.validate` method
    :param choices: (optional) The valid choices
-    :param null: (optional) Is the field value can be null. If no and there is a default value
+    :param null: (optional) If the field value can be null. If no and there is a default value
        then the default value is set
    :param sparse: (optional) `sparse=True` combined with `unique=True` and `required=False`
        means that uniqueness won't be enforced for `None` values
@@ -130,7 +130,6 @@ class BaseField(object):
    def __set__(self, instance, value):
        """Descriptor for assigning a value to a field in a document.
        """
-
        # If setting to None and there is a default
        # Then set the value to the default value
        if value is None:
@@ -213,8 +212,10 @@ class BaseField(object):
                )
            )
        # Choices which are types other than Documents
-        elif value not in choice_list:
-            self.error('Value must be one of %s' % six.text_type(choice_list))
+        else:
+            values = value if isinstance(value, (list, tuple)) else [value]
+            if len(set(values) - set(choice_list)):
+                self.error('Value must be one of %s' % six.text_type(choice_list))

    def _validate(self, value, **kwargs):
        # Check the Choices Constraint
mongoengine/base/utils.py (new file, 22 lines)
@@ -0,0 +1,22 @@
+import re
+
+
+class LazyRegexCompiler(object):
+    """Descriptor to allow lazy compilation of regex"""
+
+    def __init__(self, pattern, flags=0):
+        self._pattern = pattern
+        self._flags = flags
+        self._compiled_regex = None
+
+    @property
+    def compiled_regex(self):
+        if self._compiled_regex is None:
+            self._compiled_regex = re.compile(self._pattern, self._flags)
+        return self._compiled_regex
+
+    def __get__(self, obj, objtype):
+        return self.compiled_regex
+
+    def __set__(self, instance, value):
+        raise AttributeError("Can not set attribute LazyRegexCompiler")
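A usage sketch for the new descriptor above: the pattern is only compiled on
first attribute access, not at import time (hypothetical class)::

    import re

    from mongoengine.base.utils import LazyRegexCompiler

    class Slugged(object):
        SLUG_REGEX = LazyRegexCompiler(r'^[a-z0-9-]+$', re.IGNORECASE)

    Slugged.SLUG_REGEX.match('my-slug')  # compiles here, once, then is reused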
@@ -28,7 +28,7 @@ _connections = {}
_dbs = {}


-def register_connection(alias, name=None, host=None, port=None,
+def register_connection(alias, db=None, name=None, host=None, port=None,
                        read_preference=READ_PREFERENCE,
                        username=None, password=None,
                        authentication_source=None,
@@ -39,6 +39,7 @@ def register_connection(alias, name=None, host=None, port=None,
    :param alias: the name that will be used to refer to this connection
        throughout MongoEngine
    :param name: the name of the specific database to use
+    :param db: the name of the database to use, for compatibility with connect
    :param host: the host name of the :program:`mongod` instance to connect to
    :param port: the port that the :program:`mongod` instance is running on
    :param read_preference: The read preference for the collection
@@ -58,7 +59,7 @@ def register_connection(alias, name=None, host=None, port=None,
    .. versionchanged:: 0.10.6 - added mongomock support
    """
    conn_settings = {
-        'name': name or 'test',
+        'name': name or db or 'test',
        'host': host or 'localhost',
        'port': port or 27017,
        'read_preference': read_preference,
@@ -103,6 +104,18 @@ def register_connection(alias, name=None, host=None, port=None,
                    conn_settings['authentication_source'] = uri_options['authsource']
                if 'authmechanism' in uri_options:
                    conn_settings['authentication_mechanism'] = uri_options['authmechanism']
+                if IS_PYMONGO_3 and 'readpreference' in uri_options:
+                    read_preferences = (
+                        ReadPreference.NEAREST,
+                        ReadPreference.PRIMARY,
+                        ReadPreference.PRIMARY_PREFERRED,
+                        ReadPreference.SECONDARY,
+                        ReadPreference.SECONDARY_PREFERRED)
+                    read_pf_mode = uri_options['readpreference'].lower()
+                    for preference in read_preferences:
+                        if preference.name.lower() == read_pf_mode:
+                            conn_settings['read_preference'] = preference
+                            break
            else:
                resolved_hosts.append(entity)
        conn_settings['host'] = resolved_hosts
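A sketch of the new `db` alias on register_connection above (it mirrors
connect()'s keyword; hypothetical database name)::

    from mongoengine.connection import register_connection

    # These two calls are equivalent; `name` wins if both are given
    register_connection('default', db='myapp')
    register_connection('default', name='myapp')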
@@ -1,9 +1,11 @@
from contextlib import contextmanager
+from pymongo.write_concern import WriteConcern
from mongoengine.common import _import_class
from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db


__all__ = ('switch_db', 'switch_collection', 'no_dereference',
-           'no_sub_classes', 'query_counter')
+           'no_sub_classes', 'query_counter', 'set_write_concern')


class switch_db(object):
@@ -143,66 +145,83 @@ class no_sub_classes(object):
        :param cls: the class to turn querying sub classes on
        """
        self.cls = cls
+        self.cls_initial_subclasses = None

    def __enter__(self):
        """Change the objects default and _auto_dereference values."""
-        self.cls._all_subclasses = self.cls._subclasses
-        self.cls._subclasses = (self.cls,)
+        self.cls_initial_subclasses = self.cls._subclasses
+        self.cls._subclasses = (self.cls._class_name,)
        return self.cls

    def __exit__(self, t, value, traceback):
        """Reset the default and _auto_dereference values."""
-        self.cls._subclasses = self.cls._all_subclasses
-        delattr(self.cls, '_all_subclasses')
-        return self.cls
+        self.cls._subclasses = self.cls_initial_subclasses


class query_counter(object):
-    """Query_counter context manager to get the number of queries."""
+    """Query_counter context manager to get the number of queries.
+    This works by updating the `profiling_level` of the database so that all queries get logged,
+    resetting the db.system.profile collection at the beginning of the context and counting the new entries.
+
+    This was designed for debugging purpose. In fact it is a global counter so queries issued by other threads/processes
+    can interfere with it
+
+    Be aware that:
+    - Iterating over large amount of documents (>101) makes pymongo issue `getmore` queries to fetch the next batch of
+      documents (https://docs.mongodb.com/manual/tutorial/iterate-a-cursor/#cursor-batches)
+    - Some queries are ignored by default by the counter (killcursors, db.system.indexes)
+    """

    def __init__(self):
-        """Construct the query_counter."""
-        self.counter = 0
+        """Construct the query_counter
+        """
        self.db = get_db()
+        self.initial_profiling_level = None
+        self._ctx_query_counter = 0  # number of queries issued by the context
+
+        self._ignored_query = {
+            'ns':
+                {'$ne': '%s.system.indexes' % self.db.name},
+            'op':
+                {'$ne': 'killcursors'}
+        }
+
+    def _turn_on_profiling(self):
+        self.initial_profiling_level = self.db.profiling_level()
+        self.db.set_profiling_level(0)
+        self.db.system.profile.drop()
+        self.db.set_profiling_level(2)
+
+    def _resets_profiling(self):
+        self.db.set_profiling_level(self.initial_profiling_level)

    def __enter__(self):
-        """On every with block we need to drop the profile collection."""
-        self.db.set_profiling_level(0)
-        self.db.system.profile.drop()
-        self.db.set_profiling_level(2)
+        self._turn_on_profiling()
        return self

    def __exit__(self, t, value, traceback):
-        """Reset the profiling level."""
-        self.db.set_profiling_level(0)
+        self._resets_profiling()

    def __eq__(self, value):
        """== Compare querycounter."""
        counter = self._get_count()
        return value == counter

    def __ne__(self, value):
        """!= Compare querycounter."""
        return not self.__eq__(value)

    def __lt__(self, value):
        """< Compare querycounter."""
        return self._get_count() < value

    def __le__(self, value):
        """<= Compare querycounter."""
        return self._get_count() <= value

    def __gt__(self, value):
        """> Compare querycounter."""
        return self._get_count() > value

    def __ge__(self, value):
        """>= Compare querycounter."""
        return self._get_count() >= value

    def __int__(self):
        """int representation."""
        return self._get_count()

    def __repr__(self):
@@ -210,8 +229,17 @@ class query_counter(object):
        return u"%s" % self._get_count()

    def _get_count(self):
-        """Get the number of queries."""
-        ignore_query = {'ns': {'$ne': '%s.system.indexes' % self.db.name}}
-        count = self.db.system.profile.find(ignore_query).count() - self.counter
-        self.counter += 1
+        """Get the number of queries by counting the current number of entries in db.system.profile
+        and subtracting the queries issued by this context. In fact every time this is called, 1 query is
+        issued so we need to balance that
+        """
+        count = self.db.system.profile.find(self._ignored_query).count() - self._ctx_query_counter
+        self._ctx_query_counter += 1  # Account for the query we just issued to gather the information
        return count


+@contextmanager
+def set_write_concern(collection, write_concerns):
+    combined_concerns = dict(collection.write_concern.document.items())
+    combined_concerns.update(write_concerns)
+    yield collection.with_options(write_concern=WriteConcern(**combined_concerns))
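Usage sketches for the two context managers touched above (assuming a
hypothetical `Page` document on the default connection)::

    from mongoengine.context_managers import query_counter, set_write_concern

    with query_counter() as q:
        assert q == 0
        Page.objects.first()
        assert q == 1

    coll = Page._get_collection()
    with set_write_concern(coll, {'w': 'majority'}) as wc_coll:
        # wc_coll is a copy of the collection with the merged write concern
        wc_coll.insert_one({'title': 'durable write'})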
@@ -3,6 +3,7 @@ import six

from mongoengine.base import (BaseDict, BaseList, EmbeddedDocumentList,
                              TopLevelDocumentMetaclass, get_document)
+from mongoengine.base.datastructures import LazyReference
from mongoengine.connection import get_db
from mongoengine.document import Document, EmbeddedDocument
from mongoengine.fields import DictField, ListField, MapField, ReferenceField
@@ -99,7 +100,10 @@ class DeReference(object):
                if isinstance(item, (Document, EmbeddedDocument)):
                    for field_name, field in item._fields.iteritems():
                        v = item._data.get(field_name, None)
-                        if isinstance(v, DBRef):
+                        if isinstance(v, LazyReference):
+                            # LazyReference inherits DBRef but should not be dereferenced here !
+                            continue
+                        elif isinstance(v, DBRef):
                            reference_map.setdefault(field.document_type, set()).add(v.id)
                        elif isinstance(v, (dict, SON)) and '_ref' in v:
                            reference_map.setdefault(get_document(v['_cls']), set()).add(v['_ref'].id)
@@ -110,6 +114,9 @@ class DeReference(object):
                        if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)):
                            key = field_cls
                        reference_map.setdefault(key, set()).update(refs)
+                elif isinstance(item, LazyReference):
+                    # LazyReference inherits DBRef but should not be dereferenced here !
+                    continue
                elif isinstance(item, DBRef):
                    reference_map.setdefault(item.collection, set()).add(item.id)
                elif isinstance(item, (dict, SON)) and '_ref' in item:
@@ -126,7 +133,12 @@ class DeReference(object):
        """
        object_map = {}
        for collection, dbrefs in self.reference_map.iteritems():
-            if hasattr(collection, 'objects'):  # We have a document class for the refs
+
+            # we use getattr instead of hasattr because hasattr swallows any exception under python2
+            # so it could hide nasty things without raising exceptions (cfr bug #1688))
+            ref_document_cls_exists = (getattr(collection, 'objects', None) is not None)
+
+            if ref_document_cls_exists:
                col_name = collection._get_collection_name()
                refs = [dbref for dbref in dbrefs
                        if (col_name, dbref) not in object_map]
@@ -230,7 +242,7 @@ class DeReference(object):
            elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
                item_name = '%s.%s' % (name, k) if name else name
                data[k] = self._attach_objects(v, depth - 1, instance=instance, name=item_name)
-            elif hasattr(v, 'id'):
+            elif isinstance(v, DBRef) and hasattr(v, 'id'):
                data[k] = self.object_map.get((v.collection, v.id), v)

        if instance and name:
@@ -195,7 +195,10 @@ class Document(BaseDocument):

            # Ensure indexes on the collection unless auto_create_index was
            # set to False.
-            if cls._meta.get('auto_create_index', True):
+            # Also there is no need to ensure indexes on slave.
+            db = cls._get_db()
+            if cls._meta.get('auto_create_index', True) and\
+                    db.client.is_primary:
                cls.ensure_indexes()

        return cls._collection
@@ -280,6 +283,9 @@ class Document(BaseDocument):
        elif query[id_field] != self.pk:
            raise InvalidQueryError('Invalid document modify query: it must modify only this document.')

+        # Need to add shard key to query, or you get an error
+        query.update(self._object_key)
+
        updated = self._qs(**query).modify(new=True, **update)
        if updated is None:
            return False
@@ -576,12 +582,11 @@ class Document(BaseDocument):
        """Delete the :class:`~mongoengine.Document` from the database. This
        will only take effect if the document has been previously saved.

-        :parm signal_kwargs: (optional) kwargs dictionary to be passed to
+        :param signal_kwargs: (optional) kwargs dictionary to be passed to
            the signal calls.
        :param write_concern: Extra keyword arguments are passed down which
-            will be used as options for the resultant
-            ``getLastError`` command. For example,
-            ``save(..., write_concern={w: 2, fsync: True}, ...)`` will
+            will be used as options for the resultant ``getLastError`` command.
+            For example, ``save(..., w: 2, fsync: True)`` will
            wait until at least two servers have recorded the write and
            will force an fsync on the primary server.

@@ -702,7 +707,6 @@ class Document(BaseDocument):
                obj = obj[0]
            else:
                raise self.DoesNotExist('Document does not exist')
-
        for field in obj._data:
            if not fields or field in fields:
                try:
@@ -710,7 +714,7 @@ class Document(BaseDocument):
                except (KeyError, AttributeError):
                    try:
                        # If field is a special field, e.g. items is stored as _reserved_items,
-                        # an KeyError is thrown. So try to retrieve the field from _data
+                        # a KeyError is thrown. So try to retrieve the field from _data
                        setattr(self, field, self._reload(field, obj._data.get(field)))
                    except KeyError:
                        # If field is removed from the database while the object
@@ -718,7 +722,9 @@ class Document(BaseDocument):
                        # i.e. obj.update(unset__field=1) followed by obj.reload()
                        delattr(self, field)

-        self._changed_fields = obj._changed_fields
+        self._changed_fields = list(
+            set(self._changed_fields) - set(fields)
+        ) if fields else obj._changed_fields
        self._created = False
        return self

@@ -964,8 +970,16 @@ class Document(BaseDocument):
        """

        required = cls.list_indexes()
-        existing = [info['key']
-                    for info in cls._get_collection().index_information().values()]
+
+        existing = []
+        for info in cls._get_collection().index_information().values():
+            if '_fts' in info['key'][0]:
+                index_type = info['key'][0][1]
+                text_index_fields = info.get('weights').keys()
+                existing.append(
+                    [(key, index_type) for key in text_index_fields])
+            else:
+                existing.append(info['key'])
        missing = [index for index in required if index not in existing]
        extra = [index for index in existing if index not in required]

@@ -985,7 +999,7 @@ class Document(BaseDocument):
class DynamicDocument(Document):
    """A Dynamic Document class allowing flexible, expandable and uncontrolled
    schemas. As a :class:`~mongoengine.Document` subclass, acts in the same
-    way as an ordinary document but has expando style properties. Any data
+    way as an ordinary document but has expanded style properties. Any data
    passed or set against the :class:`~mongoengine.DynamicDocument` that is
    not a field is automatically converted into a
    :class:`~mongoengine.fields.DynamicField` and data can be attributed to that
@@ -1010,6 +1024,7 @@ class DynamicDocument(Document):
            field_name = args[0]
            if field_name in self._dynamic_fields:
                setattr(self, field_name, None)
+                self._dynamic_fields[field_name].null = False
            else:
                super(DynamicDocument, self).__delattr__(*args, **kwargs)
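A sketch of the clarified from_json semantics documented in the hunks above
(same hypothetical `Page` document as before)::

    blob = Page.objects.first().to_json()

    # created=False (default) with an id in the JSON: the instance mirrors
    # what is already stored, so a subsequent .save() issues an update
    page = Page.from_json(blob)

    # created=True: treated as brand new, so .save() inserts (beware of
    # duplicate-key errors if the id already exists)
    clone = Page.from_json(blob, created=True)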
@@ -5,7 +5,6 @@ import re
import socket
import time
import uuid
import warnings
-from operator import itemgetter

from bson import Binary, DBRef, ObjectId, SON
@@ -26,7 +25,10 @@ except ImportError:
    Int64 = long

from mongoengine.base import (BaseDocument, BaseField, ComplexBaseField,
-                              GeoJsonBaseField, ObjectIdField, get_document)
+                              GeoJsonBaseField, LazyReference, ObjectIdField,
+                              get_document)
+from mongoengine.base.utils import LazyRegexCompiler
from mongoengine.common import _import_class
from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
from mongoengine.document import Document, EmbeddedDocument
from mongoengine.errors import DoesNotExist, InvalidQueryError, ValidationError
@@ -41,11 +43,12 @@ except ImportError:

__all__ = (
    'StringField', 'URLField', 'EmailField', 'IntField', 'LongField',
-    'FloatField', 'DecimalField', 'BooleanField', 'DateTimeField',
+    'FloatField', 'DecimalField', 'BooleanField', 'DateTimeField', 'DateField',
    'ComplexDateTimeField', 'EmbeddedDocumentField', 'ObjectIdField',
    'GenericEmbeddedDocumentField', 'DynamicField', 'ListField',
    'SortedListField', 'EmbeddedDocumentListField', 'DictField',
    'MapField', 'ReferenceField', 'CachedReferenceField',
+    'LazyReferenceField', 'GenericLazyReferenceField',
    'GenericReferenceField', 'BinaryField', 'GridFSError', 'GridFSProxy',
    'FileField', 'ImageGridFsProxy', 'ImproperlyConfigured', 'ImageField',
    'GeoPointField', 'PointField', 'LineStringField', 'PolygonField',
@@ -120,7 +123,7 @@ class URLField(StringField):
    .. versionadded:: 0.3
    """

-    _URL_REGEX = re.compile(
+    _URL_REGEX = LazyRegexCompiler(
        r'^(?:[a-z0-9\.\-]*)://'  # scheme is validated separately
        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}(?<!-)\.?)|'  # domain...
        r'localhost|'  # localhost...
@@ -154,7 +157,7 @@ class EmailField(StringField):

    .. versionadded:: 0.4
    """
-    USER_REGEX = re.compile(
+    USER_REGEX = LazyRegexCompiler(
        # `dot-atom` defined in RFC 5322 Section 3.2.3.
        r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*\Z"
        # `quoted-string` defined in RFC 5322 Section 3.2.4.
@@ -162,7 +165,7 @@ class EmailField(StringField):
        re.IGNORECASE
    )

-    UTF8_USER_REGEX = re.compile(
+    UTF8_USER_REGEX = LazyRegexCompiler(
        six.u(
            # RFC 6531 Section 3.3 extends `atext` (used by dot-atom) to
            # include `UTF8-non-ascii`.
@@ -172,7 +175,7 @@ class EmailField(StringField):
        ), re.IGNORECASE | re.UNICODE
    )

-    DOMAIN_REGEX = re.compile(
+    DOMAIN_REGEX = LazyRegexCompiler(
        r'((?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+)(?:[A-Z0-9-]{2,63}(?<!-))\Z',
        re.IGNORECASE
    )
@@ -361,7 +364,8 @@ class FloatField(BaseField):


class DecimalField(BaseField):
-    """Fixed-point decimal number field.
+    """Fixed-point decimal number field. Stores the value as a float by default unless `force_string` is used.
+    If using floats, beware of Decimal to float conversion (potential precision loss)

    .. versionchanged:: 0.8
    .. versionadded:: 0.3
@@ -372,7 +376,9 @@ class DecimalField(BaseField):
        """
        :param min_value: Validation rule for the minimum acceptable value.
        :param max_value: Validation rule for the maximum acceptable value.
-        :param force_string: Store as a string.
+        :param force_string: Store the value as a string (instead of a float).
+            Be aware that this affects query sorting and operation like lte, gte (as string comparison is applied)
+            and some query operator won't work (e.g: inc, dec)
        :param precision: Number of decimal places to store.
        :param rounding: The rounding rule from the python decimal library:

@@ -459,6 +465,8 @@ class DateTimeField(BaseField):
    installed you can utilise it to convert varying types of date formats into valid
    python datetime objects.

+    Note: To default the field to the current datetime, use: DateTimeField(default=datetime.utcnow)
+
    Note: Microseconds are rounded to the nearest millisecond.
        Pre UTC microsecond support is effectively broken.
        Use :class:`~mongoengine.fields.ComplexDateTimeField` if you
@@ -522,6 +530,22 @@ class DateTimeField(BaseField):
        return super(DateTimeField, self).prepare_query_value(op, self.to_mongo(value))


+class DateField(DateTimeField):
+    def to_mongo(self, value):
+        value = super(DateField, self).to_mongo(value)
+        # drop hours, minutes, seconds
+        if isinstance(value, datetime.datetime):
+            value = datetime.datetime(value.year, value.month, value.day)
+        return value
+
+    def to_python(self, value):
+        value = super(DateField, self).to_python(value)
+        # convert datetime to date
+        if isinstance(value, datetime.datetime):
+            value = datetime.date(value.year, value.month, value.day)
+        return value
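A quick sketch of the new DateField above: the time-of-day component is
truncated on write and dropped on read (hypothetical model)::

    import datetime
    from mongoengine import Document, DateField

    class Birthday(Document):
        day = DateField()

    b = Birthday(day=datetime.datetime(2018, 3, 2, 15, 30))
    b.save()
    b.reload()
    assert b.day == datetime.date(2018, 3, 2)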
class ComplexDateTimeField(StringField):
|
||||
"""
|
||||
ComplexDateTimeField handles microseconds exactly instead of rounding
|
||||
@ -538,11 +562,15 @@ class ComplexDateTimeField(StringField):
|
||||
The `,` as the separator can be easily modified by passing the `separator`
|
||||
keyword when initializing the field.
|
||||
|
||||
Note: To default the field to the current datetime, use: DateTimeField(default=datetime.utcnow)
|
||||
|
||||
.. versionadded:: 0.5
|
||||
"""
|
||||
|
||||
def __init__(self, separator=',', **kwargs):
|
||||
self.names = ['year', 'month', 'day', 'hour', 'minute', 'second', 'microsecond']
|
||||
"""
|
||||
:param separator: Allows to customize the separator used for storage (default ``,``)
|
||||
"""
|
||||
self.separator = separator
|
||||
self.format = separator.join(['%Y', '%m', '%d', '%H', '%M', '%S', '%f'])
|
||||
super(ComplexDateTimeField, self).__init__(**kwargs)
|
||||
@ -573,16 +601,20 @@ class ComplexDateTimeField(StringField):
|
||||
return datetime.datetime(*values)
|
||||
|
||||
def __get__(self, instance, owner):
|
||||
if instance is None:
|
||||
return self
|
||||
|
||||
data = super(ComplexDateTimeField, self).__get__(instance, owner)
|
||||
if data is None:
|
||||
return None if self.null else datetime.datetime.now()
|
||||
if isinstance(data, datetime.datetime):
|
||||
|
||||
if isinstance(data, datetime.datetime) or data is None:
|
||||
return data
|
||||
return self._convert_from_string(data)
|
||||
|
||||
def __set__(self, instance, value):
|
||||
value = self._convert_from_datetime(value) if value else value
|
||||
return super(ComplexDateTimeField, self).__set__(instance, value)
|
||||
super(ComplexDateTimeField, self).__set__(instance, value)
|
||||
value = instance._data[self.name]
|
||||
if value is not None:
|
||||
instance._data[self.name] = self._convert_from_datetime(value)
|
||||
|
||||
def validate(self, value):
|
||||
value = self.to_python(value)
|
||||
@ -611,6 +643,7 @@ class EmbeddedDocumentField(BaseField):
|
||||
"""
|
||||
|
||||
def __init__(self, document_type, **kwargs):
|
||||
# XXX ValidationError raised outside of the "validate" method.
|
||||
if not (
|
||||
isinstance(document_type, six.string_types) or
|
||||
issubclass(document_type, EmbeddedDocument)
|
||||
@ -625,9 +658,17 @@ class EmbeddedDocumentField(BaseField):
|
||||
def document_type(self):
|
||||
if isinstance(self.document_type_obj, six.string_types):
|
||||
if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT:
|
||||
self.document_type_obj = self.owner_document
|
||||
resolved_document_type = self.owner_document
|
||||
else:
|
||||
self.document_type_obj = get_document(self.document_type_obj)
|
||||
resolved_document_type = get_document(self.document_type_obj)
|
||||
|
||||
if not issubclass(resolved_document_type, EmbeddedDocument):
|
||||
# Due to the late resolution of the document_type
|
||||
# There is a chance that it won't be an EmbeddedDocument (#1661)
|
||||
self.error('Invalid embedded document class provided to an '
|
||||
'EmbeddedDocumentField')
|
||||
self.document_type_obj = resolved_document_type
|
||||
|
||||
return self.document_type_obj
|
||||
|
||||
def to_python(self, value):
|
||||
@ -686,16 +727,28 @@ class GenericEmbeddedDocumentField(BaseField):
|
||||
return value
|
||||
|
||||
def validate(self, value, clean=True):
|
||||
if self.choices and isinstance(value, SON):
|
||||
for choice in self.choices:
|
||||
if value['_cls'] == choice._class_name:
|
||||
return True
|
||||
|
||||
if not isinstance(value, EmbeddedDocument):
|
||||
self.error('Invalid embedded document instance provided to an '
|
||||
'GenericEmbeddedDocumentField')
|
||||
|
||||
value.validate(clean=clean)
|
||||
|
||||
def lookup_member(self, member_name):
|
||||
if self.choices:
|
||||
for choice in self.choices:
|
||||
field = choice._fields.get(member_name)
|
||||
if field:
|
||||
return field
|
||||
return None
|
||||
|
||||
def to_mongo(self, document, use_db_field=True, fields=None):
|
||||
if document is None:
|
||||
return None
|
||||
|
||||
data = document.to_mongo(use_db_field, fields)
|
||||
if '_cls' not in data:
|
||||
data['_cls'] = document._class_name
|
||||
@ -779,6 +832,17 @@ class ListField(ComplexBaseField):
|
||||
kwargs.setdefault('default', lambda: [])
|
||||
super(ListField, self).__init__(**kwargs)
|
||||
|
||||
def __get__(self, instance, owner):
|
||||
if instance is None:
|
||||
# Document class being used rather than a document object
|
||||
return self
|
||||
value = instance._data.get(self.name)
|
||||
LazyReferenceField = _import_class('LazyReferenceField')
|
||||
GenericLazyReferenceField = _import_class('GenericLazyReferenceField')
|
||||
if isinstance(self.field, (LazyReferenceField, GenericLazyReferenceField)) and value:
|
||||
instance._data[self.name] = [self.field.build_lazyref(x) for x in value]
|
||||
return super(ListField, self).__get__(instance, owner)
|
||||
|
||||
def validate(self, value):
|
||||
"""Make sure that a list of valid fields is being used."""
|
||||
if (not isinstance(value, (list, tuple, QuerySet)) or
|
||||
@ -889,12 +953,10 @@ class DictField(ComplexBaseField):
|
||||
.. versionchanged:: 0.5 - Can now handle complex / varying types of data
|
||||
"""
|
||||
|
||||
def __init__(self, basecls=None, field=None, *args, **kwargs):
|
||||
def __init__(self, field=None, *args, **kwargs):
|
||||
self.field = field
|
||||
self._auto_dereference = False
|
||||
self.basecls = basecls or BaseField
|
||||
if not issubclass(self.basecls, BaseField):
|
||||
self.error('DictField only accepts dict values')
|
||||
|
||||
kwargs.setdefault('default', lambda: {})
|
||||
super(DictField, self).__init__(*args, **kwargs)
|
||||
|
||||
@ -913,7 +975,7 @@ class DictField(ComplexBaseField):
|
||||
super(DictField, self).validate(value)
|
||||
|
||||
def lookup_member(self, member_name):
|
||||
return DictField(basecls=self.basecls, db_field=member_name)
|
||||
return DictField(db_field=member_name)
|
||||
|
||||
def prepare_query_value(self, op, value):
|
||||
match_operators = ['contains', 'icontains', 'startswith',
|
||||
@ -943,6 +1005,7 @@ class MapField(DictField):
|
||||
"""
|
||||
|
||||
def __init__(self, field=None, *args, **kwargs):
|
||||
# XXX ValidationError raised outside of the "validate" method.
|
||||
if not isinstance(field, BaseField):
|
||||
self.error('Argument to MapField constructor must be a valid '
|
||||
'field')
|
||||
@ -953,6 +1016,15 @@ class ReferenceField(BaseField):
|
||||
"""A reference to a document that will be automatically dereferenced on
|
||||
access (lazily).
|
||||
|
||||
Note this means you will get a database I/O access everytime you access
|
||||
this field. This is necessary because the field returns a :class:`~mongoengine.Document`
|
||||
which precise type can depend of the value of the `_cls` field present in the
|
||||
document in database.
|
||||
In short, using this type of field can lead to poor performances (especially
|
||||
if you access this field only to retrieve it `pk` field which is already
|
||||
known before dereference). To solve this you should consider using the
|
||||
:class:`~mongoengine.fields.LazyReferenceField`.
|
||||
|
||||
Use the `reverse_delete_rule` to handle what should happen if the document
|
||||
the field is referencing is deleted. EmbeddedDocuments, DictFields and
|
||||
MapFields does not support reverse_delete_rule and an `InvalidDocumentError`
|
||||
@ -971,11 +1043,13 @@ class ReferenceField(BaseField):
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class Bar(Document):
|
||||
content = StringField()
|
||||
foo = ReferenceField('Foo')
|
||||
class Org(Document):
|
||||
owner = ReferenceField('User')
|
||||
|
||||
Foo.register_delete_rule(Bar, 'foo', NULLIFY)
|
||||
class User(Document):
|
||||
org = ReferenceField('Org', reverse_delete_rule=CASCADE)
|
||||
|
||||
User.register_delete_rule(Org, 'owner', DENY)
|
||||
|
||||
.. versionchanged:: 0.5 added `reverse_delete_rule`
|
||||
"""

@ -993,6 +1067,7 @@ class ReferenceField(BaseField):
A reference to an abstract document type is always stored as a
:class:`~pymongo.dbref.DBRef`, regardless of the value of `dbref`.
"""
# XXX ValidationError raised outside of the "validate" method.
if (
not isinstance(document_type, six.string_types) and
not issubclass(document_type, Document)

@ -1047,6 +1122,8 @@ class ReferenceField(BaseField):
if isinstance(document, Document):
# We need the id from the saved object to create the DBRef
id_ = document.pk

# XXX ValidationError raised outside of the "validate" method.
if id_ is None:
self.error('You can only reference documents once they have'
' been saved to the database')

@ -1086,19 +1163,20 @@ class ReferenceField(BaseField):
return self.to_mongo(value)

def validate(self, value):

if not isinstance(value, (self.document_type, DBRef, ObjectId)):
self.error('A ReferenceField only accepts DBRef, ObjectId or documents')
if not isinstance(value, (self.document_type, LazyReference, DBRef, ObjectId)):
self.error('A ReferenceField only accepts DBRef, LazyReference, ObjectId or documents')

if isinstance(value, Document) and value.id is None:
self.error('You can only reference documents once they have been '
'saved to the database')

if self.document_type._meta.get('abstract') and \
not isinstance(value, self.document_type):
if (
self.document_type._meta.get('abstract') and
not isinstance(value, self.document_type)
):
self.error(
'%s is not an instance of abstract reference type %s' % (
self.document_type._class_name)
value, self.document_type._class_name)
)

def lookup_member(self, member_name):

@ -1121,6 +1199,7 @@ class CachedReferenceField(BaseField):
if fields is None:
fields = []

# XXX ValidationError raised outside of the "validate" method.
if (
not isinstance(document_type, six.string_types) and
not issubclass(document_type, Document)

@ -1195,6 +1274,7 @@ class CachedReferenceField(BaseField):
id_field_name = self.document_type._meta['id_field']
id_field = self.document_type._fields[id_field_name]

# XXX ValidationError raised outside of the "validate" method.
if isinstance(document, Document):
# We need the id from the saved object to create the DBRef
id_ = document.pk

@ -1203,7 +1283,6 @@ class CachedReferenceField(BaseField):
' been saved to the database')
else:
self.error('Only accept a document object')
# TODO: should raise here or will fail next statement

value = SON((
('_id', id_field.to_mongo(id_)),

@ -1221,16 +1300,20 @@ class CachedReferenceField(BaseField):
if value is None:
return None

# XXX ValidationError raised outside of the "validate" method.
if isinstance(value, Document):
if value.pk is None:
self.error('You can only reference documents once they have'
' been saved to the database')
return {'_id': value.pk}
value_dict = {'_id': value.pk}
for field in self.fields:
value_dict.update({field: value[field]})

return value_dict
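# Illustration (hypothetical model): with CachedReferenceField(Person,
# fields=['name']), the reference is stored inline as a subdocument rather
# than a bare id, e.g. {'_id': ObjectId('...'), 'name': 'Bob'}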

raise NotImplementedError

def validate(self, value):

if not isinstance(value, self.document_type):
self.error('A CachedReferenceField only accepts documents')

@ -1263,6 +1346,12 @@ class GenericReferenceField(BaseField):
"""A reference to *any* :class:`~mongoengine.document.Document` subclass
that will be automatically dereferenced on access (lazily).

Note this field works the same way as :class:`~mongoengine.document.ReferenceField`,
doing database I/O access the first time it is accessed (even if it's to access
its ``pk`` or ``id`` field).
To solve this you should consider using the
:class:`~mongoengine.fields.GenericLazyReferenceField`.

.. note ::
* Any documents used as a generic reference must be registered in the
document registry. Importing the model will automatically register

@ -1285,6 +1374,8 @@ class GenericReferenceField(BaseField):
elif isinstance(choice, type) and issubclass(choice, Document):
self.choices.append(choice._class_name)
else:
# XXX ValidationError raised outside of the "validate"
# method.
self.error('Invalid choices provided: must be a list of '
'Document subclasses and/or six.string_types')

@ -1348,6 +1439,7 @@ class GenericReferenceField(BaseField):
# We need the id from the saved object to create the DBRef
id_ = document.id
if id_ is None:
# XXX ValidationError raised outside of the "validate" method.
self.error('You can only reference documents once they have'
' been saved to the database')
else:

@ -1453,9 +1545,9 @@ class GridFSProxy(object):
return '<%s: %s>' % (self.__class__.__name__, self.grid_id)

def __str__(self):
name = getattr(
self.get(), 'filename', self.grid_id) if self.get() else '(no file)'
return '<%s: %s>' % (self.__class__.__name__, name)
gridout = self.get()
filename = getattr(gridout, 'filename') if gridout else '<no file>'
return '<%s: %s (%s)>' % (self.__class__.__name__, filename, self.grid_id)

def __eq__(self, other):
if isinstance(other, GridFSProxy):

@ -1465,6 +1557,9 @@ class GridFSProxy(object):
else:
return False

def __ne__(self, other):
return not self == other

@property
def fs(self):
if not self._fs:

@ -2138,3 +2233,201 @@ class MultiPolygonField(GeoJsonBaseField):
.. versionadded:: 0.9
"""
_type = 'MultiPolygon'


class LazyReferenceField(BaseField):
"""A really lazy reference to a document.
Unlike the :class:`~mongoengine.fields.ReferenceField` it will
**not** be automatically (lazily) dereferenced on access.
Instead, access will return a :class:`~mongoengine.base.LazyReference` class
instance, allowing access to `pk` or manual dereferencing by using
the ``fetch()`` method.

.. versionadded:: 0.15
"""

def __init__(self, document_type, passthrough=False, dbref=False,
reverse_delete_rule=DO_NOTHING, **kwargs):
"""Initialises the Reference Field.

:param dbref: Store the reference as :class:`~pymongo.dbref.DBRef`
or as the :class:`~pymongo.objectid.ObjectId`.id.
:param reverse_delete_rule: Determines what to do when the referring
object is deleted
:param passthrough: When trying to access unknown fields, the
:class:`~mongoengine.base.datastructure.LazyReference` instance will
automatically call `fetch()` and try to retrieve the field on the fetched
document. Note this only works for getting a field (not for setting or deleting).
"""
# XXX ValidationError raised outside of the "validate" method.
if (
not isinstance(document_type, six.string_types) and
not issubclass(document_type, Document)
):
self.error('Argument to LazyReferenceField constructor must be a '
'document class or a string')

self.dbref = dbref
self.passthrough = passthrough
self.document_type_obj = document_type
self.reverse_delete_rule = reverse_delete_rule
super(LazyReferenceField, self).__init__(**kwargs)

@property
def document_type(self):
if isinstance(self.document_type_obj, six.string_types):
if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT:
self.document_type_obj = self.owner_document
else:
self.document_type_obj = get_document(self.document_type_obj)
return self.document_type_obj

def build_lazyref(self, value):
if isinstance(value, LazyReference):
if value.passthrough != self.passthrough:
value = LazyReference(value.document_type, value.pk, passthrough=self.passthrough)
elif value is not None:
if isinstance(value, self.document_type):
value = LazyReference(self.document_type, value.pk, passthrough=self.passthrough)
elif isinstance(value, DBRef):
value = LazyReference(self.document_type, value.id, passthrough=self.passthrough)
else:
# value is the primary key of the referenced document
value = LazyReference(self.document_type, value, passthrough=self.passthrough)
return value

def __get__(self, instance, owner):
"""Descriptor to allow lazy dereferencing."""
if instance is None:
# Document class being used rather than a document object
return self

value = self.build_lazyref(instance._data.get(self.name))
if value:
instance._data[self.name] = value

return super(LazyReferenceField, self).__get__(instance, owner)

def to_mongo(self, value):
if isinstance(value, LazyReference):
pk = value.pk
elif isinstance(value, self.document_type):
pk = value.pk
elif isinstance(value, DBRef):
pk = value.id
else:
# value is the primary key of the referenced document
pk = value
id_field_name = self.document_type._meta['id_field']
id_field = self.document_type._fields[id_field_name]
pk = id_field.to_mongo(pk)
if self.dbref:
return DBRef(self.document_type._get_collection_name(), pk)
else:
return pk

def validate(self, value):
if isinstance(value, LazyReference):
if value.collection != self.document_type._get_collection_name():
self.error('Reference must be on a `%s` document.' % self.document_type)
pk = value.pk
elif isinstance(value, self.document_type):
pk = value.pk
elif isinstance(value, DBRef):
# TODO: check collection ?
collection = self.document_type._get_collection_name()
if value.collection != collection:
self.error("DBRef on bad collection (must be on `%s`)" % collection)
pk = value.id
else:
# value is the primary key of the referenced document
id_field_name = self.document_type._meta['id_field']
id_field = getattr(self.document_type, id_field_name)
pk = value
try:
id_field.validate(pk)
except ValidationError:
self.error(
"value should be `{0}` document, LazyReference or DBRef on `{0}` "
"or `{0}`'s primary key (i.e. `{1}`)".format(
self.document_type.__name__, type(id_field).__name__))

if pk is None:
self.error('You can only reference documents once they have been '
'saved to the database')

def prepare_query_value(self, op, value):
if value is None:
return None
super(LazyReferenceField, self).prepare_query_value(op, value)
return self.to_mongo(value)

def lookup_member(self, member_name):
return self.document_type._fields.get(member_name)


class GenericLazyReferenceField(GenericReferenceField):
"""A reference to *any* :class:`~mongoengine.document.Document` subclass.
Unlike the :class:`~mongoengine.fields.GenericReferenceField` it will
**not** be automatically (lazily) dereferenced on access.
Instead, access will return a :class:`~mongoengine.base.LazyReference` class
instance, allowing access to `pk` or manual dereferencing by using
the ``fetch()`` method.

.. note ::
* Any documents used as a generic reference must be registered in the
document registry. Importing the model will automatically register
it.

* You can use the choices param to limit the acceptable Document types

.. versionadded:: 0.15
"""

def __init__(self, *args, **kwargs):
self.passthrough = kwargs.pop('passthrough', False)
super(GenericLazyReferenceField, self).__init__(*args, **kwargs)

def _validate_choices(self, value):
if isinstance(value, LazyReference):
value = value.document_type._class_name
super(GenericLazyReferenceField, self)._validate_choices(value)

def build_lazyref(self, value):
if isinstance(value, LazyReference):
if value.passthrough != self.passthrough:
value = LazyReference(value.document_type, value.pk, passthrough=self.passthrough)
elif value is not None:
if isinstance(value, (dict, SON)):
value = LazyReference(get_document(value['_cls']), value['_ref'].id, passthrough=self.passthrough)
elif isinstance(value, Document):
value = LazyReference(type(value), value.pk, passthrough=self.passthrough)
return value

def __get__(self, instance, owner):
if instance is None:
return self

value = self.build_lazyref(instance._data.get(self.name))
if value:
instance._data[self.name] = value

return super(GenericLazyReferenceField, self).__get__(instance, owner)

def validate(self, value):
if isinstance(value, LazyReference) and value.pk is None:
self.error('You can only reference documents once they have been'
' saved to the database')
return super(GenericLazyReferenceField, self).validate(value)

def to_mongo(self, document):
if document is None:
return None

if isinstance(document, LazyReference):
return SON((
('_cls', document.document_type._class_name),
('_ref', DBRef(document.document_type._get_collection_name(), document.pk))
))
else:
return super(GenericLazyReferenceField, self).to_mongo(document)
@ -6,11 +6,7 @@ import pymongo
import six


if pymongo.version_tuple[0] < 3:
IS_PYMONGO_3 = False
else:
IS_PYMONGO_3 = True

IS_PYMONGO_3 = pymongo.version_tuple[0] >= 3

# six.BytesIO resolves to StringIO.StringIO in Py2 and io.BytesIO in Py3.
StringIO = six.BytesIO
@ -18,7 +18,7 @@ from mongoengine import signals
from mongoengine.base import get_document
from mongoengine.common import _import_class
from mongoengine.connection import get_db
from mongoengine.context_managers import switch_db
from mongoengine.context_managers import set_write_concern, switch_db
from mongoengine.errors import (InvalidQueryError, LookUpError,
NotUniqueError, OperationError)
from mongoengine.python_support import IS_PYMONGO_3

@ -350,11 +350,24 @@ class BaseQuerySet(object):
documents=docs, **signal_kwargs)

raw = [doc.to_mongo() for doc in docs]

with set_write_concern(self._collection, write_concern) as collection:
insert_func = collection.insert_many
if return_one:
raw = raw[0]
insert_func = collection.insert_one

try:
ids = self._collection.insert(raw, **write_concern)
inserted_result = insert_func(raw)
ids = return_one and [inserted_result.inserted_id] or inserted_result.inserted_ids
except pymongo.errors.DuplicateKeyError as err:
message = 'Could not save document (%s)'
raise NotUniqueError(message % six.text_type(err))
except pymongo.errors.BulkWriteError as err:
# inserting documents that already have an _id field will
# incur a huge performance penalty or raise
message = u'Document must not have _id value before bulk write (%s)'
raise NotUniqueError(message % six.text_type(err))
except pymongo.errors.OperationFailure as err:
message = 'Could not save document (%s)'
if re.match('^E1100[01] duplicate key', six.text_type(err)):
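
The set_write_concern context manager is introduced by this changeset; a minimal sketch of what it presumably does, using only the PyMongo 3 API:

    from contextlib import contextmanager
    from pymongo.write_concern import WriteConcern

    @contextmanager
    def set_write_concern(collection, write_concern):
        # Yield a clone of the collection configured with the requested write
        # concern (PyMongo 3 collection options are set via with_options).
        yield collection.with_options(write_concern=WriteConcern(**write_concern))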

@ -368,7 +381,6 @@ class BaseQuerySet(object):
signals.post_bulk_insert.send(
self._document, documents=docs, loaded=False, **signal_kwargs)
return return_one and ids[0] or ids

documents = self.in_bulk(ids)
results = []
for obj_id in ids:

@ -486,8 +498,9 @@ class BaseQuerySet(object):
``save(..., write_concern={w: 2, fsync: True}, ...)`` will
wait until at least two servers have recorded the write and
will force an fsync on the primary server.
:param full_result: Return the full result rather than just the number
updated.
:param full_result: Return the full result dictionary rather than just the number
updated, e.g. return
``{'n': 2, 'nModified': 2, 'ok': 1.0, 'updatedExisting': True}``.
:param update: Django-style update keyword arguments

.. versionadded:: 0.2

@ -510,12 +523,15 @@ class BaseQuerySet(object):
else:
update['$set'] = {'_cls': queryset._document._class_name}
try:
result = queryset._collection.update(query, update, multi=multi,
upsert=upsert, **write_concern)
with set_write_concern(queryset._collection, write_concern) as collection:
update_func = collection.update_one
if multi:
update_func = collection.update_many
result = update_func(query, update, upsert=upsert)
if full_result:
return result
elif result:
return result['n']
elif result.raw_result:
return result.raw_result['n']
except pymongo.errors.DuplicateKeyError as err:
raise NotUniqueError(u'Update failed (%s)' % six.text_type(err))
except pymongo.errors.OperationFailure as err:

@ -544,10 +560,10 @@ class BaseQuerySet(object):
write_concern=write_concern,
full_result=True, **update)

if atomic_update['updatedExisting']:
if atomic_update.raw_result['updatedExisting']:
document = self.get()
else:
document = self._document.objects.with_id(atomic_update['upserted'])
document = self._document.objects.with_id(atomic_update.upserted_id)
return document
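
With PyMongo 3, update_one/update_many return an UpdateResult rather than the old result dict, so full_result=True now hands callers that object; an illustrative caller-side sketch (BlogPost is a hypothetical model):

    result = BlogPost.objects(slug='test').update(full_result=True, set__title='Hi')
    result.raw_result    # e.g. {'n': 1, 'nModified': 1, 'ok': 1.0, 'updatedExisting': True}
    result.upserted_id   # None unless upsert=True inserted a new document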

def update_one(self, upsert=False, write_concern=None, **update):

@ -1183,6 +1199,10 @@ class BaseQuerySet(object):

pipeline = initial_pipeline + list(pipeline)

if IS_PYMONGO_3 and self._read_preference is not None:
return self._collection.with_options(read_preference=self._read_preference) \
.aggregate(pipeline, cursor={}, **kwargs)

return self._collection.aggregate(pipeline, cursor={}, **kwargs)

# JS functionality

@ -1579,6 +1599,9 @@ class BaseQuerySet(object):
if self._batch_size is not None:
self._cursor_obj.batch_size(self._batch_size)

if self._comment is not None:
self._cursor_obj.comment(self._comment)

return self._cursor_obj

def __deepcopy__(self, memo):

@ -1,3 +1,5 @@
import six

from mongoengine.errors import OperationError
from mongoengine.queryset.base import (BaseQuerySet, CASCADE, DENY, DO_NOTHING,
NULLIFY, PULL)

@ -87,10 +89,10 @@ class QuerySet(BaseQuerySet):
yield self._result_cache[pos]
pos += 1

# Raise StopIteration if we already established there were no more
# return if we already established there were no more
# docs in the db cursor.
if not self._has_more:
raise StopIteration
return

# Otherwise, populate more of the cache and repeat.
if len(self._result_cache) <= pos:

@ -112,7 +114,7 @@ class QuerySet(BaseQuerySet):
# Pull in ITER_CHUNK_SIZE docs from the database and store them in
# the result cache.
try:
for _ in xrange(ITER_CHUNK_SIZE):
for _ in six.moves.range(ITER_CHUNK_SIZE):
self._result_cache.append(self.next())
except StopIteration:
# Getting this exception means there are no more docs in the

@ -166,7 +168,7 @@ class QuerySetNoCache(BaseQuerySet):
return '.. queryset mid-iteration ..'

data = []
for _ in xrange(REPR_OUTPUT_SIZE + 1):
for _ in six.moves.range(REPR_OUTPUT_SIZE + 1):
try:
data.append(self.next())
except StopIteration:
@ -101,21 +101,8 @@ def query(_doc_cls=None, **kwargs):
value = value['_id']

elif op in ('in', 'nin', 'all', 'near') and not isinstance(value, dict):
# Raise an error if the in/nin/all/near param is not iterable. We need a
# special check for BaseDocument, because - although it's iterable - using
# it as such in the context of this method is most definitely a mistake.
BaseDocument = _import_class('BaseDocument')
if isinstance(value, BaseDocument):
raise TypeError("When using the `in`, `nin`, `all`, or "
"`near`-operators you can\'t use a "
"`Document`, you must wrap your object "
"in a list (object -> [object]).")
elif not hasattr(value, '__iter__'):
raise TypeError("The `in`, `nin`, `all`, or "
"`near`-operators must be applied to an "
"iterable (e.g. a list).")
else:
value = [field.prepare_query_value(op, v) for v in value]
# Raise an error if the in/nin/all/near param is not iterable.
value = _prepare_query_for_iterable(field, op, value)

# If we're querying a GenericReferenceField, we need to alter the
# key depending on the value:

@ -160,7 +147,7 @@ def query(_doc_cls=None, **kwargs):
if op is None or key not in mongo_query:
mongo_query[key] = value
elif key in mongo_query:
if isinstance(mongo_query[key], dict):
if isinstance(mongo_query[key], dict) and isinstance(value, dict):
mongo_query[key].update(value)
# $max/minDistance needs to come last - convert to SON
value_dict = mongo_query[key]

@ -214,30 +201,37 @@ def update(_doc_cls=None, **update):
format.
"""
mongo_update = {}

for key, value in update.items():
if key == '__raw__':
mongo_update.update(value)
continue

parts = key.split('__')

# if there is no operator, default to 'set'
if len(parts) < 3 and parts[0] not in UPDATE_OPERATORS:
parts.insert(0, 'set')

# Check for an operator and transform to mongo-style if there is
op = None
if parts[0] in UPDATE_OPERATORS:
op = parts.pop(0)
# Convert Pythonic names to Mongo equivalents
if op in ('push_all', 'pull_all'):
op = op.replace('_all', 'All')
elif op == 'dec':
operator_map = {
'push_all': 'pushAll',
'pull_all': 'pullAll',
'dec': 'inc',
'add_to_set': 'addToSet',
'set_on_insert': 'setOnInsert'
}
if op == 'dec':
# Support decrement by flipping a positive value's sign
# and using 'inc'
op = 'inc'
value = -value
elif op == 'add_to_set':
op = 'addToSet'
elif op == 'set_on_insert':
op = 'setOnInsert'
# If the operator isn't found in the operator map, op stays unchanged
op = operator_map.get(op, op)
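# Illustrative mappings under the table above (hypothetical field names):
#   inc__views=1          -> {'$inc': {'views': 1}}
#   dec__views=1          -> {'$inc': {'views': -1}}  (sign flipped above)
#   add_to_set__tags='db' -> {'$addToSet': {'tags': 'db'}}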

match = None
if parts[-1] in COMPARISON_OPERATORS:

@ -284,9 +278,15 @@ def update(_doc_cls=None, **update):
if isinstance(field, GeoJsonBaseField):
value = field.to_mongo(value)

if op == 'push' and isinstance(value, (list, tuple, set)):
if op == 'pull':
if field.required or value is not None:
if match == 'in' and not isinstance(value, dict):
value = _prepare_query_for_iterable(field, op, value)
else:
value = field.prepare_query_value(op, value)
elif op == 'push' and isinstance(value, (list, tuple, set)):
value = [field.prepare_query_value(op, v) for v in value]
elif op in (None, 'set', 'push', 'pull'):
elif op in (None, 'set', 'push'):
if field.required or value is not None:
value = field.prepare_query_value(op, value)
elif op in ('pushAll', 'pullAll'):

@ -298,6 +298,8 @@ def update(_doc_cls=None, **update):
value = field.prepare_query_value(op, value)
elif op == 'unset':
value = 1
elif op == 'inc':
value = field.prepare_query_value(op, value)

if match:
match = '$' + match

@ -321,11 +323,17 @@ def update(_doc_cls=None, **update):
field_classes = [c.__class__ for c in cleaned_fields]
field_classes.reverse()
ListField = _import_class('ListField')
if ListField in field_classes:
# Join all fields via dot notation to the last ListField
EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField')
if ListField in field_classes or EmbeddedDocumentListField in field_classes:
# Join all fields via dot notation to the last ListField or EmbeddedDocumentListField
# Then process as normal
if ListField in field_classes:
_check_field = ListField
else:
_check_field = EmbeddedDocumentListField

last_listField = len(
cleaned_fields) - field_classes.index(ListField)
cleaned_fields) - field_classes.index(_check_field)
key = '.'.join(parts[:last_listField])
parts = parts[last_listField:]
parts.insert(0, key)

@ -335,7 +343,7 @@ def update(_doc_cls=None, **update):
value = {key: value}
elif op == 'addToSet' and isinstance(value, list):
value = {key: {'$each': value}}
elif op == 'push':
elif op in ('push', 'pushAll'):
if parts[-1].isdigit():
key = parts[0]
position = int(parts[-1])

@ -344,10 +352,14 @@ def update(_doc_cls=None, **update):
if not isinstance(value, (set, tuple, list)):
value = [value]
value = {key: {'$each': value, '$position': position}}
elif isinstance(value, list):
value = {key: {'$each': value}}
else:
value = {key: value}
if op == 'pushAll':
op = 'push' # convert to non-deprecated keyword
if not isinstance(value, (set, tuple, list)):
value = [value]
value = {key: {'$each': value}}
else:
value = {key: value}
else:
value = {key: value}
key = '$' + op

@ -439,3 +451,22 @@ def _infer_geometry(value):

raise InvalidQueryError('Invalid $geometry data. Can be either a '
'dictionary or (nested) lists of coordinate(s)')


def _prepare_query_for_iterable(field, op, value):
# We need a special check for BaseDocument, because - although it's iterable - using
# it as such in the context of this method is most definitely a mistake.
BaseDocument = _import_class('BaseDocument')

if isinstance(value, BaseDocument):
raise TypeError("When using the `in`, `nin`, `all`, or "
"`near`-operators you can\'t use a "
"`Document`, you must wrap your object "
"in a list (object -> [object]).")

if not hasattr(value, '__iter__'):
raise TypeError("The `in`, `nin`, `all`, or "
"`near`-operators must be applied to an "
"iterable (e.g. a list).")

return [field.prepare_query_value(op, v) for v in value]
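# Illustration of the guard above (Doc is a hypothetical model):
#   Doc.objects(ref__in=some_doc)     -> TypeError: wrap it as [some_doc]
#   Doc.objects(tags__in=['a', 'b'])  -> each item goes through prepare_query_value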

@ -1,11 +1,11 @@
[nosetests]
verbosity=2
detailed-errors=1
tests=tests
#tests=tests
cover-package=mongoengine

[flake8]
ignore=E501,F401,F403,F405,I201
ignore=E501,F401,F403,F405,I201,I202
exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests
max-complexity=47
application-import-names=mongoengine,tests

6
setup.py
@ -70,9 +70,9 @@ setup(
name='mongoengine',
version=VERSION,
author='Harry Marr',
author_email='harry.marr@{nospam}gmail.com',
maintainer="Ross Lawley",
maintainer_email="ross.lawley@{nospam}gmail.com",
author_email='harry.marr@gmail.com',
maintainer="Stefan Wojcik",
maintainer_email="wojcikstefan@gmail.com",
url='http://mongoengine.org/',
download_url='https://github.com/MongoEngine/mongoengine/tarball/master',
license='MIT',
@ -5,6 +5,7 @@ from mongoengine import *

from mongoengine.queryset import NULLIFY, PULL
from mongoengine.connection import get_db
from tests.utils import needs_mongodb_v26

__all__ = ("ClassMethodsTest", )

@ -65,10 +66,10 @@ class ClassMethodsTest(unittest.TestCase):
"""
collection_name = 'person'
self.Person(name='Test').save()
self.assertTrue(collection_name in self.db.collection_names())
self.assertIn(collection_name, self.db.collection_names())

self.Person.drop_collection()
self.assertFalse(collection_name in self.db.collection_names())
self.assertNotIn(collection_name, self.db.collection_names())

def test_register_delete_rule(self):
"""Ensure that register delete rule adds a delete rule to the document

@ -187,6 +188,26 @@ class ClassMethodsTest(unittest.TestCase):
self.assertEqual(BlogPostWithTags.compare_indexes(), { 'missing': [], 'extra': [] })
self.assertEqual(BlogPostWithCustomField.compare_indexes(), { 'missing': [], 'extra': [] })

@needs_mongodb_v26
def test_compare_indexes_for_text_indexes(self):
""" Ensure that compare_indexes behaves correctly for text indexes """

class Doc(Document):
a = StringField()
b = StringField()
meta = {'indexes': [
{'fields': ['$a', "$b"],
'default_language': 'english',
'weights': {'a': 10, 'b': 2}
}
]}

Doc.drop_collection()
Doc.ensure_indexes()
actual = Doc.compare_indexes()
expected = {'missing': [], 'extra': []}
self.assertEqual(actual, expected)

def test_list_indexes_inheritance(self):
""" ensure that all of the indexes are listed regardless of the super-
or sub-class that we call it from

@ -319,7 +340,7 @@ class ClassMethodsTest(unittest.TestCase):
meta = {'collection': collection_name}

Person(name="Test User").save()
self.assertTrue(collection_name in self.db.collection_names())
self.assertIn(collection_name, self.db.collection_names())

user_obj = self.db[collection_name].find_one()
self.assertEqual(user_obj['name'], "Test User")

@ -328,7 +349,7 @@ class ClassMethodsTest(unittest.TestCase):
self.assertEqual(user_obj.name, "Test User")

Person.drop_collection()
self.assertFalse(collection_name in self.db.collection_names())
self.assertNotIn(collection_name, self.db.collection_names())

def test_collection_name_and_primary(self):
"""Ensure that a collection with a specified name may be used.
@ -694,7 +694,7 @@ class DeltaTest(unittest.TestCase):
organization.employees.append(person)
updates, removals = organization._delta()
self.assertEqual({}, removals)
self.assertTrue('employees' in updates)
self.assertIn('employees', updates)

def test_delta_with_dbref_false(self):
person, organization, employee = self.circular_reference_deltas_2(Document, Document, False)

@ -709,7 +709,7 @@ class DeltaTest(unittest.TestCase):
organization.employees.append(person)
updates, removals = organization._delta()
self.assertEqual({}, removals)
self.assertTrue('employees' in updates)
self.assertIn('employees', updates)

def test_nested_nested_fields_mark_as_changed(self):
class EmbeddedDoc(EmbeddedDocument):
@ -174,8 +174,8 @@ class DynamicTest(unittest.TestCase):

Employee.drop_collection()

self.assertTrue('name' in Employee._fields)
self.assertTrue('salary' in Employee._fields)
self.assertIn('name', Employee._fields)
self.assertIn('salary', Employee._fields)
self.assertEqual(Employee._get_collection_name(),
self.Person._get_collection_name())

@ -189,7 +189,7 @@ class DynamicTest(unittest.TestCase):
self.assertEqual(1, Employee.objects(age=20).count())

joe_bloggs = self.Person.objects.first()
self.assertTrue(isinstance(joe_bloggs, Employee))
self.assertIsInstance(joe_bloggs, Employee)

def test_embedded_dynamic_document(self):
"""Test dynamic embedded documents"""
@ -70,7 +70,7 @@ class IndexesTest(unittest.TestCase):
self.assertEqual(len(info), 4)
info = [value['key'] for key, value in info.iteritems()]
for expected in expected_specs:
self.assertTrue(expected['fields'] in info)
self.assertIn(expected['fields'], info)

def _index_test_inheritance(self, InheritFrom):

@ -102,7 +102,7 @@ class IndexesTest(unittest.TestCase):
self.assertEqual(len(info), 4)
info = [value['key'] for key, value in info.iteritems()]
for expected in expected_specs:
self.assertTrue(expected['fields'] in info)
self.assertIn(expected['fields'], info)

class ExtendedBlogPost(BlogPost):
title = StringField()

@ -117,7 +117,7 @@ class IndexesTest(unittest.TestCase):
info = ExtendedBlogPost.objects._collection.index_information()
info = [value['key'] for key, value in info.iteritems()]
for expected in expected_specs:
self.assertTrue(expected['fields'] in info)
self.assertIn(expected['fields'], info)

def test_indexes_document_inheritance(self):
"""Ensure that indexes are used when meta[indexes] is specified for

@ -226,7 +226,7 @@ class IndexesTest(unittest.TestCase):
list(Person.objects)
info = Person.objects._collection.index_information()
info = [value['key'] for key, value in info.iteritems()]
self.assertTrue([('rank.title', 1)] in info)
self.assertIn([('rank.title', 1)], info)

def test_explicit_geo2d_index(self):
"""Ensure that geo2d indexes work when created via meta[indexes]

@ -246,7 +246,7 @@ class IndexesTest(unittest.TestCase):
Place.ensure_indexes()
info = Place._get_collection().index_information()
info = [value['key'] for key, value in info.iteritems()]
self.assertTrue([('location.point', '2d')] in info)
self.assertIn([('location.point', '2d')], info)

def test_explicit_geo2d_index_embedded(self):
"""Ensure that geo2d indexes work when created via meta[indexes]

@ -269,7 +269,7 @@ class IndexesTest(unittest.TestCase):
Place.ensure_indexes()
info = Place._get_collection().index_information()
info = [value['key'] for key, value in info.iteritems()]
self.assertTrue([('current.location.point', '2d')] in info)
self.assertIn([('current.location.point', '2d')], info)

def test_explicit_geosphere_index(self):
"""Ensure that geosphere indexes work when created via meta[indexes]

@ -289,7 +289,7 @@ class IndexesTest(unittest.TestCase):
Place.ensure_indexes()
info = Place._get_collection().index_information()
info = [value['key'] for key, value in info.iteritems()]
self.assertTrue([('location.point', '2dsphere')] in info)
self.assertIn([('location.point', '2dsphere')], info)

def test_explicit_geohaystack_index(self):
"""Ensure that geohaystack indexes work when created via meta[indexes]

@ -311,7 +311,7 @@ class IndexesTest(unittest.TestCase):
Place.ensure_indexes()
info = Place._get_collection().index_information()
info = [value['key'] for key, value in info.iteritems()]
self.assertTrue([('location.point', 'geoHaystack')] in info)
self.assertIn([('location.point', 'geoHaystack')], info)

def test_create_geohaystack_index(self):
"""Ensure that geohaystack indexes can be created

@ -323,7 +323,7 @@ class IndexesTest(unittest.TestCase):
Place.create_index({'fields': (')location.point', 'name')}, bucketSize=10)
info = Place._get_collection().index_information()
info = [value['key'] for key, value in info.iteritems()]
self.assertTrue([('location.point', 'geoHaystack'), ('name', 1)] in info)
self.assertIn([('location.point', 'geoHaystack'), ('name', 1)], info)

def test_dictionary_indexes(self):
"""Ensure that indexes are used when meta[indexes] contains

@ -356,7 +356,7 @@ class IndexesTest(unittest.TestCase):
value.get('unique', False),
value.get('sparse', False))
for key, value in info.iteritems()]
self.assertTrue(([('addDate', -1)], True, True) in info)
self.assertIn(([('addDate', -1)], True, True), info)

BlogPost.drop_collection()

@ -803,7 +803,7 @@ class IndexesTest(unittest.TestCase):
info = BlogPost.objects._collection.index_information()
info = [value['key'] for key, value in info.iteritems()]
index_item = [('_id', 1), ('comments.comment_id', 1)]
self.assertTrue(index_item in info)
self.assertIn(index_item, info)

def test_compound_key_embedded(self):

@ -850,8 +850,8 @@ class IndexesTest(unittest.TestCase):

info = MyDoc.objects._collection.index_information()
info = [value['key'] for key, value in info.iteritems()]
self.assertTrue([('provider_ids.foo', 1)] in info)
self.assertTrue([('provider_ids.bar', 1)] in info)
self.assertIn([('provider_ids.foo', 1)], info)
self.assertIn([('provider_ids.bar', 1)], info)

def test_sparse_compound_indexes(self):

@ -876,9 +876,9 @@ class IndexesTest(unittest.TestCase):
}

indexes = Book.objects._collection.index_information()
self.assertTrue("title_text" in indexes)
self.assertIn("title_text", indexes)
key = indexes["title_text"]["key"]
self.assertTrue(('_fts', 'text') in key)
self.assertIn(('_fts', 'text'), key)

def test_hashed_indexes(self):

@ -889,8 +889,8 @@ class IndexesTest(unittest.TestCase):
}

indexes = Book.objects._collection.index_information()
self.assertTrue("ref_id_hashed" in indexes)
self.assertTrue(('ref_id', 'hashed') in indexes["ref_id_hashed"]["key"])
self.assertIn("ref_id_hashed", indexes)
self.assertIn(('ref_id', 'hashed'), indexes["ref_id_hashed"]["key"])

def test_indexes_after_database_drop(self):
"""

@ -1013,7 +1013,7 @@ class IndexesTest(unittest.TestCase):
TestDoc.ensure_indexes()

index_info = TestDoc._get_collection().index_information()
self.assertTrue('shard_1_1__cls_1_txt_1_1' in index_info)
self.assertIn('shard_1_1__cls_1_txt_1_1', index_info)


if __name__ == '__main__':
@ -268,7 +268,7 @@ class InheritanceTest(unittest.TestCase):

collection = self.db[Animal._get_collection_name()]
obj = collection.find_one()
self.assertFalse('_cls' in obj)
self.assertNotIn('_cls', obj)

def test_cant_turn_off_inheritance_on_subclass(self):
"""Ensure if inheritance is on in a subclass you can't turn it off.

@ -298,7 +298,7 @@ class InheritanceTest(unittest.TestCase):

# Check that _cls isn't present in simple documents
doc = Animal(name='dog')
self.assertFalse('_cls' in doc.to_mongo())
self.assertNotIn('_cls', doc.to_mongo())

def test_abstract_handle_ids_in_metaclass_properly(self):

@ -374,14 +374,14 @@ class InheritanceTest(unittest.TestCase):
pass

doc = Comment(content='test')
self.assertFalse('_cls' in doc.to_mongo())
self.assertNotIn('_cls', doc.to_mongo())

class Comment(EmbeddedDocument):
content = StringField()
meta = {'allow_inheritance': True}

doc = Comment(content='test')
self.assertTrue('_cls' in doc.to_mongo())
self.assertIn('_cls', doc.to_mongo())

def test_document_inheritance(self):
"""Ensure multiple inheritance of abstract documents

@ -434,8 +434,8 @@ class InheritanceTest(unittest.TestCase):
for cls in [Animal, Fish, Guppy]:
self.assertEqual(cls._meta[k], v)

self.assertFalse('collection' in Animal._meta)
self.assertFalse('collection' in Mammal._meta)
self.assertNotIn('collection', Animal._meta)
self.assertNotIn('collection', Mammal._meta)

self.assertEqual(Animal._get_collection_name(), None)
self.assertEqual(Mammal._get_collection_name(), None)
@ -8,9 +8,12 @@ import weakref

from datetime import datetime
from bson import DBRef, ObjectId
from pymongo.errors import DuplicateKeyError

from tests import fixtures
from tests.fixtures import (PickleEmbedded, PickleTest, PickleSignalsTest,
PickleDynamicEmbedded, PickleDynamicTest)
from tests.utils import MongoDBTestCase

from mongoengine import *
from mongoengine.base import get_document, _document_registry

@ -30,12 +33,9 @@ TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__),
__all__ = ("InstanceTest",)


class InstanceTest(unittest.TestCase):
class InstanceTest(MongoDBTestCase):

def setUp(self):
connect(db='mongoenginetest')
self.db = get_db()

class Job(EmbeddedDocument):
name = StringField()
years = IntField()

@ -357,7 +357,7 @@ class InstanceTest(unittest.TestCase):

user_son = User.objects._collection.find_one()
self.assertEqual(user_son['_id'], 'test')
self.assertTrue('username' not in user_son['_id'])
self.assertNotIn('username', user_son['_id'])

User.drop_collection()

@ -370,7 +370,7 @@ class InstanceTest(unittest.TestCase):

user_son = User.objects._collection.find_one()
self.assertEqual(user_son['_id'], 'mongo')
self.assertTrue('username' not in user_son['_id'])
self.assertNotIn('username', user_son['_id'])

def test_document_not_registered(self):
class Place(Document):

@ -476,6 +476,24 @@ class InstanceTest(unittest.TestCase):
doc.save()
doc.reload()

def test_reload_with_changed_fields(self):
"""Ensures reloading will not affect changed fields"""
class User(Document):
name = StringField()
number = IntField()
User.drop_collection()

user = User(name="Bob", number=1).save()
user.name = "John"
user.number = 2

self.assertEqual(user._get_changed_fields(), ['name', 'number'])
user.reload('number')
self.assertEqual(user._get_changed_fields(), ['name'])
user.save()
user.reload()
self.assertEqual(user.name, "John")

def test_reload_referencing(self):
"""Ensures reloading updates weakrefs correctly."""
class Embedded(EmbeddedDocument):

@ -521,7 +539,7 @@ class InstanceTest(unittest.TestCase):
doc.save()
doc.dict_field['extra'] = 1
doc = doc.reload(10, 'list_field')
self.assertEqual(doc._get_changed_fields(), [])
self.assertEqual(doc._get_changed_fields(), ['dict_field.extra'])
self.assertEqual(len(doc.list_field), 5)
self.assertEqual(len(doc.dict_field), 3)
self.assertEqual(len(doc.embedded_field.list_field), 4)

@ -532,21 +550,14 @@ class InstanceTest(unittest.TestCase):
pass

f = Foo()
try:
with self.assertRaises(Foo.DoesNotExist):
f.reload()
except Foo.DoesNotExist:
pass
except Exception:
self.assertFalse("Threw wrong exception")

f.save()
f.delete()
try:

with self.assertRaises(Foo.DoesNotExist):
f.reload()
except Foo.DoesNotExist:
pass
except Exception:
self.assertFalse("Threw wrong exception")

def test_reload_of_non_strict_with_special_field_name(self):
"""Ensures reloading works for documents with meta strict == False."""

@ -583,10 +594,10 @@ class InstanceTest(unittest.TestCase):
# Length = length(assigned fields + id)
self.assertEqual(len(person), 5)

self.assertTrue('age' in person)
self.assertIn('age', person)
person.age = None
self.assertFalse('age' in person)
self.assertFalse('nationality' in person)
self.assertNotIn('age', person)
self.assertNotIn('nationality', person)

def test_embedded_document_to_mongo(self):
class Person(EmbeddedDocument):

@ -616,8 +627,8 @@ class InstanceTest(unittest.TestCase):
class Comment(EmbeddedDocument):
content = StringField()

self.assertTrue('content' in Comment._fields)
self.assertFalse('id' in Comment._fields)
self.assertIn('content', Comment._fields)
self.assertNotIn('id', Comment._fields)

def test_embedded_document_instance(self):
"""Ensure that embedded documents can reference parent instance."""

@ -716,12 +727,12 @@ class InstanceTest(unittest.TestCase):

t = TestDocument(status="draft", pub_date=datetime.now())

try:
with self.assertRaises(ValidationError) as cm:
t.save()
except ValidationError as e:
expect_msg = "Draft entries may not have a publication date."
self.assertTrue(expect_msg in e.message)
self.assertEqual(e.to_dict(), {'__all__': expect_msg})

expected_msg = "Draft entries may not have a publication date."
self.assertIn(expected_msg, cm.exception.message)
self.assertEqual(cm.exception.to_dict(), {'__all__': expected_msg})

t = TestDocument(status="published")
t.save(clean=False)

@ -755,12 +766,13 @@ class InstanceTest(unittest.TestCase):
TestDocument.drop_collection()

t = TestDocument(doc=TestEmbeddedDocument(x=10, y=25, z=15))
try:

with self.assertRaises(ValidationError) as cm:
t.save()
except ValidationError as e:
expect_msg = "Value of z != x + y"
self.assertTrue(expect_msg in e.message)
self.assertEqual(e.to_dict(), {'doc': {'__all__': expect_msg}})

expected_msg = "Value of z != x + y"
self.assertIn(expected_msg, cm.exception.message)
self.assertEqual(cm.exception.to_dict(), {'doc': {'__all__': expected_msg}})

t = TestDocument(doc=TestEmbeddedDocument(x=10, y=25)).save()
self.assertEqual(t.doc.z, 35)

@ -1341,6 +1353,23 @@ class InstanceTest(unittest.TestCase):
site = Site.objects.first()
self.assertEqual(site.page.log_message, "Error: Dummy message")

def test_update_list_field(self):
"""Test update on `ListField` with $pull + $in.
"""
class Doc(Document):
foo = ListField(StringField())

Doc.drop_collection()
doc = Doc(foo=['a', 'b', 'c'])
doc.save()

# Update
doc = Doc.objects.first()
doc.update(pull__foo__in=['a', 'c'])

doc = Doc.objects.first()
self.assertEqual(doc.foo, ['b'])

def test_embedded_update_db_field(self):
"""Test update on `EmbeddedDocumentField` fields when db_field
is other than default.

@ -1426,9 +1455,9 @@ class InstanceTest(unittest.TestCase):
user = User.objects.first()
# Even if stored as ObjectIds internally mongoengine uses DBRefs
# as ObjectIds aren't automatically dereferenced
self.assertTrue(isinstance(user._data['orgs'][0], DBRef))
self.assertTrue(isinstance(user.orgs[0], Organization))
self.assertTrue(isinstance(user._data['orgs'][0], Organization))
self.assertIsInstance(user._data['orgs'][0], DBRef)
self.assertIsInstance(user.orgs[0], Organization)
self.assertIsInstance(user._data['orgs'][0], Organization)

# Changing a value
with query_counter() as q:

@ -1808,9 +1837,8 @@ class InstanceTest(unittest.TestCase):
post_obj = BlogPost.objects.first()

# Test laziness
self.assertTrue(isinstance(post_obj._data['author'],
bson.DBRef))
self.assertTrue(isinstance(post_obj.author, self.Person))
self.assertIsInstance(post_obj._data['author'], bson.DBRef)
self.assertIsInstance(post_obj.author, self.Person)
self.assertEqual(post_obj.author.name, 'Test User')

# Ensure that the dereferenced object may be changed and saved

@ -1884,6 +1912,25 @@ class InstanceTest(unittest.TestCase):
author.delete()
self.assertEqual(BlogPost.objects.count(), 0)

def test_reverse_delete_rule_pull(self):
"""Ensure that a referenced document is also deleted with
pull.
"""
class Record(Document):
name = StringField()
children = ListField(ReferenceField('self', reverse_delete_rule=PULL))

Record.drop_collection()

parent_record = Record(name='parent').save()
child_record = Record(name='child').save()
parent_record.children.append(child_record)
parent_record.save()

child_record.delete()
self.assertEqual(Record.objects(name='parent').get().children, [])

def test_reverse_delete_rule_with_custom_id_field(self):
"""Ensure that a referenced document with custom primary key
is also deleted upon deletion.

@ -2197,12 +2244,12 @@ class InstanceTest(unittest.TestCase):
# Make sure docs are properly identified in a list (__eq__ is used
# for the comparison).
all_user_list = list(User.objects.all())
self.assertTrue(u1 in all_user_list)
self.assertTrue(u2 in all_user_list)
self.assertTrue(u3 in all_user_list)
self.assertTrue(u4 not in all_user_list) # New object
self.assertTrue(b1 not in all_user_list) # Other object
self.assertTrue(b2 not in all_user_list) # Other object
self.assertIn(u1, all_user_list)
self.assertIn(u2, all_user_list)
self.assertIn(u3, all_user_list)
self.assertNotIn(u4, all_user_list) # New object
self.assertNotIn(b1, all_user_list) # Other object
self.assertNotIn(b2, all_user_list) # Other object

# Make sure docs can be used as keys in a dict (__hash__ is used
# for hashing the docs).

@ -2220,10 +2267,10 @@ class InstanceTest(unittest.TestCase):
# Make sure docs are properly identified in a set (__hash__ is used
# for hashing the docs).
all_user_set = set(User.objects.all())
self.assertTrue(u1 in all_user_set)
self.assertTrue(u4 not in all_user_set)
self.assertTrue(b1 not in all_user_list)
self.assertTrue(b2 not in all_user_list)
self.assertIn(u1, all_user_set)
self.assertNotIn(u4, all_user_set)
self.assertNotIn(b1, all_user_list)
self.assertNotIn(b2, all_user_list)

# Make sure duplicate docs aren't accepted in the set
self.assertEqual(len(all_user_set), 3)

@ -2924,7 +2971,7 @@ class InstanceTest(unittest.TestCase):
Person(name="Harry Potter").save()

person = Person.objects.first()
self.assertTrue('id' in person._data.keys())
self.assertIn('id', person._data.keys())
self.assertEqual(person._data.get('id'), person.id)

def test_complex_nesting_document_and_embedded_document(self):

@ -3016,36 +3063,36 @@ class InstanceTest(unittest.TestCase):

dbref2 = f._data['test2']
obj2 = f.test2
self.assertTrue(isinstance(dbref2, DBRef))
self.assertTrue(isinstance(obj2, Test2))
self.assertTrue(obj2.id == dbref2.id)
self.assertTrue(obj2 == dbref2)
self.assertTrue(dbref2 == obj2)
self.assertIsInstance(dbref2, DBRef)
self.assertIsInstance(obj2, Test2)
self.assertEqual(obj2.id, dbref2.id)
self.assertEqual(obj2, dbref2)
self.assertEqual(dbref2, obj2)

dbref3 = f._data['test3']
obj3 = f.test3
self.assertTrue(isinstance(dbref3, DBRef))
self.assertTrue(isinstance(obj3, Test3))
self.assertTrue(obj3.id == dbref3.id)
self.assertTrue(obj3 == dbref3)
self.assertTrue(dbref3 == obj3)
self.assertIsInstance(dbref3, DBRef)
self.assertIsInstance(obj3, Test3)
self.assertEqual(obj3.id, dbref3.id)
self.assertEqual(obj3, dbref3)
self.assertEqual(dbref3, obj3)

self.assertTrue(obj2.id == obj3.id)
self.assertTrue(dbref2.id == dbref3.id)
self.assertFalse(dbref2 == dbref3)
self.assertFalse(dbref3 == dbref2)
self.assertTrue(dbref2 != dbref3)
self.assertTrue(dbref3 != dbref2)
self.assertEqual(obj2.id, obj3.id)
self.assertEqual(dbref2.id, dbref3.id)
self.assertNotEqual(dbref2, dbref3)
self.assertNotEqual(dbref3, dbref2)
self.assertNotEqual(dbref2, dbref3)
self.assertNotEqual(dbref3, dbref2)

self.assertFalse(obj2 == dbref3)
self.assertFalse(dbref3 == obj2)
self.assertTrue(obj2 != dbref3)
self.assertTrue(dbref3 != obj2)
self.assertNotEqual(obj2, dbref3)
self.assertNotEqual(dbref3, obj2)
self.assertNotEqual(obj2, dbref3)
self.assertNotEqual(dbref3, obj2)

self.assertFalse(obj3 == dbref2)
self.assertFalse(dbref2 == obj3)
self.assertTrue(obj3 != dbref2)
self.assertTrue(dbref2 != obj3)
self.assertNotEqual(obj3, dbref2)
self.assertNotEqual(dbref2, obj3)
self.assertNotEqual(obj3, dbref2)
self.assertNotEqual(dbref2, obj3)

def test_default_values(self):
class Person(Document):

@ -3094,6 +3141,64 @@ class InstanceTest(unittest.TestCase):
self.assertEquals(p.id, None)
p.id = "12345" # in case it is not working: "OperationError: Shard Keys are immutable..." will be raised here

def test_from_son_created_False_without_id(self):
class MyPerson(Document):
name = StringField()

MyPerson.objects.delete()

p = MyPerson.from_json('{"name": "a_fancy_name"}', created=False)
self.assertFalse(p._created)
self.assertIsNone(p.id)
p.save()
self.assertIsNotNone(p.id)
saved_p = MyPerson.objects.get(id=p.id)
self.assertEqual(saved_p.name, 'a_fancy_name')

def test_from_son_created_False_with_id(self):
# 1854
class MyPerson(Document):
name = StringField()

MyPerson.objects.delete()

p = MyPerson.from_json('{"_id": "5b85a8b04ec5dc2da388296e", "name": "a_fancy_name"}', created=False)
self.assertFalse(p._created)
self.assertEqual(p._changed_fields, [])
self.assertEqual(p.name, 'a_fancy_name')
self.assertEqual(p.id, ObjectId('5b85a8b04ec5dc2da388296e'))
p.save()

with self.assertRaises(DoesNotExist):
# Since created=False, we gave an id in the json, and _changed_fields
# is empty, mongoengine assumes that the document already exists
# with that structure, so calling .save() doesn't save anything
MyPerson.objects.get(id=p.id)

self.assertFalse(p._created)
p.name = 'a new fancy name'
self.assertEqual(p._changed_fields, ['name'])
p.save()
saved_p = MyPerson.objects.get(id=p.id)
self.assertEqual(saved_p.name, p.name)

def test_from_son_created_True_with_an_id(self):
class MyPerson(Document):
name = StringField()

MyPerson.objects.delete()

p = MyPerson.from_json('{"_id": "5b85a8b04ec5dc2da388296e", "name": "a_fancy_name"}', created=True)
self.assertTrue(p._created)
self.assertEqual(p._changed_fields, [])
self.assertEqual(p.name, 'a_fancy_name')
self.assertEqual(p.id, ObjectId('5b85a8b04ec5dc2da388296e'))
p.save()

saved_p = MyPerson.objects.get(id=p.id)
self.assertEqual(saved_p, p)
self.assertEqual(p.name, 'a_fancy_name')

def test_null_field(self):
# 734
class User(Document):

@ -3183,6 +3288,34 @@ class InstanceTest(unittest.TestCase):
blog.reload()
self.assertEqual(blog.tags, ['mongodb', 'code', 'python'])

def test_push_nested_list(self):
"""Ensure that push update works in nested list"""
class BlogPost(Document):
slug = StringField()
tags = ListField()

blog = BlogPost(slug="test").save()
blog.update(push__tags=["value1", 123])
blog.reload()
self.assertEqual(blog.tags, [["value1", 123]])

def test_accessing_objects_with_indexes_error(self):
insert_result = self.db.company.insert_many([{'name': 'Foo'},
{'name': 'Foo'}]) # Force 2 docs with the same name
REF_OID = insert_result.inserted_ids[0]
self.db.user.insert_one({'company': REF_OID})

class Company(Document):
name = StringField(unique=True)

class User(Document):
company = ReferenceField(Company)


# Ensure index creation exceptions aren't swallowed (#1688)
with self.assertRaises(DuplicateKeyError):
User.objects().select_related()


if __name__ == '__main__':
unittest.main()
@ -20,16 +20,16 @@ class ValidatorErrorTest(unittest.TestCase):

# 1st level error schema
error.errors = {'1st': ValidationError('bad 1st'), }
self.assertTrue('1st' in error.to_dict())
self.assertIn('1st', error.to_dict())
self.assertEqual(error.to_dict()['1st'], 'bad 1st')

# 2nd level error schema
error.errors = {'1st': ValidationError('bad 1st', errors={
'2nd': ValidationError('bad 2nd'),
})}
self.assertTrue('1st' in error.to_dict())
self.assertTrue(isinstance(error.to_dict()['1st'], dict))
self.assertTrue('2nd' in error.to_dict()['1st'])
self.assertIn('1st', error.to_dict())
self.assertIsInstance(error.to_dict()['1st'], dict)
self.assertIn('2nd', error.to_dict()['1st'])
self.assertEqual(error.to_dict()['1st']['2nd'], 'bad 2nd')

# moar levels
@ -40,10 +40,10 @@ class ValidatorErrorTest(unittest.TestCase):
}),
}),
})}
self.assertTrue('1st' in error.to_dict())
self.assertTrue('2nd' in error.to_dict()['1st'])
self.assertTrue('3rd' in error.to_dict()['1st']['2nd'])
self.assertTrue('4th' in error.to_dict()['1st']['2nd']['3rd'])
self.assertIn('1st', error.to_dict())
self.assertIn('2nd', error.to_dict()['1st'])
self.assertIn('3rd', error.to_dict()['1st']['2nd'])
self.assertIn('4th', error.to_dict()['1st']['2nd']['3rd'])
self.assertEqual(error.to_dict()['1st']['2nd']['3rd']['4th'],
'Inception')

@ -58,7 +58,7 @@ class ValidatorErrorTest(unittest.TestCase):
try:
User().validate()
except ValidationError as e:
self.assertTrue("User:None" in e.message)
self.assertIn("User:None", e.message)
self.assertEqual(e.to_dict(), {
'username': 'Field is required',
'name': 'Field is required'})
@ -68,7 +68,7 @@ class ValidatorErrorTest(unittest.TestCase):
try:
user.save()
except ValidationError as e:
self.assertTrue("User:RossC0" in e.message)
self.assertIn("User:RossC0", e.message)
self.assertEqual(e.to_dict(), {
'name': 'Field is required'})

@ -116,7 +116,7 @@ class ValidatorErrorTest(unittest.TestCase):
try:
Doc(id="bad").validate()
except ValidationError as e:
self.assertTrue("SubDoc:None" in e.message)
self.assertIn("SubDoc:None", e.message)
self.assertEqual(e.to_dict(), {
"e": {'val': 'OK could not be converted to int'}})

@ -127,14 +127,14 @@ class ValidatorErrorTest(unittest.TestCase):
doc = Doc.objects.first()
keys = doc._data.keys()
self.assertEqual(2, len(keys))
self.assertTrue('e' in keys)
self.assertTrue('id' in keys)
self.assertIn('e', keys)
self.assertIn('id', keys)

doc.e.val = "OK"
try:
doc.save()
except ValidationError as e:
self.assertTrue("Doc:test" in e.message)
self.assertIn("Doc:test", e.message)
self.assertEqual(e.to_dict(), {
"e": {'val': 'OK could not be converted to int'}})

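For reference, a minimal sketch of the to_dict() shape these assertions rely on (constructed the same way as the test's own ValidationError calls):

    error = ValidationError('root', errors={
        '1st': ValidationError('bad 1st', errors={'2nd': ValidationError('bad 2nd')}),
    })
    error.to_dict()  # {'1st': {'2nd': 'bad 2nd'}}, nested dicts with leaf messages as strings
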
File diff suppressed because it is too large
@ -53,8 +53,8 @@ class FileTest(MongoDBTestCase):
putfile.save()

result = PutFile.objects.first()
self.assertTrue(putfile == result)
self.assertEqual("%s" % result.the_file, "<GridFSProxy: hello>")
self.assertEqual(putfile, result)
self.assertEqual("%s" % result.the_file, "<GridFSProxy: hello (%s)>" % result.the_file.grid_id)
self.assertEqual(result.the_file.read(), text)
self.assertEqual(result.the_file.content_type, content_type)
result.the_file.delete()  # Remove file from GridFS
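The repr change being asserted: GridFSProxy's string form now appends the GridFS file id, so a saved file renders along the lines of

    "%s" % result.the_file  # "<GridFSProxy: hello (5b85a8b04ec5dc2da388296e)>", ObjectId shown is illustrative only

instead of the old "<GridFSProxy: hello>".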
@ -71,7 +71,7 @@ class FileTest(MongoDBTestCase):
putfile.save()

result = PutFile.objects.first()
self.assertTrue(putfile == result)
self.assertEqual(putfile, result)
self.assertEqual(result.the_file.read(), text)
self.assertEqual(result.the_file.content_type, content_type)
result.the_file.delete()
@ -96,7 +96,7 @@ class FileTest(MongoDBTestCase):
streamfile.save()

result = StreamFile.objects.first()
self.assertTrue(streamfile == result)
self.assertEqual(streamfile, result)
self.assertEqual(result.the_file.read(), text + more_text)
self.assertEqual(result.the_file.content_type, content_type)
result.the_file.seek(0)
@ -132,7 +132,7 @@ class FileTest(MongoDBTestCase):
streamfile.save()

result = StreamFile.objects.first()
self.assertTrue(streamfile == result)
self.assertEqual(streamfile, result)
self.assertEqual(result.the_file.read(), text + more_text)
# self.assertEqual(result.the_file.content_type, content_type)
result.the_file.seek(0)
@ -161,7 +161,7 @@ class FileTest(MongoDBTestCase):
setfile.save()

result = SetFile.objects.first()
self.assertTrue(setfile == result)
self.assertEqual(setfile, result)
self.assertEqual(result.the_file.read(), text)

# Try replacing file with new one
@ -169,7 +169,7 @@ class FileTest(MongoDBTestCase):
result.save()

result = SetFile.objects.first()
self.assertTrue(setfile == result)
self.assertEqual(setfile, result)
self.assertEqual(result.the_file.read(), more_text)
result.the_file.delete()

@ -231,8 +231,8 @@ class FileTest(MongoDBTestCase):
test_file_dupe = TestFile()
data = test_file_dupe.the_file.read()  # Should be None

self.assertTrue(test_file.name != test_file_dupe.name)
self.assertTrue(test_file.the_file.read() != data)
self.assertNotEqual(test_file.name, test_file_dupe.name)
self.assertNotEqual(test_file.the_file.read(), data)

TestFile.drop_collection()

@ -291,7 +291,7 @@ class FileTest(MongoDBTestCase):
the_file = FileField()

test_file = TestFile()
self.assertFalse(test_file.the_file in [{"test": 1}])
self.assertNotIn(test_file.the_file, [{"test": 1}])

def test_file_disk_space(self):
""" Test disk space usage when we delete/replace a file """

@ -298,9 +298,9 @@ class GeoFieldTest(unittest.TestCase):
polygon = PolygonField()

geo_indicies = Event._geo_indices()
self.assertTrue({'fields': [('line', '2dsphere')]} in geo_indicies)
self.assertTrue({'fields': [('polygon', '2dsphere')]} in geo_indicies)
self.assertTrue({'fields': [('point', '2dsphere')]} in geo_indicies)
self.assertIn({'fields': [('line', '2dsphere')]}, geo_indicies)
self.assertIn({'fields': [('polygon', '2dsphere')]}, geo_indicies)
self.assertIn({'fields': [('point', '2dsphere')]}, geo_indicies)

def test_indexes_2dsphere_embedded(self):
"""Ensure that indexes are created automatically for GeoPointFields.
@ -316,9 +316,9 @@ class GeoFieldTest(unittest.TestCase):
venue = EmbeddedDocumentField(Venue)

geo_indicies = Event._geo_indices()
self.assertTrue({'fields': [('venue.line', '2dsphere')]} in geo_indicies)
self.assertTrue({'fields': [('venue.polygon', '2dsphere')]} in geo_indicies)
self.assertTrue({'fields': [('venue.point', '2dsphere')]} in geo_indicies)
self.assertIn({'fields': [('venue.line', '2dsphere')]}, geo_indicies)
self.assertIn({'fields': [('venue.polygon', '2dsphere')]}, geo_indicies)
self.assertIn({'fields': [('venue.point', '2dsphere')]}, geo_indicies)

def test_geo_indexes_recursion(self):

@ -335,9 +335,9 @@ class GeoFieldTest(unittest.TestCase):

Parent(name='Berlin').save()
info = Parent._get_collection().index_information()
self.assertFalse('location_2d' in info)
self.assertNotIn('location_2d', info)
info = Location._get_collection().index_information()
self.assertTrue('location_2d' in info)
self.assertIn('location_2d', info)

self.assertEqual(len(Parent._geo_indices()), 0)
self.assertEqual(len(Location._geo_indices()), 1)
@ -181,7 +181,7 @@ class OnlyExcludeAllTest(unittest.TestCase):
employee.save()

obj = self.Person.objects(id=employee.id).only('age').get()
self.assertTrue(isinstance(obj, Employee))
self.assertIsInstance(obj, Employee)

# Check field names are looked up properly
obj = Employee.objects(id=employee.id).only('salary').get()

|
||||
location__within_distance=point_and_distance)
|
||||
self.assertEqual(events.count(), 2)
|
||||
events = list(events)
|
||||
self.assertTrue(event2 not in events)
|
||||
self.assertTrue(event1 in events)
|
||||
self.assertTrue(event3 in events)
|
||||
self.assertNotIn(event2, events)
|
||||
self.assertIn(event1, events)
|
||||
self.assertIn(event3, events)
|
||||
|
||||
# find events within 10 degrees of san francisco
|
||||
point_and_distance = [[-122.415579, 37.7566023], 10]
|
||||
@ -285,9 +285,9 @@ class GeoQueriesTest(MongoDBTestCase):
|
||||
location__geo_within_center=point_and_distance)
|
||||
self.assertEqual(events.count(), 2)
|
||||
events = list(events)
|
||||
self.assertTrue(event2 not in events)
|
||||
self.assertTrue(event1 in events)
|
||||
self.assertTrue(event3 in events)
|
||||
self.assertNotIn(event2, events)
|
||||
self.assertIn(event1, events)
|
||||
self.assertIn(event3, events)
|
||||
|
||||
def _test_embedded(self, point_field_class):
|
||||
"""Helper test method ensuring given point field class works
|
||||
@ -510,6 +510,24 @@ class GeoQueriesTest(MongoDBTestCase):
|
||||
roads = Road.objects.filter(poly__geo_intersects={"$geometry": polygon}).count()
|
||||
self.assertEqual(1, roads)
|
||||
|
||||
def test_aspymongo_with_only(self):
|
||||
"""Ensure as_pymongo works with only"""
|
||||
class Place(Document):
|
||||
location = PointField()
|
||||
|
||||
Place.drop_collection()
|
||||
p = Place(location=[24.946861267089844, 60.16311983618494])
|
||||
p.save()
|
||||
qs = Place.objects().only('location')
|
||||
self.assertDictEqual(
|
||||
qs.as_pymongo()[0]['location'],
|
||||
{u'type': u'Point',
|
||||
u'coordinates': [
|
||||
24.946861267089844,
|
||||
60.16311983618494]
|
||||
}
|
||||
)
|
||||
|
||||
def test_2dsphere_point_sets_correctly(self):
|
||||
class Location(Document):
|
||||
loc = PointField()
|
||||
|
@ -3,12 +3,14 @@
|
||||
import datetime
|
||||
import unittest
|
||||
import uuid
|
||||
from decimal import Decimal
|
||||
|
||||
from bson import DBRef, ObjectId
|
||||
from nose.plugins.skip import SkipTest
|
||||
import pymongo
|
||||
from pymongo.errors import ConfigurationError
|
||||
from pymongo.read_preferences import ReadPreference
|
||||
from pymongo.results import UpdateResult
|
||||
import six
|
||||
|
||||
from mongoengine import *
|
||||
@ -57,11 +59,10 @@ class QuerySetTest(unittest.TestCase):
|
||||
def test_initialisation(self):
|
||||
"""Ensure that a QuerySet is correctly initialised by QuerySetManager.
|
||||
"""
|
||||
self.assertTrue(isinstance(self.Person.objects, QuerySet))
|
||||
self.assertIsInstance(self.Person.objects, QuerySet)
|
||||
self.assertEqual(self.Person.objects._collection.name,
|
||||
self.Person._get_collection_name())
|
||||
self.assertTrue(isinstance(self.Person.objects._collection,
|
||||
pymongo.collection.Collection))
|
||||
self.assertIsInstance(self.Person.objects._collection, pymongo.collection.Collection)
|
||||
|
||||
def test_cannot_perform_joins_references(self):
|
||||
|
||||
@ -87,8 +88,8 @@ class QuerySetTest(unittest.TestCase):
|
||||
self.assertEqual(people.count(), 2)
|
||||
results = list(people)
|
||||
|
||||
self.assertTrue(isinstance(results[0], self.Person))
|
||||
self.assertTrue(isinstance(results[0].id, (ObjectId, str, unicode)))
|
||||
self.assertIsInstance(results[0], self.Person)
|
||||
self.assertIsInstance(results[0].id, (ObjectId, str, unicode))
|
||||
|
||||
self.assertEqual(results[0], user_a)
|
||||
self.assertEqual(results[0].name, 'User A')
|
||||
@ -228,7 +229,7 @@ class QuerySetTest(unittest.TestCase):
|
||||
|
||||
# Retrieve the first person from the database
|
||||
person = self.Person.objects.first()
|
||||
self.assertTrue(isinstance(person, self.Person))
|
||||
self.assertIsInstance(person, self.Person)
|
||||
self.assertEqual(person.name, "User A")
|
||||
self.assertEqual(person.age, 20)
|
||||
|
||||
@ -594,6 +595,20 @@ class QuerySetTest(unittest.TestCase):
|
||||
Scores.objects(id=scores.id).update(max__high_score=500)
|
||||
self.assertEqual(Scores.objects.get(id=scores.id).high_score, 1000)
|
||||
|
||||
@needs_mongodb_v26
|
||||
def test_update_multiple(self):
|
||||
class Product(Document):
|
||||
item = StringField()
|
||||
price = FloatField()
|
||||
|
||||
product = Product.objects.create(item='ABC', price=10.99)
|
||||
product = Product.objects.create(item='ABC', price=10.99)
|
||||
Product.objects(id=product.id).update(mul__price=1.25)
|
||||
self.assertEqual(Product.objects.get(id=product.id).price, 13.7375)
|
||||
unknown_product = Product.objects.create(item='Unknown')
|
||||
Product.objects(id=unknown_product.id).update(mul__price=100)
|
||||
self.assertEqual(Product.objects.get(id=unknown_product.id).price, 0)
|
||||
|
||||
def test_updates_can_have_match_operators(self):
|
||||
|
||||
class Comment(EmbeddedDocument):
|
||||
@ -661,14 +676,14 @@ class QuerySetTest(unittest.TestCase):
|
||||
|
||||
result = self.Person(name="Bob", age=25).update(
|
||||
upsert=True, full_result=True)
|
||||
self.assertTrue(isinstance(result, dict))
|
||||
self.assertTrue("upserted" in result)
|
||||
self.assertFalse(result["updatedExisting"])
|
||||
self.assertIsInstance(result, UpdateResult)
|
||||
self.assertIn("upserted", result.raw_result)
|
||||
self.assertFalse(result.raw_result["updatedExisting"])
|
||||
|
||||
bob = self.Person.objects.first()
|
||||
result = bob.update(set__age=30, full_result=True)
|
||||
self.assertTrue(isinstance(result, dict))
|
||||
self.assertTrue(result["updatedExisting"])
|
||||
self.assertIsInstance(result, UpdateResult)
|
||||
self.assertTrue(result.raw_result["updatedExisting"])
|
||||
|
||||
self.Person(name="Bob", age=20).save()
|
||||
result = self.Person.objects(name="Bob").update(
|
||||
@ -835,11 +850,8 @@ class QuerySetTest(unittest.TestCase):
|
||||
blogs.append(Blog(title="post %s" % i, posts=[post1, post2]))
|
||||
|
||||
Blog.objects.insert(blogs, load_bulk=False)
|
||||
if mongodb_version < (2, 6):
|
||||
self.assertEqual(q, 1)
|
||||
else:
|
||||
# profiling logs each doc now in the bulk op
|
||||
self.assertEqual(q, 99)
|
||||
# profiling logs each doc now in the bulk op
|
||||
self.assertEqual(q, 99)
|
||||
|
||||
Blog.drop_collection()
|
||||
Blog.ensure_indexes()
|
||||
@ -848,11 +860,7 @@ class QuerySetTest(unittest.TestCase):
|
||||
self.assertEqual(q, 0)
|
||||
|
||||
Blog.objects.insert(blogs)
|
||||
if mongodb_version < (2, 6):
|
||||
self.assertEqual(q, 2) # 1 for insert, and 1 for in bulk fetch
|
||||
else:
|
||||
# 99 for insert, and 1 for in bulk fetch
|
||||
self.assertEqual(q, 100)
|
||||
self.assertEqual(q, 100) # 99 for insert 1 for fetch
|
||||
|
||||
Blog.drop_collection()
|
||||
|
||||
@ -917,10 +925,6 @@ class QuerySetTest(unittest.TestCase):
|
||||
|
||||
self.assertEqual(Blog.objects.count(), 2)
|
||||
|
||||
Blog.objects.insert([blog2, blog3],
|
||||
write_concern={"w": 0, 'continue_on_error': True})
|
||||
self.assertEqual(Blog.objects.count(), 3)
|
||||
|
||||
def test_get_changed_fields_query_count(self):
|
||||
"""Make sure we don't perform unnecessary db operations when
|
||||
none of document's fields were updated.
|
||||
@ -994,7 +998,7 @@ class QuerySetTest(unittest.TestCase):
|
||||
|
||||
# Retrieve the first person from the database
|
||||
person = self.Person.objects.slave_okay(True).first()
|
||||
self.assertTrue(isinstance(person, self.Person))
|
||||
self.assertIsInstance(person, self.Person)
|
||||
self.assertEqual(person.name, "User A")
|
||||
self.assertEqual(person.age, 20)
|
||||
|
||||
@ -1061,10 +1065,10 @@ class QuerySetTest(unittest.TestCase):
|
||||
self.assertEqual(docs.count(), 1000)
|
||||
|
||||
docs_string = "%s" % docs
|
||||
self.assertTrue("Doc: 0" in docs_string)
|
||||
self.assertIn("Doc: 0", docs_string)
|
||||
|
||||
self.assertEqual(docs.count(), 1000)
|
||||
self.assertTrue('(remaining elements truncated)' in "%s" % docs)
|
||||
self.assertIn('(remaining elements truncated)', "%s" % docs)
|
||||
|
||||
# Limit and skip
|
||||
docs = docs[1:4]
|
||||
@ -1203,6 +1207,14 @@ class QuerySetTest(unittest.TestCase):
|
||||
BlogPost.drop_collection()
|
||||
Blog.drop_collection()
|
||||
|
||||
def test_filter_chaining_with_regex(self):
|
||||
person = self.Person(name='Guido van Rossum')
|
||||
person.save()
|
||||
|
||||
people = self.Person.objects
|
||||
people = people.filter(name__startswith='Gui').filter(name__not__endswith='tum')
|
||||
self.assertEqual(people.count(), 1)
|
||||
|
||||
def assertSequence(self, qs, expected):
|
||||
qs = list(qs)
|
||||
expected = list(expected)
|
||||
@ -1273,7 +1285,7 @@ class QuerySetTest(unittest.TestCase):
|
||||
with db_ops_tracker() as q:
|
||||
BlogPost.objects.filter(title='whatever').order_by().first()
|
||||
self.assertEqual(len(q.get_ops()), 1)
|
||||
self.assertFalse('$orderby' in q.get_ops()[0]['query'])
|
||||
self.assertNotIn('$orderby', q.get_ops()[0]['query'])
|
||||
|
||||
# calling an explicit order_by should use a specified sort
|
||||
with db_ops_tracker() as q:
|
||||
@ -1289,7 +1301,7 @@ class QuerySetTest(unittest.TestCase):
|
||||
qs = BlogPost.objects.filter(title='whatever').order_by('published_date')
|
||||
qs.order_by().first()
|
||||
self.assertEqual(len(q.get_ops()), 1)
|
||||
self.assertFalse('$orderby' in q.get_ops()[0]['query'])
|
||||
self.assertNotIn('$orderby', q.get_ops()[0]['query'])
|
||||
|
||||
def test_no_ordering_for_get(self):
|
||||
""" Ensure that Doc.objects.get doesn't use any ordering.
|
||||
@ -1308,13 +1320,13 @@ class QuerySetTest(unittest.TestCase):
|
||||
with db_ops_tracker() as q:
|
||||
BlogPost.objects.get(title='whatever')
|
||||
self.assertEqual(len(q.get_ops()), 1)
|
||||
self.assertFalse('$orderby' in q.get_ops()[0]['query'])
|
||||
self.assertNotIn('$orderby', q.get_ops()[0]['query'])
|
||||
|
||||
# Ordering should be ignored for .get even if we set it explicitly
|
||||
with db_ops_tracker() as q:
|
||||
BlogPost.objects.order_by('-title').get(title='whatever')
|
||||
self.assertEqual(len(q.get_ops()), 1)
|
||||
self.assertFalse('$orderby' in q.get_ops()[0]['query'])
|
||||
self.assertNotIn('$orderby', q.get_ops()[0]['query'])
|
||||
|
||||
def test_find_embedded(self):
|
||||
"""Ensure that an embedded document is properly returned from
|
||||
@ -1336,15 +1348,15 @@ class QuerySetTest(unittest.TestCase):
|
||||
)
|
||||
|
||||
result = BlogPost.objects.first()
|
||||
self.assertTrue(isinstance(result.author, User))
|
||||
self.assertIsInstance(result.author, User)
|
||||
self.assertEqual(result.author.name, 'Test User')
|
||||
|
||||
result = BlogPost.objects.get(author__name=user.name)
|
||||
self.assertTrue(isinstance(result.author, User))
|
||||
self.assertIsInstance(result.author, User)
|
||||
self.assertEqual(result.author.name, 'Test User')
|
||||
|
||||
result = BlogPost.objects.get(author={'name': user.name})
|
||||
self.assertTrue(isinstance(result.author, User))
|
||||
self.assertIsInstance(result.author, User)
|
||||
self.assertEqual(result.author.name, 'Test User')
|
||||
|
||||
# Fails, since the string is not a type that is able to represent the
|
||||
@ -1462,7 +1474,7 @@ class QuerySetTest(unittest.TestCase):
|
||||
code_chunks = ['doc["cmnts"];', 'doc["doc-name"],',
|
||||
'doc["cmnts"][i]["body"]']
|
||||
for chunk in code_chunks:
|
||||
self.assertTrue(chunk in sub_code)
|
||||
self.assertIn(chunk, sub_code)
|
||||
|
||||
results = BlogPost.objects.exec_js(code)
|
||||
expected_results = [
|
||||
@ -1852,21 +1864,16 @@ class QuerySetTest(unittest.TestCase):
|
||||
self.assertEqual(
|
||||
1, BlogPost.objects(author__in=["%s" % me.pk]).count())
|
||||
|
||||
def test_update(self):
|
||||
"""Ensure that atomic updates work properly.
|
||||
"""
|
||||
def test_update_intfield_operator(self):
|
||||
class BlogPost(Document):
|
||||
name = StringField()
|
||||
title = StringField()
|
||||
hits = IntField()
|
||||
tags = ListField(StringField())
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
post = BlogPost(name="Test Post", hits=5, tags=['test'])
|
||||
post = BlogPost(hits=5)
|
||||
post.save()
|
||||
|
||||
BlogPost.objects.update(set__hits=10)
|
||||
BlogPost.objects.update_one(set__hits=10)
|
||||
post.reload()
|
||||
self.assertEqual(post.hits, 10)
|
||||
|
||||
@ -1883,13 +1890,63 @@ class QuerySetTest(unittest.TestCase):
|
||||
post.reload()
|
||||
self.assertEqual(post.hits, 11)
|
||||
|
||||
def test_update_decimalfield_operator(self):
|
||||
class BlogPost(Document):
|
||||
review = DecimalField()
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
post = BlogPost(review=3.5)
|
||||
post.save()
|
||||
|
||||
BlogPost.objects.update_one(inc__review=0.1) # test with floats
|
||||
post.reload()
|
||||
self.assertEqual(float(post.review), 3.6)
|
||||
|
||||
BlogPost.objects.update_one(dec__review=0.1)
|
||||
post.reload()
|
||||
self.assertEqual(float(post.review), 3.5)
|
||||
|
||||
BlogPost.objects.update_one(inc__review=Decimal(0.12)) # test with Decimal
|
||||
post.reload()
|
||||
self.assertEqual(float(post.review), 3.62)
|
||||
|
||||
BlogPost.objects.update_one(dec__review=Decimal(0.12))
|
||||
post.reload()
|
||||
self.assertEqual(float(post.review), 3.5)
|
||||
|
||||
def test_update_decimalfield_operator_not_working_with_force_string(self):
|
||||
class BlogPost(Document):
|
||||
review = DecimalField(force_string=True)
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
post = BlogPost(review=3.5)
|
||||
post.save()
|
||||
|
||||
with self.assertRaises(OperationError):
|
||||
BlogPost.objects.update_one(inc__review=0.1) # test with floats
|
||||
|
||||
def test_update_listfield_operator(self):
|
||||
"""Ensure that atomic updates work properly.
|
||||
"""
|
||||
class BlogPost(Document):
|
||||
tags = ListField(StringField())
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
post = BlogPost(tags=['test'])
|
||||
post.save()
|
||||
|
||||
# ListField operator
|
||||
BlogPost.objects.update(push__tags='mongo')
|
||||
post.reload()
|
||||
self.assertTrue('mongo' in post.tags)
|
||||
self.assertIn('mongo', post.tags)
|
||||
|
||||
BlogPost.objects.update_one(push_all__tags=['db', 'nosql'])
|
||||
post.reload()
|
||||
self.assertTrue('db' in post.tags and 'nosql' in post.tags)
|
||||
self.assertIn('db', post.tags)
|
||||
self.assertIn('nosql', post.tags)
|
||||
|
||||
tags = post.tags[:-1]
|
||||
BlogPost.objects.update(pop__tags=1)
|
||||
@ -1901,13 +1958,23 @@ class QuerySetTest(unittest.TestCase):
|
||||
post.reload()
|
||||
self.assertEqual(post.tags.count('unique'), 1)
|
||||
|
||||
self.assertNotEqual(post.hits, None)
|
||||
BlogPost.objects.update_one(unset__hits=1)
|
||||
post.reload()
|
||||
self.assertEqual(post.hits, None)
|
||||
BlogPost.drop_collection()
|
||||
|
||||
def test_update_unset(self):
|
||||
class BlogPost(Document):
|
||||
title = StringField()
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
post = BlogPost(title='garbage').save()
|
||||
|
||||
self.assertNotEqual(post.title, None)
|
||||
BlogPost.objects.update_one(unset__title=1)
|
||||
post.reload()
|
||||
self.assertEqual(post.title, None)
|
||||
pymongo_doc = BlogPost.objects.as_pymongo().first()
|
||||
self.assertNotIn('title', pymongo_doc)
|
||||
|
||||
@needs_mongodb_v26
|
||||
def test_update_push_with_position(self):
|
||||
"""Ensure that the 'push' update with position works properly.
|
||||
@ -1934,6 +2001,21 @@ class QuerySetTest(unittest.TestCase):
|
||||
post.reload()
|
||||
self.assertEqual(post.tags, ['scala', 'mongodb', 'python', 'java'])
|
||||
|
||||
def test_update_push_list_of_list(self):
|
||||
"""Ensure that the 'push' update operation works in the list of list
|
||||
"""
|
||||
class BlogPost(Document):
|
||||
slug = StringField()
|
||||
tags = ListField()
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
post = BlogPost(slug="test").save()
|
||||
|
||||
BlogPost.objects.filter(slug="test").update(push__tags=["value1", 123])
|
||||
post.reload()
|
||||
self.assertEqual(post.tags, [["value1", 123]])
|
||||
|
||||
def test_update_push_and_pull_add_to_set(self):
|
||||
"""Ensure that the 'pull' update operation works correctly.
|
||||
"""
|
||||
@ -2076,6 +2158,23 @@ class QuerySetTest(unittest.TestCase):
|
||||
Site.objects(id=s.id).update_one(
|
||||
pull_all__collaborators__helpful__user=['Ross'])
|
||||
|
||||
def test_pull_in_genericembedded_field(self):
|
||||
|
||||
class Foo(EmbeddedDocument):
|
||||
name = StringField()
|
||||
|
||||
class Bar(Document):
|
||||
foos = ListField(GenericEmbeddedDocumentField(
|
||||
choices=[Foo, ]))
|
||||
|
||||
Bar.drop_collection()
|
||||
|
||||
foo = Foo(name="bar")
|
||||
bar = Bar(foos=[foo]).save()
|
||||
Bar.objects(id=bar.id).update(pull__foos=foo)
|
||||
bar.reload()
|
||||
self.assertEqual(len(bar.foos), 0)
|
||||
|
||||
def test_update_one_pop_generic_reference(self):
|
||||
|
||||
class BlogTag(Document):
|
||||
@ -2169,6 +2268,24 @@ class QuerySetTest(unittest.TestCase):
|
||||
self.assertEqual(message.authors[1].name, "Ross")
|
||||
self.assertEqual(message.authors[2].name, "Adam")
|
||||
|
||||
def test_set_generic_embedded_documents(self):
|
||||
|
||||
class Bar(EmbeddedDocument):
|
||||
name = StringField()
|
||||
|
||||
class User(Document):
|
||||
username = StringField()
|
||||
bar = GenericEmbeddedDocumentField(choices=[Bar,])
|
||||
|
||||
User.drop_collection()
|
||||
|
||||
User(username='abc').save()
|
||||
User.objects(username='abc').update(
|
||||
set__bar=Bar(name='test'), upsert=True)
|
||||
|
||||
user = User.objects(username='abc').first()
|
||||
self.assertEqual(user.bar.name, "test")
|
||||
|
||||
def test_reload_embedded_docs_instance(self):
|
||||
|
||||
class SubDoc(EmbeddedDocument):
|
||||
@ -2338,14 +2455,19 @@ class QuerySetTest(unittest.TestCase):
|
||||
age = IntField()
|
||||
|
||||
with db_ops_tracker() as q:
|
||||
adult = (User.objects.filter(age__gte=18)
|
||||
adult1 = (User.objects.filter(age__gte=18)
|
||||
.comment('looking for an adult')
|
||||
.first())
|
||||
|
||||
adult2 = (User.objects.comment('looking for an adult')
|
||||
.filter(age__gte=18)
|
||||
.first())
|
||||
|
||||
ops = q.get_ops()
|
||||
self.assertEqual(len(ops), 1)
|
||||
op = ops[0]
|
||||
self.assertEqual(op['query']['$query'], {'age': {'$gte': 18}})
|
||||
self.assertEqual(op['query']['$comment'], 'looking for an adult')
|
||||
self.assertEqual(len(ops), 2)
|
||||
for op in ops:
|
||||
self.assertEqual(op['query']['$query'], {'age': {'$gte': 18}})
|
||||
self.assertEqual(op['query']['$comment'], 'looking for an adult')
|
||||
|
||||
def test_map_reduce(self):
|
||||
"""Ensure map/reduce is both mapping and reducing.
|
||||
@ -3157,8 +3279,8 @@ class QuerySetTest(unittest.TestCase):
|
||||
|
||||
News.drop_collection()
|
||||
info = News.objects._collection.index_information()
|
||||
self.assertTrue('title_text_content_text' in info)
|
||||
self.assertTrue('textIndexVersion' in info['title_text_content_text'])
|
||||
self.assertIn('title_text_content_text', info)
|
||||
self.assertIn('textIndexVersion', info['title_text_content_text'])
|
||||
|
||||
News(title="Neymar quebrou a vertebra",
|
||||
content="O Brasil sofre com a perda de Neymar").save()
|
||||
@ -3192,15 +3314,15 @@ class QuerySetTest(unittest.TestCase):
|
||||
'$search': 'dilma', '$language': 'pt'},
|
||||
'is_active': False})
|
||||
|
||||
self.assertEqual(new.is_active, False)
|
||||
self.assertTrue('dilma' in new.content)
|
||||
self.assertTrue('planejamento' in new.title)
|
||||
self.assertFalse(new.is_active)
|
||||
self.assertIn('dilma', new.content)
|
||||
self.assertIn('planejamento', new.title)
|
||||
|
||||
query = News.objects.search_text("candidata")
|
||||
self.assertEqual(query._search_text, "candidata")
|
||||
new = query.first()
|
||||
|
||||
self.assertTrue(isinstance(new.get_text_score(), float))
|
||||
self.assertIsInstance(new.get_text_score(), float)
|
||||
|
||||
# count
|
||||
query = News.objects.search_text('brasil').order_by('$text_score')
|
||||
@ -3495,39 +3617,12 @@ class QuerySetTest(unittest.TestCase):
|
||||
Group.objects(id=group.id).update(set__members=[user1, user2])
|
||||
group.reload()
|
||||
|
||||
self.assertTrue(len(group.members) == 2)
|
||||
self.assertEqual(len(group.members), 2)
|
||||
self.assertEqual(group.members[0].name, user1.name)
|
||||
self.assertEqual(group.members[1].name, user2.name)
|
||||
|
||||
Group.drop_collection()
|
||||
|
||||
def test_dict_with_custom_baseclass(self):
|
||||
"""Ensure DictField working with custom base clases.
|
||||
"""
|
||||
class Test(Document):
|
||||
testdict = DictField()
|
||||
|
||||
Test.drop_collection()
|
||||
|
||||
t = Test(testdict={'f': 'Value'})
|
||||
t.save()
|
||||
|
||||
self.assertEqual(
|
||||
Test.objects(testdict__f__startswith='Val').count(), 1)
|
||||
self.assertEqual(Test.objects(testdict__f='Value').count(), 1)
|
||||
Test.drop_collection()
|
||||
|
||||
class Test(Document):
|
||||
testdict = DictField(basecls=StringField)
|
||||
|
||||
t = Test(testdict={'f': 'Value'})
|
||||
t.save()
|
||||
|
||||
self.assertEqual(Test.objects(testdict__f='Value').count(), 1)
|
||||
self.assertEqual(
|
||||
Test.objects(testdict__f__startswith='Val').count(), 1)
|
||||
Test.drop_collection()
|
||||
|
||||
def test_bulk(self):
|
||||
"""Ensure bulk querying by object id returns a proper dict.
|
||||
"""
|
||||
@ -3553,13 +3648,13 @@ class QuerySetTest(unittest.TestCase):
|
||||
|
||||
self.assertEqual(len(objects), 3)
|
||||
|
||||
self.assertTrue(post_1.id in objects)
|
||||
self.assertTrue(post_2.id in objects)
|
||||
self.assertTrue(post_5.id in objects)
|
||||
self.assertIn(post_1.id, objects)
|
||||
self.assertIn(post_2.id, objects)
|
||||
self.assertIn(post_5.id, objects)
|
||||
|
||||
self.assertTrue(objects[post_1.id].title == post_1.title)
|
||||
self.assertTrue(objects[post_2.id].title == post_2.title)
|
||||
self.assertTrue(objects[post_5.id].title == post_5.title)
|
||||
self.assertEqual(objects[post_1.id].title, post_1.title)
|
||||
self.assertEqual(objects[post_2.id].title, post_2.title)
|
||||
self.assertEqual(objects[post_5.id].title, post_5.title)
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
@ -3579,7 +3674,7 @@ class QuerySetTest(unittest.TestCase):
|
||||
|
||||
Post.drop_collection()
|
||||
|
||||
self.assertTrue(isinstance(Post.objects, CustomQuerySet))
|
||||
self.assertIsInstance(Post.objects, CustomQuerySet)
|
||||
self.assertFalse(Post.objects.not_empty())
|
||||
|
||||
Post().save()
|
||||
@ -3604,7 +3699,7 @@ class QuerySetTest(unittest.TestCase):
|
||||
|
||||
Post.drop_collection()
|
||||
|
||||
self.assertTrue(isinstance(Post.objects, CustomQuerySet))
|
||||
self.assertIsInstance(Post.objects, CustomQuerySet)
|
||||
self.assertFalse(Post.objects.not_empty())
|
||||
|
||||
Post().save()
|
||||
@ -3651,7 +3746,7 @@ class QuerySetTest(unittest.TestCase):
|
||||
pass
|
||||
|
||||
Post.drop_collection()
|
||||
self.assertTrue(isinstance(Post.objects, CustomQuerySet))
|
||||
self.assertIsInstance(Post.objects, CustomQuerySet)
|
||||
self.assertFalse(Post.objects.not_empty())
|
||||
|
||||
Post().save()
|
||||
@ -3679,7 +3774,7 @@ class QuerySetTest(unittest.TestCase):
|
||||
pass
|
||||
|
||||
Post.drop_collection()
|
||||
self.assertTrue(isinstance(Post.objects, CustomQuerySet))
|
||||
self.assertIsInstance(Post.objects, CustomQuerySet)
|
||||
self.assertFalse(Post.objects.not_empty())
|
||||
|
||||
Post().save()
|
||||
@ -3770,17 +3865,17 @@ class QuerySetTest(unittest.TestCase):
|
||||
|
||||
test = Number.objects
|
||||
test2 = test.clone()
|
||||
self.assertFalse(test == test2)
|
||||
self.assertNotEqual(test, test2)
|
||||
self.assertEqual(test.count(), test2.count())
|
||||
|
||||
test = test.filter(n__gt=11)
|
||||
test2 = test.clone()
|
||||
self.assertFalse(test == test2)
|
||||
self.assertNotEqual(test, test2)
|
||||
self.assertEqual(test.count(), test2.count())
|
||||
|
||||
test = test.limit(10)
|
||||
test2 = test.clone()
|
||||
self.assertFalse(test == test2)
|
||||
self.assertNotEqual(test, test2)
|
||||
self.assertEqual(test.count(), test2.count())
|
||||
|
||||
Number.drop_collection()
|
||||
@ -3870,7 +3965,7 @@ class QuerySetTest(unittest.TestCase):
|
||||
value.get('unique', False),
|
||||
value.get('sparse', False))
|
||||
for key, value in info.iteritems()]
|
||||
self.assertTrue(([('_cls', 1), ('message', 1)], False, False) in info)
|
||||
self.assertIn(([('_cls', 1), ('message', 1)], False, False), info)
|
||||
|
||||
def test_where(self):
|
||||
"""Ensure that where clauses work.
|
||||
@ -3894,13 +3989,13 @@ class QuerySetTest(unittest.TestCase):
|
||||
'this["fielda"] >= this["fieldb"]', query._where_clause)
|
||||
results = list(query)
|
||||
self.assertEqual(2, len(results))
|
||||
self.assertTrue(a in results)
|
||||
self.assertTrue(c in results)
|
||||
self.assertIn(a, results)
|
||||
self.assertIn(c, results)
|
||||
|
||||
query = IntPair.objects.where('this[~fielda] == this[~fieldb]')
|
||||
results = list(query)
|
||||
self.assertEqual(1, len(results))
|
||||
self.assertTrue(a in results)
|
||||
self.assertIn(a, results)
|
||||
|
||||
query = IntPair.objects.where(
|
||||
'function() { return this[~fielda] >= this[~fieldb] }')
|
||||
@ -3908,8 +4003,8 @@ class QuerySetTest(unittest.TestCase):
|
||||
'function() { return this["fielda"] >= this["fieldb"] }', query._where_clause)
|
||||
results = list(query)
|
||||
self.assertEqual(2, len(results))
|
||||
self.assertTrue(a in results)
|
||||
self.assertTrue(c in results)
|
||||
self.assertIn(a, results)
|
||||
self.assertIn(c, results)
|
||||
|
||||
with self.assertRaises(TypeError):
|
||||
list(IntPair.objects.where(fielda__gte=3))
|
||||
@ -4291,7 +4386,7 @@ class QuerySetTest(unittest.TestCase):
|
||||
|
||||
Test.drop_collection()
|
||||
Test.objects(test='foo').update_one(upsert=True, set__test='foo')
|
||||
self.assertFalse('_cls' in Test._collection.find_one())
|
||||
self.assertNotIn('_cls', Test._collection.find_one())
|
||||
|
||||
class Test(Document):
|
||||
meta = {'allow_inheritance': True}
|
||||
@ -4300,7 +4395,7 @@ class QuerySetTest(unittest.TestCase):
|
||||
Test.drop_collection()
|
||||
|
||||
Test.objects(test='foo').update_one(upsert=True, set__test='foo')
|
||||
self.assertTrue('_cls' in Test._collection.find_one())
|
||||
self.assertIn('_cls', Test._collection.find_one())
|
||||
|
||||
def test_update_upsert_looks_like_a_digit(self):
|
||||
class MyDoc(DynamicDocument):
|
||||
@ -4384,6 +4479,25 @@ class QuerySetTest(unittest.TestCase):
|
||||
self.assertEqual(bars._cursor._Cursor__read_preference,
|
||||
ReadPreference.SECONDARY_PREFERRED)
|
||||
|
||||
@needs_mongodb_v26
|
||||
def test_read_preference_aggregation_framework(self):
|
||||
class Bar(Document):
|
||||
txt = StringField()
|
||||
|
||||
meta = {
|
||||
'indexes': ['txt']
|
||||
}
|
||||
# Aggregates with read_preference
|
||||
bars = Bar.objects \
|
||||
.read_preference(ReadPreference.SECONDARY_PREFERRED) \
|
||||
.aggregate()
|
||||
if IS_PYMONGO_3:
|
||||
self.assertEqual(bars._CommandCursor__collection.read_preference,
|
||||
ReadPreference.SECONDARY_PREFERRED)
|
||||
else:
|
||||
self.assertNotEqual(bars._CommandCursor__collection.read_preference,
|
||||
ReadPreference.SECONDARY_PREFERRED)
|
||||
|
||||
def test_json_simple(self):
|
||||
|
||||
class Embedded(EmbeddedDocument):
|
||||
@ -4493,8 +4607,8 @@ class QuerySetTest(unittest.TestCase):
|
||||
|
||||
users = User.objects.only('name', 'price').as_pymongo()
|
||||
results = list(users)
|
||||
self.assertTrue(isinstance(results[0], dict))
|
||||
self.assertTrue(isinstance(results[1], dict))
|
||||
self.assertIsInstance(results[0], dict)
|
||||
self.assertIsInstance(results[1], dict)
|
||||
self.assertEqual(results[0]['name'], 'Bob Dole')
|
||||
self.assertEqual(results[0]['price'], 1.11)
|
||||
self.assertEqual(results[1]['name'], 'Barack Obama')
|
||||
@ -4502,8 +4616,8 @@ class QuerySetTest(unittest.TestCase):
|
||||
|
||||
users = User.objects.only('name', 'last_login').as_pymongo()
|
||||
results = list(users)
|
||||
self.assertTrue(isinstance(results[0], dict))
|
||||
self.assertTrue(isinstance(results[1], dict))
|
||||
self.assertIsInstance(results[0], dict)
|
||||
self.assertIsInstance(results[1], dict)
|
||||
self.assertEqual(results[0], {
|
||||
'name': 'Bob Dole'
|
||||
})
|
||||
@ -4560,12 +4674,10 @@ class QuerySetTest(unittest.TestCase):
|
||||
User(name="Bob Dole", organization=whitehouse).save()
|
||||
|
||||
qs = User.objects()
|
||||
self.assertTrue(isinstance(qs.first().organization, Organization))
|
||||
self.assertFalse(isinstance(qs.no_dereference().first().organization,
|
||||
Organization))
|
||||
self.assertFalse(isinstance(qs.no_dereference().get().organization,
|
||||
Organization))
|
||||
self.assertTrue(isinstance(qs.first().organization, Organization))
|
||||
self.assertIsInstance(qs.first().organization, Organization)
|
||||
self.assertNotIsInstance(qs.no_dereference().first().organization, Organization)
|
||||
self.assertNotIsInstance(qs.no_dereference().get().organization, Organization)
|
||||
self.assertIsInstance(qs.first().organization, Organization)
|
||||
|
||||
def test_no_dereference_embedded_doc(self):
|
||||
|
||||
@ -4598,9 +4710,9 @@ class QuerySetTest(unittest.TestCase):
|
||||
|
||||
result = Organization.objects().no_dereference().first()
|
||||
|
||||
self.assertTrue(isinstance(result.admin[0], (DBRef, ObjectId)))
|
||||
self.assertTrue(isinstance(result.member.user, (DBRef, ObjectId)))
|
||||
self.assertTrue(isinstance(result.members[0].user, (DBRef, ObjectId)))
|
||||
self.assertIsInstance(result.admin[0], (DBRef, ObjectId))
|
||||
self.assertIsInstance(result.member.user, (DBRef, ObjectId))
|
||||
self.assertIsInstance(result.members[0].user, (DBRef, ObjectId))
|
||||
|
||||
def test_cached_queryset(self):
|
||||
class Person(Document):
|
||||
@ -4641,18 +4753,27 @@ class QuerySetTest(unittest.TestCase):
|
||||
for i in range(100):
|
||||
Person(name="No: %s" % i).save()
|
||||
|
||||
with query_counter() as q:
|
||||
self.assertEqual(q, 0)
|
||||
people = Person.objects.no_cache()
|
||||
with query_counter() as q:
|
||||
try:
|
||||
self.assertEqual(q, 0)
|
||||
people = Person.objects.no_cache()
|
||||
|
||||
[x for x in people]
|
||||
self.assertEqual(q, 1)
|
||||
[x for x in people]
|
||||
self.assertEqual(q, 1)
|
||||
|
||||
list(people)
|
||||
self.assertEqual(q, 2)
|
||||
list(people)
|
||||
self.assertEqual(q, 2)
|
||||
|
||||
people.count()
|
||||
self.assertEqual(q, 3)
|
||||
except AssertionError as exc:
|
||||
db = get_db()
|
||||
msg = ''
|
||||
for q in list(db.system.profile.find())[-50:]:
|
||||
msg += str([q['ts'], q['ns'], q.get('query'), q['op']])+'\n'
|
||||
msg += str(q)
|
||||
raise AssertionError(str(exc) + '\n'+msg)
|
||||
|
||||
people.count()
|
||||
self.assertEqual(q, 3)
|
||||
|
||||
def test_cache_not_cloned(self):
|
||||
|
||||
@ -4795,6 +4916,30 @@ class QuerySetTest(unittest.TestCase):
|
||||
for obj in C.objects.no_sub_classes():
|
||||
self.assertEqual(obj.__class__, C)
|
||||
|
||||
def test_query_generic_embedded_document(self):
|
||||
"""Ensure that querying sub field on generic_embedded_field works
|
||||
"""
|
||||
class A(EmbeddedDocument):
|
||||
a_name = StringField()
|
||||
|
||||
class B(EmbeddedDocument):
|
||||
b_name = StringField()
|
||||
|
||||
class Doc(Document):
|
||||
document = GenericEmbeddedDocumentField(choices=(A, B))
|
||||
|
||||
Doc.drop_collection()
|
||||
Doc(document=A(a_name='A doc')).save()
|
||||
Doc(document=B(b_name='B doc')).save()
|
||||
|
||||
# Using raw in filter working fine
|
||||
self.assertEqual(Doc.objects(
|
||||
__raw__={'document.a_name': 'A doc'}).count(), 1)
|
||||
self.assertEqual(Doc.objects(
|
||||
__raw__={'document.b_name': 'B doc'}).count(), 1)
|
||||
self.assertEqual(Doc.objects(document__a_name='A doc').count(), 1)
|
||||
self.assertEqual(Doc.objects(document__b_name='B doc').count(), 1)
|
||||
|
||||
def test_query_reference_to_custom_pk_doc(self):
|
||||
|
||||
class A(Document):
|
||||
@ -4919,7 +5064,7 @@ class QuerySetTest(unittest.TestCase):
|
||||
op = q.db.system.profile.find({"ns":
|
||||
{"$ne": "%s.system.indexes" % q.db.name}})[0]
|
||||
|
||||
self.assertFalse('$orderby' in op['query'],
|
||||
self.assertNotIn('$orderby', op['query'],
|
||||
'BaseQuerySet cannot use orderby in if stmt')
|
||||
|
||||
with query_counter() as p:
|
||||
@ -4930,8 +5075,7 @@ class QuerySetTest(unittest.TestCase):
|
||||
op = p.db.system.profile.find({"ns":
|
||||
{"$ne": "%s.system.indexes" % q.db.name}})[0]
|
||||
|
||||
self.assertTrue('$orderby' in op['query'],
|
||||
'BaseQuerySet cannot remove orderby in for loop')
|
||||
self.assertIn('$orderby', op['query'], 'BaseQuerySet cannot remove orderby in for loop')
|
||||
|
||||
def test_bool_with_ordering_from_meta_dict(self):
|
||||
|
||||
@ -4955,7 +5099,7 @@ class QuerySetTest(unittest.TestCase):
|
||||
op = q.db.system.profile.find({"ns":
|
||||
{"$ne": "%s.system.indexes" % q.db.name}})[0]
|
||||
|
||||
self.assertFalse('$orderby' in op['query'],
|
||||
self.assertNotIn('$orderby', op['query'],
|
||||
'BaseQuerySet must remove orderby from meta in boolen test')
|
||||
|
||||
self.assertEqual(Person.objects.first().name, 'A')
|
||||
|
@ -1,5 +1,7 @@
import unittest

from bson.son import SON

from mongoengine import *
from mongoengine.queryset import Q, transform

@ -28,12 +30,16 @@ class TransformTest(unittest.TestCase):
{'name': {'$exists': True}})

def test_transform_update(self):
class LisDoc(Document):
foo = ListField(StringField())

class DicDoc(Document):
dictField = DictField()

class Doc(Document):
pass

LisDoc.drop_collection()
DicDoc.drop_collection()
Doc.drop_collection()

@ -42,14 +48,28 @@ class TransformTest(unittest.TestCase):

for k, v in (("set", "$set"), ("set_on_insert", "$setOnInsert"), ("push", "$push")):
update = transform.update(DicDoc, **{"%s__dictField__test" % k: doc})
self.assertTrue(isinstance(update[v]["dictField.test"], dict))
self.assertIsInstance(update[v]["dictField.test"], dict)

# Update special cases
update = transform.update(DicDoc, unset__dictField__test=doc)
self.assertEqual(update["$unset"]["dictField.test"], 1)

update = transform.update(DicDoc, pull__dictField__test=doc)
self.assertTrue(isinstance(update["$pull"]["dictField"]["test"], dict))
self.assertIsInstance(update["$pull"]["dictField"]["test"], dict)

update = transform.update(LisDoc, pull__foo__in=['a'])
self.assertEqual(update, {'$pull': {'foo': {'$in': ['a']}}})

def test_transform_update_push(self):
"""Ensure the difference in behavior between 'push' and 'push_all'"""
class BlogPost(Document):
tags = ListField(StringField())

update = transform.update(BlogPost, push__tags=['mongo', 'db'])
self.assertEqual(update, {'$push': {'tags': ['mongo', 'db']}})

update = transform.update(BlogPost, push_all__tags=['mongo', 'db'])
self.assertEqual(update, {'$push': {'tags': {'$each': ['mongo', 'db']}}})

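For reference, the two shapes above are what reaches the server: a plain $push appends its value as one element (here, the whole list), while push_all wraps the values in $each so each one is appended on its own:

    transform.update(BlogPost, push__tags=['mongo', 'db'])      # tags grows by one element, ['mongo', 'db']
    transform.update(BlogPost, push_all__tags=['mongo', 'db'])  # tags grows by two elements, 'mongo' and 'db'
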
def test_query_field_name(self):
"""Ensure that the correct field name is used when querying.
@ -68,17 +88,15 @@ class TransformTest(unittest.TestCase):
post = BlogPost(**data)
post.save()

self.assertTrue('postTitle' in
BlogPost.objects(title=data['title'])._query)
self.assertIn('postTitle', BlogPost.objects(title=data['title'])._query)
self.assertFalse('title' in
BlogPost.objects(title=data['title'])._query)
self.assertEqual(BlogPost.objects(title=data['title']).count(), 1)

self.assertTrue('_id' in BlogPost.objects(pk=post.id)._query)
self.assertIn('_id', BlogPost.objects(pk=post.id)._query)
self.assertEqual(BlogPost.objects(pk=post.id).count(), 1)

self.assertTrue('postComments.commentContent' in
BlogPost.objects(comments__content='test')._query)
self.assertIn('postComments.commentContent', BlogPost.objects(comments__content='test')._query)
self.assertEqual(BlogPost.objects(comments__content='test').count(), 1)

BlogPost.drop_collection()
@ -96,8 +114,8 @@ class TransformTest(unittest.TestCase):
post = BlogPost(**data)
post.save()

self.assertTrue('_id' in BlogPost.objects(pk=data['title'])._query)
self.assertTrue('_id' in BlogPost.objects(title=data['title'])._query)
self.assertIn('_id', BlogPost.objects(pk=data['title'])._query)
self.assertIn('_id', BlogPost.objects(title=data['title'])._query)
self.assertEqual(BlogPost.objects(pk=data['title']).count(), 1)

BlogPost.drop_collection()
@ -241,6 +259,30 @@ class TransformTest(unittest.TestCase):
with self.assertRaises(InvalidQueryError):
events.count()

def test_update_pull_for_list_fields(self):
"""
Test added to check the pull operation in an update for an
EmbeddedDocumentListField which is inside an EmbeddedDocumentField
"""
class Word(EmbeddedDocument):
word = StringField()
index = IntField()

class SubDoc(EmbeddedDocument):
heading = ListField(StringField())
text = EmbeddedDocumentListField(Word)

class MainDoc(Document):
title = StringField()
content = EmbeddedDocumentField(SubDoc)

word = Word(word='abc', index=1)
update = transform.update(MainDoc, pull__content__text=word)
self.assertEqual(update, {'$pull': {'content.text': SON([('word', u'abc'), ('index', 1)])}})

update = transform.update(MainDoc, pull__content__heading='xyz')
self.assertEqual(update, {'$pull': {'content.heading': 'xyz'}})

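Worth noting why a SON appears above: pulling an embedded document is assumed to compile its fields into an ordered SON, since BSON document matching is order-sensitive, while a plain string in a ListField is pulled as-is:

    transform.update(MainDoc, pull__content__text=word)  # {'$pull': {'content.text': SON([('word', 'abc'), ('index', 1)])}}
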

if __name__ == '__main__':
unittest.main()

@ -196,7 +196,7 @@ class QTest(unittest.TestCase):

test2 = test.clone()
self.assertEqual(test2.count(), 3)
self.assertFalse(test2 == test)
self.assertNotEqual(test2, test)

test3 = test2.filter(x=6)
self.assertEqual(test3.count(), 1)

@ -39,15 +39,15 @@ class ConnectionTest(unittest.TestCase):
connect('mongoenginetest')

conn = get_connection()
self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))
self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)

db = get_db()
self.assertTrue(isinstance(db, pymongo.database.Database))
self.assertIsInstance(db, pymongo.database.Database)
self.assertEqual(db.name, 'mongoenginetest')

connect('mongoenginetest2', alias='testdb')
conn = get_connection('testdb')
self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))
self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)

def test_connect_in_mocking(self):
"""Ensure that the connect() method works properly when mocking.
@ -59,31 +59,31 @@ class ConnectionTest(unittest.TestCase):

connect('mongoenginetest', host='mongomock://localhost')
conn = get_connection()
self.assertTrue(isinstance(conn, mongomock.MongoClient))
self.assertIsInstance(conn, mongomock.MongoClient)

connect('mongoenginetest2', host='mongomock://localhost', alias='testdb2')
conn = get_connection('testdb2')
self.assertTrue(isinstance(conn, mongomock.MongoClient))
self.assertIsInstance(conn, mongomock.MongoClient)

connect('mongoenginetest3', host='mongodb://localhost', is_mock=True, alias='testdb3')
conn = get_connection('testdb3')
self.assertTrue(isinstance(conn, mongomock.MongoClient))
self.assertIsInstance(conn, mongomock.MongoClient)

connect('mongoenginetest4', is_mock=True, alias='testdb4')
conn = get_connection('testdb4')
self.assertTrue(isinstance(conn, mongomock.MongoClient))
self.assertIsInstance(conn, mongomock.MongoClient)

connect(host='mongodb://localhost:27017/mongoenginetest5', is_mock=True, alias='testdb5')
conn = get_connection('testdb5')
self.assertTrue(isinstance(conn, mongomock.MongoClient))
self.assertIsInstance(conn, mongomock.MongoClient)

connect(host='mongomock://localhost:27017/mongoenginetest6', alias='testdb6')
conn = get_connection('testdb6')
self.assertTrue(isinstance(conn, mongomock.MongoClient))
self.assertIsInstance(conn, mongomock.MongoClient)

connect(host='mongomock://localhost:27017/mongoenginetest7', is_mock=True, alias='testdb7')
conn = get_connection('testdb7')
self.assertTrue(isinstance(conn, mongomock.MongoClient))
self.assertIsInstance(conn, mongomock.MongoClient)

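As these cases spell out, a mongomock client can be requested in two equivalent ways: a mongomock:// scheme in the host, or is_mock=True alongside an ordinary host (aliases below are arbitrary):

    connect('db_a', host='mongomock://localhost')                             # scheme-based
    connect('db_b', host='mongodb://localhost', is_mock=True, alias='db_b')  # flag-based
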
def test_connect_with_host_list(self):
|
||||
"""Ensure that the connect() method works when host is a list
|
||||
@ -97,27 +97,27 @@ class ConnectionTest(unittest.TestCase):
|
||||
|
||||
connect(host=['mongomock://localhost'])
|
||||
conn = get_connection()
|
||||
self.assertTrue(isinstance(conn, mongomock.MongoClient))
|
||||
self.assertIsInstance(conn, mongomock.MongoClient)
|
||||
|
||||
connect(host=['mongodb://localhost'], is_mock=True, alias='testdb2')
|
||||
conn = get_connection('testdb2')
|
||||
self.assertTrue(isinstance(conn, mongomock.MongoClient))
|
||||
self.assertIsInstance(conn, mongomock.MongoClient)
|
||||
|
||||
connect(host=['localhost'], is_mock=True, alias='testdb3')
|
||||
conn = get_connection('testdb3')
|
||||
self.assertTrue(isinstance(conn, mongomock.MongoClient))
|
||||
self.assertIsInstance(conn, mongomock.MongoClient)
|
||||
|
||||
connect(host=['mongomock://localhost:27017', 'mongomock://localhost:27018'], alias='testdb4')
|
||||
conn = get_connection('testdb4')
|
||||
self.assertTrue(isinstance(conn, mongomock.MongoClient))
|
||||
self.assertIsInstance(conn, mongomock.MongoClient)
|
||||
|
||||
connect(host=['mongodb://localhost:27017', 'mongodb://localhost:27018'], is_mock=True, alias='testdb5')
|
||||
conn = get_connection('testdb5')
|
||||
self.assertTrue(isinstance(conn, mongomock.MongoClient))
|
||||
self.assertIsInstance(conn, mongomock.MongoClient)
|
||||
|
||||
connect(host=['localhost:27017', 'localhost:27018'], is_mock=True, alias='testdb6')
|
||||
conn = get_connection('testdb6')
|
||||
self.assertTrue(isinstance(conn, mongomock.MongoClient))
|
||||
self.assertIsInstance(conn, mongomock.MongoClient)
|
||||
|
||||
def test_disconnect(self):
|
||||
"""Ensure that the disconnect() method works properly
|
||||
@ -163,10 +163,10 @@ class ConnectionTest(unittest.TestCase):
|
||||
connect("testdb_uri", host='mongodb://username:password@localhost/mongoenginetest')
|
||||
|
||||
conn = get_connection()
|
||||
self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))
|
||||
self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)
|
||||
|
||||
db = get_db()
|
||||
self.assertTrue(isinstance(db, pymongo.database.Database))
|
||||
self.assertIsInstance(db, pymongo.database.Database)
|
||||
self.assertEqual(db.name, 'mongoenginetest')
|
||||
|
||||
c.admin.system.users.remove({})
|
||||
@ -179,10 +179,10 @@ class ConnectionTest(unittest.TestCase):
|
||||
connect("mongoenginetest", host='mongodb://localhost/')
|
||||
|
||||
conn = get_connection()
|
||||
self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))
|
||||
self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)
|
||||
|
||||
db = get_db()
|
||||
self.assertTrue(isinstance(db, pymongo.database.Database))
|
||||
self.assertIsInstance(db, pymongo.database.Database)
|
||||
self.assertEqual(db.name, 'mongoenginetest')
|
||||
|
||||
def test_connect_uri_default_db(self):
|
||||
@ -192,10 +192,10 @@ class ConnectionTest(unittest.TestCase):
connect(host='mongodb://localhost/')

conn = get_connection()
self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))
self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)

db = get_db()
self.assertTrue(isinstance(db, pymongo.database.Database))
self.assertIsInstance(db, pymongo.database.Database)
self.assertEqual(db.name, 'test')

def test_uri_without_credentials_doesnt_override_conn_settings(self):
@ -242,7 +242,7 @@ class ConnectionTest(unittest.TestCase):
'mongoenginetest?authSource=admin')
)
db = get_db('test2')
self.assertTrue(isinstance(db, pymongo.database.Database))
self.assertIsInstance(db, pymongo.database.Database)
self.assertEqual(db.name, 'mongoenginetest')

# Clear all users
@ -255,10 +255,10 @@ class ConnectionTest(unittest.TestCase):

self.assertRaises(MongoEngineConnectionError, get_connection)
conn = get_connection('testdb')
self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))
self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)

db = get_db('testdb')
self.assertTrue(isinstance(db, pymongo.database.Database))
self.assertIsInstance(db, pymongo.database.Database)
self.assertEqual(db.name, 'mongoenginetest2')

def test_register_connection_defaults(self):
@ -267,7 +267,7 @@ class ConnectionTest(unittest.TestCase):
register_connection('testdb', 'mongoenginetest', host=None, port=None)

conn = get_connection('testdb')
self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))
self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)

def test_connection_kwargs(self):
"""Ensure that connection kwargs get passed to pymongo."""
@ -326,7 +326,7 @@ class ConnectionTest(unittest.TestCase):
if IS_PYMONGO_3:
c = connect(host='mongodb://localhost/test?replicaSet=local-rs')
db = get_db()
self.assertTrue(isinstance(db, pymongo.database.Database))
self.assertIsInstance(db, pymongo.database.Database)
self.assertEqual(db.name, 'test')
else:
# PyMongo < v3.x raises an exception:
@ -343,7 +343,7 @@ class ConnectionTest(unittest.TestCase):
self.assertEqual(c._MongoClient__options.replica_set_name,
'local-rs')
db = get_db()
self.assertTrue(isinstance(db, pymongo.database.Database))
self.assertIsInstance(db, pymongo.database.Database)
self.assertEqual(db.name, 'test')
else:
# PyMongo < v3.x raises an exception:
@ -364,6 +364,12 @@ class ConnectionTest(unittest.TestCase):
date_doc = DateDoc.objects.first()
self.assertEqual(d, date_doc.the_date)

def test_read_preference_from_parse(self):
if IS_PYMONGO_3:
from pymongo import ReadPreference
conn = connect(host="mongodb://a1.vpc,a2.vpc,a3.vpc/prod?readPreference=secondaryPreferred")
self.assertEqual(conn.read_preference, ReadPreference.SECONDARY_PREFERRED)

def test_multiple_connection_settings(self):
connect('mongoenginetest', alias='t1', host="localhost")

@ -371,8 +377,8 @@ class ConnectionTest(unittest.TestCase):

mongo_connections = mongoengine.connection._connections
self.assertEqual(len(mongo_connections.items()), 2)
self.assertTrue('t1' in mongo_connections.keys())
self.assertTrue('t2' in mongo_connections.keys())
self.assertIn('t1', mongo_connections.keys())
self.assertIn('t2', mongo_connections.keys())
if not IS_PYMONGO_3:
self.assertEqual(mongo_connections['t1'].host, 'localhost')
self.assertEqual(mongo_connections['t2'].host, '127.0.0.1')
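The recurring change above -- `assertTrue(isinstance(...))` rewritten as `assertIsInstance(...)` -- does not alter what is tested, only what a failure reports. A minimal sketch (hypothetical test class, plain unittest) of the difference:

    import unittest

    class AssertStyleDemo(unittest.TestCase):
        def test_old_style(self):
            # fails with a bare "False is not true" -- no hint about the value's type
            self.assertTrue(isinstance('x', int))

        def test_new_style(self):
            # fails with a message naming the offending value and the expected type
            self.assertIsInstance('x', int)

    if __name__ == '__main__':
        unittest.main()

The same reasoning applies to the `assertIn`/`assertNotIsInstance` swaps below.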
@ -89,15 +89,15 @@ class ContextManagersTest(unittest.TestCase):

with no_dereference(Group) as Group:
group = Group.objects.first()
self.assertTrue(all([not isinstance(m, User)
for m in group.members]))
self.assertFalse(isinstance(group.ref, User))
self.assertFalse(isinstance(group.generic, User))
for m in group.members:
self.assertNotIsInstance(m, User)
self.assertNotIsInstance(group.ref, User)
self.assertNotIsInstance(group.generic, User)

self.assertTrue(all([isinstance(m, User)
for m in group.members]))
self.assertTrue(isinstance(group.ref, User))
self.assertTrue(isinstance(group.generic, User))
for m in group.members:
self.assertIsInstance(m, User)
self.assertIsInstance(group.ref, User)
self.assertIsInstance(group.generic, User)

def test_no_dereference_context_manager_dbref(self):
"""Ensure that DBRef items in ListFields aren't dereferenced.
@ -129,19 +129,17 @@ class ContextManagersTest(unittest.TestCase):
group = Group.objects.first()
self.assertTrue(all([not isinstance(m, User)
for m in group.members]))
self.assertFalse(isinstance(group.ref, User))
self.assertFalse(isinstance(group.generic, User))
self.assertNotIsInstance(group.ref, User)
self.assertNotIsInstance(group.generic, User)

self.assertTrue(all([isinstance(m, User)
for m in group.members]))
self.assertTrue(isinstance(group.ref, User))
self.assertTrue(isinstance(group.generic, User))
self.assertIsInstance(group.ref, User)
self.assertIsInstance(group.generic, User)

def test_no_sub_classes(self):
class A(Document):
x = IntField()
y = IntField()

meta = {'allow_inheritance': True}

class B(A):
@ -152,29 +150,29 @@ class ContextManagersTest(unittest.TestCase):

A.drop_collection()

A(x=10, y=20).save()
A(x=15, y=30).save()
B(x=20, y=40).save()
B(x=30, y=50).save()
C(x=40, y=60).save()
A(x=10).save()
A(x=15).save()
B(x=20).save()
B(x=30).save()
C(x=40).save()

self.assertEqual(A.objects.count(), 5)
self.assertEqual(B.objects.count(), 3)
self.assertEqual(C.objects.count(), 1)

with no_sub_classes(A) as A:
with no_sub_classes(A):
self.assertEqual(A.objects.count(), 2)

for obj in A.objects:
self.assertEqual(obj.__class__, A)

with no_sub_classes(B) as B:
with no_sub_classes(B):
self.assertEqual(B.objects.count(), 2)

for obj in B.objects:
self.assertEqual(obj.__class__, B)

with no_sub_classes(C) as C:
with no_sub_classes(C):
self.assertEqual(C.objects.count(), 1)

for obj in C.objects:
@ -185,18 +183,125 @@ class ContextManagersTest(unittest.TestCase):
self.assertEqual(B.objects.count(), 3)
self.assertEqual(C.objects.count(), 1)

def test_no_sub_classes_modification_to_document_class_are_temporary(self):
class A(Document):
x = IntField()
meta = {'allow_inheritance': True}

class B(A):
z = IntField()

self.assertEqual(A._subclasses, ('A', 'A.B'))
with no_sub_classes(A):
self.assertEqual(A._subclasses, ('A',))
self.assertEqual(A._subclasses, ('A', 'A.B'))

self.assertEqual(B._subclasses, ('A.B',))
with no_sub_classes(B):
self.assertEqual(B._subclasses, ('A.B',))
self.assertEqual(B._subclasses, ('A.B',))

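The `as` aliases are gone because the rewritten context manager mutates and then restores the class in place rather than yielding a replacement, and the new test pins down that the `_subclasses` tweak is rolled back on exit. A minimal sketch of a manager with that shape (assuming the `_class_name`/`_subclasses` attributes exercised above; not necessarily the library's exact code):

    class no_sub_classes(object):
        """Temporarily restrict a class's _subclasses to the class itself."""

        def __init__(self, cls):
            self.cls = cls
            self._initial_subclasses = None

        def __enter__(self):
            self._initial_subclasses = self.cls._subclasses
            self.cls._subclasses = (self.cls._class_name,)
            return self.cls

        def __exit__(self, exc_type, exc_value, traceback):
            # restore on the way out; returning None lets exceptions propagate
            self.cls._subclasses = self._initial_subclasses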
def test_no_subclass_context_manager_does_not_swallow_exception(self):
class User(Document):
name = StringField()

with self.assertRaises(TypeError):
with no_sub_classes(User):
raise TypeError()

def test_query_counter_does_not_swallow_exception(self):

with self.assertRaises(TypeError):
with query_counter() as q:
raise TypeError()

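Both "does not swallow exception" tests pin down the same contract: a context manager's `__exit__` must not return a truthy value, otherwise an exception raised inside the block is silenced. The mechanics in miniature:

    class swallowing_manager(object):
        """What the tests guard against: an __exit__ that silences errors."""

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc_value, traceback):
            return True  # truthy return value suppresses the exception

    with swallowing_manager():
        raise TypeError()  # silently swallowed

    print('reached -- the TypeError never propagated')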
def test_query_counter_temporarily_modifies_profiling_level(self):
connect('mongoenginetest')
db = get_db()

initial_profiling_level = db.profiling_level()

try:
NEW_LEVEL = 1
db.set_profiling_level(NEW_LEVEL)
self.assertEqual(db.profiling_level(), NEW_LEVEL)
with query_counter() as q:
self.assertEqual(db.profiling_level(), 2)
self.assertEqual(db.profiling_level(), NEW_LEVEL)
except Exception:
db.set_profiling_level(initial_profiling_level) # Ensures it gets reset no matter the outcome of the test
raise

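The test above encodes how `query_counter` does its bookkeeping: on entry it raises the database's profiler to level 2 (log every operation) and on exit it puts the previous level back, counting operations out of `db.system.profile` in between. A simplified sketch of that mechanism (illustrative only, not the library's implementation):

    class simple_query_counter(object):
        """Count operations via MongoDB's query profiler."""

        def __init__(self, db):
            self.db = db
            self._initial_level = None

        def __enter__(self):
            self._initial_level = self.db.profiling_level()
            self.db.set_profiling_level(0)  # profiling must be off to drop the collection
            self.db.system.profile.drop()
            self.db.set_profiling_level(2)  # level 2 = profile every operation
            return self

        def __exit__(self, exc_type, exc_value, traceback):
            self.db.set_profiling_level(self._initial_level)

        def count(self):
            return self.db.system.profile.find().count()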
def test_query_counter(self):
connect('mongoenginetest')
db = get_db()
db.test.find({})

collection = db.query_counter
collection.drop()

def issue_1_count_query():
collection.find({}).count()

def issue_1_insert_query():
collection.insert_one({'test': 'garbage'})

def issue_1_find_query():
collection.find_one()

counter = 0
with query_counter() as q:
self.assertEqual(q, counter)
self.assertEqual(q, counter) # Ensures previous count query did not get counted

for _ in range(10):
issue_1_insert_query()
counter += 1
self.assertEqual(q, counter)

for _ in range(4):
issue_1_find_query()
counter += 1
self.assertEqual(q, counter)

for _ in range(3):
issue_1_count_query()
counter += 1
self.assertEqual(q, counter)

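A detail worth noting in the assertions above: `q` is compared directly against a plain integer, so the object yielded by `query_counter` must behave like an int whose value is recomputed on each comparison. The idea in miniature (hypothetical helper):

    class CountProxy(object):
        """Acts like an int; the value is re-evaluated on every comparison."""

        def __init__(self, get_count):
            self._get_count = get_count  # callable returning the current count

        def __eq__(self, other):
            return self._get_count() == other

        def __ne__(self, other):
            return self._get_count() != other

        def __repr__(self):
            return str(self._get_count())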
def test_query_counter_counts_getmore_queries(self):
connect('mongoenginetest')
db = get_db()

collection = db.query_counter
collection.drop()

many_docs = [{'test': 'garbage %s' % i} for i in range(150)]
collection.insert_many(many_docs) # first batch of documents contains 101 documents

with query_counter() as q:
self.assertEqual(0, q)
self.assertEqual(q, 0)
list(collection.find())
self.assertEqual(q, 2) # 1st select + 1 getmore

for i in range(1, 51):
db.test.find({}).count()

self.assertEqual(50, q)

def test_query_counter_ignores_particular_queries(self):
connect('mongoenginetest')
db = get_db()

collection = db.query_counter
collection.insert_many([{'test': 'garbage %s' % i} for i in range(10)])

with query_counter() as q:
self.assertEqual(q, 0)
cursor = collection.find()
self.assertEqual(q, 0) # cursor wasn't opened yet
_ = next(cursor) # opens the cursor and fires the find query
self.assertEqual(q, 1)

cursor.close() # issues a `killcursors` query that is ignored by the context
self.assertEqual(q, 1)

_ = db.system.indexes.find_one() # queries on db.system.indexes are ignored as well
self.assertEqual(q, 1)

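The getmore test leans on the driver's batching rules: the first reply to a `find` carries at most 101 documents, so materialising 150 documents typically costs one `find` plus one `getMore`, i.e. the two counted operations. Forcing a smaller batch makes the round-trips explicit (a sketch assuming a local mongod and the collection populated above):

    import pymongo

    client = pymongo.MongoClient()
    coll = client.mongoenginetest.query_counter

    # batch_size(50) over 150 documents: 1 find + 2 getMore round-trips
    docs = list(coll.find().batch_size(50))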
if __name__ == '__main__':
unittest.main()

@ -1,6 +1,21 @@
import unittest

from mongoengine.base.datastructures import StrictDict, SemiStrictDict
from mongoengine.base.datastructures import StrictDict, BaseList


class TestBaseList(unittest.TestCase):

def test_iter_simple(self):
values = [True, False, True, False]
base_list = BaseList(values, instance=None, name='my_name')
self.assertEqual(values, list(base_list))

def test_iter_allow_modification_while_iterating_without_error(self):
# regular lists allow this, so the subclass must comply with it
base_list = BaseList([True, False, True, False], instance=None, name='my_name')
for idx, val in enumerate(base_list):
if val:
base_list.pop(idx)


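The second test mirrors stdlib behaviour: a plain list tolerates mutation during iteration because the iterator just advances an index, silently skipping shifted items rather than raising. The baseline `BaseList` must match:

    values = [True, False, True, False]
    for idx, val in enumerate(values):
        if val:
            values.pop(idx)  # no error; later items shift down and some are skipped
    print(values)  # [False, False]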
class TestStrictDict(unittest.TestCase):
@ -76,44 +91,5 @@ class TestStrictDict(unittest.TestCase):
assert dict(**d) == {'a': 1, 'b': 2}


class TestSemiSrictDict(TestStrictDict):
def strict_dict_class(self, *args, **kwargs):
return SemiStrictDict.create(*args, **kwargs)

def test_init_fails_on_nonexisting_attrs(self):
# disable irrelevant test
pass

def test_setattr_raises_on_nonexisting_attr(self):
# disable irrelevant test
pass

def test_setattr_getattr_nonexisting_attr_succeeds(self):
d = self.dtype()
d.x = 1
self.assertEqual(d.x, 1)

def test_init_succeeds_with_nonexisting_attrs(self):
d = self.dtype(a=1, b=1, c=1, x=2)
self.assertEqual((d.a, d.b, d.c, d.x), (1, 1, 1, 2))

def test_iter_with_nonexisting_attrs(self):
d = self.dtype(a=1, b=1, c=1, x=2)
self.assertEqual(list(d), ['a', 'b', 'c', 'x'])

def test_iteritems_with_nonexisting_attrs(self):
d = self.dtype(a=1, b=1, c=1, x=2)
self.assertEqual(list(d.iteritems()), [('a', 1), ('b', 1), ('c', 1), ('x', 2)])

def tets_cmp_with_strict_dicts(self):
d = self.dtype(a=1, b=1, c=1)
dd = StrictDict.create(("a", "b", "c"))(a=1, b=1, c=1)
self.assertEqual(d, dd)

def test_cmp_with_strict_dict_with_nonexisting_attrs(self):
d = self.dtype(a=1, b=1, c=1, x=2)
dd = StrictDict.create(("a", "b", "c", "x"))(a=1, b=1, c=1, x=2)
self.assertEqual(d, dd)

if __name__ == '__main__':
unittest.main()

@ -200,8 +200,8 @@ class FieldTest(unittest.TestCase):
group = Group(author=user, members=[user]).save()

raw_data = Group._get_collection().find_one()
self.assertTrue(isinstance(raw_data['author'], DBRef))
self.assertTrue(isinstance(raw_data['members'][0], DBRef))
self.assertIsInstance(raw_data['author'], DBRef)
self.assertIsInstance(raw_data['members'][0], DBRef)
group = Group.objects.first()

self.assertEqual(group.author, user)
@ -224,8 +224,8 @@ class FieldTest(unittest.TestCase):
self.assertEqual(group.members, [user])

raw_data = Group._get_collection().find_one()
self.assertTrue(isinstance(raw_data['author'], ObjectId))
self.assertTrue(isinstance(raw_data['members'][0], ObjectId))
self.assertIsInstance(raw_data['author'], ObjectId)
self.assertIsInstance(raw_data['members'][0], ObjectId)

def test_recursive_reference(self):
"""Ensure that ReferenceFields can reference their own documents.
@ -469,7 +469,7 @@ class FieldTest(unittest.TestCase):
self.assertEqual(q, 4)

for m in group_obj.members:
self.assertTrue('User' in m.__class__.__name__)
self.assertIn('User', m.__class__.__name__)

# Document select_related
with query_counter() as q:
@ -485,7 +485,7 @@ class FieldTest(unittest.TestCase):
self.assertEqual(q, 4)

for m in group_obj.members:
self.assertTrue('User' in m.__class__.__name__)
self.assertIn('User', m.__class__.__name__)

# Queryset select_related
with query_counter() as q:
@ -502,7 +502,7 @@ class FieldTest(unittest.TestCase):
self.assertEqual(q, 4)

for m in group_obj.members:
self.assertTrue('User' in m.__class__.__name__)
self.assertIn('User', m.__class__.__name__)

UserA.drop_collection()
UserB.drop_collection()
@ -560,7 +560,7 @@ class FieldTest(unittest.TestCase):
self.assertEqual(q, 4)

for m in group_obj.members:
self.assertTrue('User' in m.__class__.__name__)
self.assertIn('User', m.__class__.__name__)

# Document select_related
with query_counter() as q:
@ -576,7 +576,7 @@ class FieldTest(unittest.TestCase):
self.assertEqual(q, 4)

for m in group_obj.members:
self.assertTrue('User' in m.__class__.__name__)
self.assertIn('User', m.__class__.__name__)

# Queryset select_related
with query_counter() as q:
@ -593,7 +593,7 @@ class FieldTest(unittest.TestCase):
self.assertEqual(q, 4)

for m in group_obj.members:
self.assertTrue('User' in m.__class__.__name__)
self.assertIn('User', m.__class__.__name__)

UserA.drop_collection()
UserB.drop_collection()
@ -633,7 +633,7 @@ class FieldTest(unittest.TestCase):
self.assertEqual(q, 2)

for k, m in group_obj.members.iteritems():
self.assertTrue(isinstance(m, User))
self.assertIsInstance(m, User)

# Document select_related
with query_counter() as q:
@ -646,7 +646,7 @@ class FieldTest(unittest.TestCase):
self.assertEqual(q, 2)

for k, m in group_obj.members.iteritems():
self.assertTrue(isinstance(m, User))
self.assertIsInstance(m, User)

# Queryset select_related
with query_counter() as q:
@ -660,7 +660,7 @@ class FieldTest(unittest.TestCase):
self.assertEqual(q, 2)

for k, m in group_obj.members.iteritems():
self.assertTrue(isinstance(m, User))
self.assertIsInstance(m, User)

User.drop_collection()
Group.drop_collection()
@ -715,7 +715,7 @@ class FieldTest(unittest.TestCase):
self.assertEqual(q, 4)

for k, m in group_obj.members.iteritems():
self.assertTrue('User' in m.__class__.__name__)
self.assertIn('User', m.__class__.__name__)

# Document select_related
with query_counter() as q:
@ -731,7 +731,7 @@ class FieldTest(unittest.TestCase):
self.assertEqual(q, 4)

for k, m in group_obj.members.iteritems():
self.assertTrue('User' in m.__class__.__name__)
self.assertIn('User', m.__class__.__name__)

# Queryset select_related
with query_counter() as q:
@ -748,7 +748,7 @@ class FieldTest(unittest.TestCase):
self.assertEqual(q, 4)

for k, m in group_obj.members.iteritems():
self.assertTrue('User' in m.__class__.__name__)
self.assertIn('User', m.__class__.__name__)

Group.objects.delete()
Group().save()
@ -806,7 +806,7 @@ class FieldTest(unittest.TestCase):
self.assertEqual(q, 2)

for k, m in group_obj.members.iteritems():
self.assertTrue(isinstance(m, UserA))
self.assertIsInstance(m, UserA)

# Document select_related
with query_counter() as q:
@ -822,7 +822,7 @@ class FieldTest(unittest.TestCase):
self.assertEqual(q, 2)

for k, m in group_obj.members.iteritems():
self.assertTrue(isinstance(m, UserA))
self.assertIsInstance(m, UserA)

# Queryset select_related
with query_counter() as q:
@ -839,7 +839,7 @@ class FieldTest(unittest.TestCase):
self.assertEqual(q, 2)

for k, m in group_obj.members.iteritems():
self.assertTrue(isinstance(m, UserA))
self.assertIsInstance(m, UserA)

UserA.drop_collection()
Group.drop_collection()
@ -894,7 +894,7 @@ class FieldTest(unittest.TestCase):
self.assertEqual(q, 4)

for k, m in group_obj.members.iteritems():
self.assertTrue('User' in m.__class__.__name__)
self.assertIn('User', m.__class__.__name__)

# Document select_related
with query_counter() as q:
@ -910,7 +910,7 @@ class FieldTest(unittest.TestCase):
self.assertEqual(q, 4)

for k, m in group_obj.members.iteritems():
self.assertTrue('User' in m.__class__.__name__)
self.assertIn('User', m.__class__.__name__)

# Queryset select_related
with query_counter() as q:
@ -927,7 +927,7 @@ class FieldTest(unittest.TestCase):
self.assertEqual(q, 4)

for k, m in group_obj.members.iteritems():
self.assertTrue('User' in m.__class__.__name__)
self.assertIn('User', m.__class__.__name__)

Group.objects.delete()
Group().save()
@ -1209,10 +1209,10 @@ class FieldTest(unittest.TestCase):

# Can't use query_counter across databases - so test the _data object
book = Book.objects.first()
self.assertFalse(isinstance(book._data['author'], User))
self.assertNotIsInstance(book._data['author'], User)

book.select_related()
self.assertTrue(isinstance(book._data['author'], User))
self.assertIsInstance(book._data['author'], User)

def test_non_ascii_pk(self):
"""
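One pattern repeated throughout this file deserves a note: the members lists here mix several generated classes (UserA, UserB, UserC), so the tests assert that the class *name* contains 'User' instead of pinning one concrete type. In miniature:

    class UserA(object): pass
    class UserB(object): pass

    members = [UserA(), UserB()]
    for m in members:
        assert 'User' in m.__class__.__name__  # holds for every User* variant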
38
tests/test_utils.py
Normal file
@ -0,0 +1,38 @@
import unittest
import re

from mongoengine.base.utils import LazyRegexCompiler

signal_output = []


class LazyRegexCompilerTest(unittest.TestCase):

def test_lazy_regex_compiler_verify_laziness_of_descriptor(self):
class UserEmail(object):
EMAIL_REGEX = LazyRegexCompiler('@', flags=32)

descriptor = UserEmail.__dict__['EMAIL_REGEX']
self.assertIsNone(descriptor._compiled_regex)

regex = UserEmail.EMAIL_REGEX
self.assertEqual(regex, re.compile('@', flags=32))
self.assertEqual(regex.search('user@domain.com').group(), '@')

user_email = UserEmail()
self.assertIs(user_email.EMAIL_REGEX, UserEmail.EMAIL_REGEX)

def test_lazy_regex_compiler_verify_cannot_set_descriptor_on_instance(self):
class UserEmail(object):
EMAIL_REGEX = LazyRegexCompiler('@')

user_email = UserEmail()
with self.assertRaises(AttributeError):
user_email.EMAIL_REGEX = re.compile('@')

def test_lazy_regex_compiler_verify_can_override_class_attr(self):
class UserEmail(object):
EMAIL_REGEX = LazyRegexCompiler('@')

UserEmail.EMAIL_REGEX = re.compile('cookies')
self.assertEqual(UserEmail.EMAIL_REGEX.search('Cake & cookies').group(), 'cookies')
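Taken together, the three tests specify a data descriptor: compilation deferred until first access, assignment rejected on instances but allowed on the class (which simply replaces the descriptor). A sketch that satisfies exactly these assertions (an assumed shape, not necessarily the code in mongoengine/base/utils.py):

    import re

    class LazyRegexCompiler(object):
        """Descriptor that compiles its regex pattern on first access."""

        def __init__(self, pattern, flags=0):
            self._pattern = pattern
            self._flags = flags
            self._compiled_regex = None  # stays None until first attribute access

        def __get__(self, instance, owner):
            if self._compiled_regex is None:
                self._compiled_regex = re.compile(self._pattern, self._flags)
            return self._compiled_regex

        def __set__(self, instance, value):
            # defining __set__ makes this a data descriptor, so instance-level
            # assignment fails; class-level assignment still works because it
            # replaces the descriptor itself, as the last test shows
            raise AttributeError('LazyRegexCompiler cannot be set on an instance')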
@ -7,12 +7,12 @@ from mongoengine.connection import get_db, get_connection
from mongoengine.python_support import IS_PYMONGO_3


MONGO_TEST_DB = 'mongoenginetest'
MONGO_TEST_DB = 'mongoenginetest'  # standard name for the test database


class MongoDBTestCase(unittest.TestCase):
"""Base class for tests that need a mongodb connection
db is being dropped automatically
It ensures that the db is clean at the beginning and dropped at the end automatically
"""

@classmethod
@ -32,6 +32,7 @@ def get_mongodb_version():
"""
return tuple(get_connection().server_info()['versionArray'])


def _decorated_with_ver_requirement(func, ver_tuple):
"""Return a given function decorated with the version requirement
for a particular MongoDB version tuple.
@ -50,18 +51,21 @@ def _decorated_with_ver_requirement(func, ver_tuple):

return _inner


def needs_mongodb_v26(func):
"""Raise a SkipTest exception if we're working with MongoDB version
lower than v2.6.
"""
return _decorated_with_ver_requirement(func, (2, 6))


def needs_mongodb_v3(func):
"""Raise a SkipTest exception if we're working with MongoDB version
lower than v3.0.
"""
return _decorated_with_ver_requirement(func, (3, 0))


def skip_pymongo3(f):
"""Raise a SkipTest exception if we're running a test against
PyMongo v3.x.
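The two `needs_mongodb_*` helpers are thin wrappers around the shared decorator whose body the hunk elides. A self-contained sketch of the pattern (using `functools.wraps` and `unittest.SkipTest`; the repository's helper may differ in detail):

    import functools
    import unittest

    def _decorated_with_ver_requirement(func, ver_tuple):
        """Skip the wrapped test unless the server is at least ver_tuple."""
        @functools.wraps(func)
        def _inner(*args, **kwargs):
            mongodb_ver = get_mongodb_version()  # defined earlier in this file
            if mongodb_ver >= ver_tuple:
                return func(*args, **kwargs)
            raise unittest.SkipTest(
                'Needs MongoDB v%s+' % '.'.join(str(n) for n in ver_tuple))
        return _inner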
7
tox.ini
@ -1,13 +1,12 @@
[tox]
envlist = {py27,py35,pypy,pypy3}-{mg27,mg28,mg30}
envlist = {py27,py35,pypy,pypy3}-{mg35,mg3x}

[testenv]
commands =
python setup.py nosetests {posargs}
deps =
nose
mg27: PyMongo<2.8
mg28: PyMongo>=2.8,<2.9
mg30: PyMongo>=3.0
mg35: PyMongo==3.5
mg3x: PyMongo>=3.0,<3.7
setenv =
PYTHON_EGG_CACHE = {envdir}/python-eggs
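With the rewritten envlist each environment names a Python/PyMongo pair, so a single combination can be reproduced locally, e.g.:

    tox -e py27-mg35

which installs PyMongo==3.5 under Python 2.7 and runs the nose suite; `tox -e py35-mg3x` does the same against the newest supported 3.x driver.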