Compare commits

1 Commit

Author          SHA1         Message                      Date
Stefan Wojcik   7195236a3b   better db_field validation   2017-05-07 20:26:52 -04:00
24 changed files with 341 additions and 356 deletions

View File

@@ -1,6 +1,5 @@
#!/bin/bash
sudo apt-get remove mongodb-org-server
sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10
if [ "$MONGODB" = "2.4" ]; then
@@ -14,7 +13,7 @@ elif [ "$MONGODB" = "2.6" ]; then
sudo apt-get install mongodb-org-server=2.6.12
# service should be started automatically
elif [ "$MONGODB" = "3.0" ]; then
echo "deb http://repo.mongodb.org/apt/ubuntu trusty/mongodb-org/3.0 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb.list
echo "deb http://repo.mongodb.org/apt/ubuntu precise/mongodb-org/3.0 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb.list
sudo apt-get update
sudo apt-get install mongodb-org-server=3.0.14
# service should be started automatically
@@ -22,6 +21,3 @@ else
echo "Invalid MongoDB version, expected 2.4, 2.6, or 3.0."
exit 1
fi;
mkdir db
1>db/logs mongod --dbpath=db &

View File

@@ -15,8 +15,9 @@ language: python
python:
- 2.7
- 3.5
- 3.6
- pypy
- pypy3.3-5.2-alpha1
env:
- MONGODB=2.6 PYMONGO=2.7
@@ -40,15 +41,9 @@ matrix:
env: MONGODB=2.4 PYMONGO=3.0
- python: 3.5
env: MONGODB=3.0 PYMONGO=3.0
- python: 3.6
env: MONGODB=2.4 PYMONGO=3.0
- python: 3.6
env: MONGODB=3.0 PYMONGO=3.0
before_install:
- bash .install_mongodb_on_travis.sh
- sleep 15 # https://docs.travis-ci.com/user/database-setup/#MongoDB-does-not-immediately-accept-connections
- mongo --eval 'db.version();'
install:
- sudo apt-get install python-dev python3-dev libopenjpeg-dev zlib1g-dev libjpeg-turbo8-dev
@@ -97,7 +92,7 @@ deploy:
distributions: "sdist bdist_wheel"
# only deploy on tagged commits (aka GitHub releases) and only for the
# parent repo's builds running Python 2.7 along with PyMongo v3.0 (we run
# parent repo's builds running Python 2.7 along with dev PyMongo (we run
# Travis against many different Python and PyMongo versions and we don't
# want the deploy to occur multiple times).
on:

View File

@@ -243,5 +243,3 @@ that much better:
* Victor Varvaryuk
* Stanislav Kaledin (https://github.com/sallyruthstruik)
* Dmitry Yantsen (https://github.com/mrTable)
* Renjianxin (https://github.com/Davidrjx)
* Erdenezul Batmunkh (https://github.com/erdenezul)

View File

@@ -6,20 +6,6 @@ Development
===========
- (Fill this out as you fix issues and develop your features).
Changes in 0.14.1
=================
- Removed SemiStrictDict and started using a regular dict for `BaseDocument._data` #1630
- Added support for the `$position` param in the `$push` operator #1566
- Fixed `DateTimeField` interpreting an empty string as today #1533
- Added a missing `__ne__` method to the `GridFSProxy` class #1632
- Fixed `BaseQuerySet._fields_to_db_fields` #1553
Changes in 0.14.0
=================
- BREAKING CHANGE: Removed the `coerce_types` param from `QuerySet.as_pymongo` #1549
- POTENTIAL BREAKING CHANGE: Made EmbeddedDocument not hashable by default #1528
- Improved code quality #1531, #1540, #1541, #1547
Changes in 0.13.0
=================
- POTENTIAL BREAKING CHANGE: Added Unicode support to the `EmailField`, see

View File

@@ -565,15 +565,6 @@ cannot use the `$` syntax in keyword arguments it has been mapped to `S`::
>>> post.tags
['database', 'mongodb']
From MongoDB version 2.6, push operator supports $position value which allows
to push values with index.
>>> post = BlogPost(title="Test", tags=["mongo"])
>>> post.save()
>>> post.update(push__tags__0=["database", "code"])
>>> post.reload()
>>> post.tags
['database', 'code', 'mongo']
.. note::
Currently only top level lists are handled, future versions of mongodb /
pymongo plan to support nested positional operators. See `The $ positional

View File

@@ -6,18 +6,6 @@ Development
***********
(Fill this out whenever you introduce breaking changes to MongoEngine)
0.14.0
******
This release includes a few bug fixes and a significant code cleanup. The most
important change is that `QuerySet.as_pymongo` no longer supports a
`coerce_types` mode. If you used it in the past, a) please let us know of your
use case, b) you'll need to override `as_pymongo` to get the desired outcome.
This release also makes the EmbeddedDocument not hashable by default. If you
use embedded documents in sets or dictionaries, you might have to override
`__hash__` and implement a hashing logic specific to your use case. See #1528
for the reason behind this change.
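If you do rely on hashable embedded documents, a minimal sketch of one possible
override (the `Address` class and its fields are hypothetical) could look like::

    from mongoengine import EmbeddedDocument, StringField

    class Address(EmbeddedDocument):
        street = StringField()
        city = StringField()

        def __hash__(self):
            # Hash on the fields that identify the embedded document so
            # instances can be used in sets and as dict keys again.
            return hash((self.street, self.city))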
0.13.0
******
This release adds Unicode support to the `EmailField` and changes its

View File

@@ -23,7 +23,7 @@ __all__ = (list(document.__all__) + list(fields.__all__) +
list(signals.__all__) + list(errors.__all__))
VERSION = (0, 14, 1)
VERSION = (0, 13, 0)
def get_version():

View File

@@ -127,7 +127,7 @@ class BaseList(list):
return value
def __iter__(self):
for i in six.moves.range(self.__len__()):
for i in xrange(self.__len__()):
yield self[i]
def __setitem__(self, key, value, *args, **kwargs):
@@ -445,3 +445,42 @@ class StrictDict(object):
cls._classes[allowed_keys] = SpecificStrictDict
return cls._classes[allowed_keys]
class SemiStrictDict(StrictDict):
__slots__ = ('_extras', )
_classes = {}
def __getattr__(self, attr):
try:
super(SemiStrictDict, self).__getattr__(attr)
except AttributeError:
try:
return self.__getattribute__('_extras')[attr]
except KeyError as e:
raise AttributeError(e)
def __setattr__(self, attr, value):
try:
super(SemiStrictDict, self).__setattr__(attr, value)
except AttributeError:
try:
self._extras[attr] = value
except AttributeError:
self._extras = {attr: value}
def __delattr__(self, attr):
try:
super(SemiStrictDict, self).__delattr__(attr)
except AttributeError:
try:
del self._extras[attr]
except KeyError as e:
raise AttributeError(e)
def __iter__(self):
try:
extras_iter = iter(self.__getattribute__('_extras'))
except AttributeError:
extras_iter = ()
return itertools.chain(super(SemiStrictDict, self).__iter__(), extras_iter)
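For reference, a minimal sketch of how the restored SemiStrictDict behaves: declared keys live in __slots__, while unknown attributes fall through to the lazily created _extras dict. The values below mirror the datastructure tests further down.

    D = SemiStrictDict.create(allowed_keys=('a', 'b'))
    d = D(a=1, b=2)
    d.x = 3                             # unknown attr goes into d._extras, no AttributeError
    assert d.x == 3
    assert list(d) == ['a', 'b', 'x']   # __iter__ chains the slot keys with the extras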

View File

@@ -13,13 +13,13 @@ from mongoengine import signals
from mongoengine.base.common import get_document
from mongoengine.base.datastructures import (BaseDict, BaseList,
EmbeddedDocumentList,
StrictDict)
SemiStrictDict, StrictDict)
from mongoengine.base.fields import ComplexBaseField
from mongoengine.common import _import_class
from mongoengine.errors import (FieldDoesNotExist, InvalidDocumentError,
LookUpError, OperationError, ValidationError)
__all__ = ('BaseDocument', 'NON_FIELD_ERRORS')
__all__ = ('BaseDocument',)
NON_FIELD_ERRORS = '__all__'
@@ -79,7 +79,8 @@ class BaseDocument(object):
if self.STRICT and not self._dynamic:
self._data = StrictDict.create(allowed_keys=self._fields_ordered)()
else:
self._data = {}
self._data = SemiStrictDict.create(
allowed_keys=self._fields_ordered)()
self._dynamic_fields = SON()

View File

@@ -146,14 +146,13 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
raise MongoEngineConnectionError(msg)
def _clean_settings(settings_dict):
# set literal more efficient than calling set function
irrelevant_fields_set = {
'name', 'username', 'password',
'authentication_source', 'authentication_mechanism'
}
irrelevant_fields = set([
'name', 'username', 'password', 'authentication_source',
'authentication_mechanism'
])
return {
k: v for k, v in settings_dict.items()
if k not in irrelevant_fields_set
if k not in irrelevant_fields
}
# Retrieve a copy of the connection settings associated with the requested

View File

@@ -1,3 +1,4 @@
from collections import OrderedDict
from bson import DBRef, SON
import six
@@ -201,6 +202,10 @@ class DeReference(object):
as_tuple = isinstance(items, tuple)
iterator = enumerate(items)
data = []
elif isinstance(items, OrderedDict):
is_list = False
iterator = items.iteritems()
data = OrderedDict()
else:
is_list = False
iterator = items.iteritems()

View File

@@ -320,7 +320,7 @@ class Document(BaseDocument):
:param save_condition: only perform save if matching record in db
satisfies condition(s) (e.g. version number).
Raises :class:`OperationError` if the conditions are not satisfied
:param signal_kwargs: (optional) kwargs dictionary to be passed to
:parm signal_kwargs: (optional) kwargs dictionary to be passed to
the signal calls.
.. versionchanged:: 0.5
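A short usage sketch of save_condition for optimistic-concurrency style saves; the Page document, its fields, and the pre-existing record are hypothetical:

    from mongoengine import Document, IntField, StringField

    class Page(Document):
        version = IntField(default=1)
        body = StringField()

    page = Page.objects.first()
    page.body = 'updated'
    # The write only happens if the stored record still has the same version;
    # otherwise an OperationError is raised.
    page.save(save_condition={'version': page.version})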

View File

@@ -6,6 +6,7 @@ import socket
import time
import uuid
import warnings
from collections import Mapping
from operator import itemgetter
from bson import Binary, DBRef, ObjectId, SON
@@ -483,10 +484,6 @@ class DateTimeField(BaseField):
if not isinstance(value, six.string_types):
return None
value = value.strip()
if not value:
return None
# Attempt to parse a datetime:
if dateutil:
try:
@@ -708,6 +705,14 @@ class DynamicField(BaseField):
Used by :class:`~mongoengine.DynamicDocument` to handle dynamic data"""
def __init__(self, container_class=dict, *args, **kwargs):
self._container_cls = container_class
if not issubclass(self._container_cls, Mapping):
self.error('The class that is specified in `container_class` parameter '
'must be a subclass of `dict`.')
super(DynamicField, self).__init__(*args, **kwargs)
def to_mongo(self, value, use_db_field=True, fields=None):
"""Convert a Python type to a MongoDB compatible type.
"""
@@ -733,7 +738,7 @@ class DynamicField(BaseField):
is_list = True
value = {k: v for k, v in enumerate(value)}
data = {}
data = self._container_cls()
for k, v in value.iteritems():
data[k] = self.to_mongo(v, use_db_field, fields)
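As a sketch of the container_class parameter introduced in this diff (not part of released MongoEngine), a document whose dynamic data should keep insertion order might be declared as follows; the Store class is hypothetical and mirrors the tests below:

    from collections import OrderedDict
    from mongoengine import Document, DynamicField

    class Store(Document):
        # Dynamic data round-trips through an OrderedDict instead of a plain dict.
        merchandises = DynamicField(container_class=OrderedDict)

    Store(merchandises=OrderedDict([('#1', 100), ('#2', 120)])).save()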
@@ -1465,9 +1470,6 @@ class GridFSProxy(object):
else:
return False
def __ne__(self, other):
return not self == other
@property
def fs(self):
if not self._fs:

View File

@@ -67,6 +67,7 @@ class BaseQuerySet(object):
self._scalar = []
self._none = False
self._as_pymongo = False
self._as_pymongo_coerce = False
self._search_text = None
# If inheritance is allowed, only return instances and instances of
@@ -727,12 +728,11 @@ class BaseQuerySet(object):
'%s is not a subclass of BaseQuerySet' % new_qs.__name__)
copy_props = ('_mongo_query', '_initial_query', '_none', '_query_obj',
'_where_clause', '_loaded_fields', '_ordering',
'_snapshot', '_timeout', '_class_check', '_slave_okay',
'_read_preference', '_iter', '_scalar', '_as_pymongo',
'_where_clause', '_loaded_fields', '_ordering', '_snapshot',
'_timeout', '_class_check', '_slave_okay', '_read_preference',
'_iter', '_scalar', '_as_pymongo', '_as_pymongo_coerce',
'_limit', '_skip', '_hint', '_auto_dereference',
'_search_text', 'only_fields', '_max_time_ms',
'_comment')
'_search_text', 'only_fields', '_max_time_ms', '_comment')
for prop in copy_props:
val = getattr(self, prop)
@@ -939,8 +939,7 @@ class BaseQuerySet(object):
posts = BlogPost.objects(...).fields(slice__comments=5)
:param kwargs: A set of keyword arguments identifying what to
include, exclude, or slice.
:param kwargs: A set keywors arguments identifying what to include.
.. versionadded:: 0.5
"""
@@ -1129,15 +1128,16 @@ class BaseQuerySet(object):
"""An alias for scalar"""
return self.scalar(*fields)
def as_pymongo(self):
def as_pymongo(self, coerce_types=False):
"""Instead of returning Document instances, return raw values from
pymongo.
This method is particularly useful if you don't need dereferencing
and care primarily about the speed of data retrieval.
:param coerce_types: Field types (if applicable) would be use to
coerce types.
"""
queryset = self.clone()
queryset._as_pymongo = True
queryset._as_pymongo_coerce = coerce_types
return queryset
def max_time_ms(self, ms):
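A minimal usage sketch of the restored coerce_types flag; the Product document and its values are hypothetical, and the behaviour matches the as_pymongo queryset test near the end of this diff:

    from decimal import Decimal
    from mongoengine import Document, DecimalField

    class Product(Document):
        price = DecimalField()

    Product(price=Decimal('1.11')).save()

    raw = Product.objects.only('price').as_pymongo().first()
    # raw['price'] comes back as the raw stored float, e.g. 1.11
    typed = Product.objects.only('price').as_pymongo(coerce_types=True).first()
    # typed['price'] is coerced through DecimalField.to_python -> Decimal('1.11')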
@@ -1722,33 +1722,25 @@ class BaseQuerySet(object):
return frequencies
def _fields_to_dbfields(self, fields):
"""Translate fields' paths to their db equivalents."""
"""Translate fields paths to its db equivalents"""
ret = []
subclasses = []
if self._document._meta['allow_inheritance']:
document = self._document
if document._meta['allow_inheritance']:
subclasses = [get_document(x)
for x in self._document._subclasses][1:]
db_field_paths = []
for x in document._subclasses][1:]
for field in fields:
field_parts = field.split('.')
try:
field = '.'.join(
f if isinstance(f, six.string_types) else f.db_field
for f in self._document._lookup_field(field_parts)
)
db_field_paths.append(field)
field = '.'.join(f.db_field for f in
document._lookup_field(field.split('.')))
ret.append(field)
except LookUpError as err:
found = False
# If a field path wasn't found on the main document, go
# through its subclasses and see if it exists on any of them.
for subdoc in subclasses:
try:
subfield = '.'.join(
f if isinstance(f, six.string_types) else f.db_field
for f in subdoc._lookup_field(field_parts)
)
db_field_paths.append(subfield)
subfield = '.'.join(f.db_field for f in
subdoc._lookup_field(field.split('.')))
ret.append(subfield)
found = True
break
except LookUpError:
@@ -1756,8 +1748,7 @@ class BaseQuerySet(object):
if not found:
raise err
return db_field_paths
return ret
def _get_order_by(self, keys):
"""Given a list of MongoEngine-style sort keys, return a list
@@ -1808,25 +1799,59 @@ class BaseQuerySet(object):
return tuple(data)
def _get_as_pymongo(self, doc):
"""Clean up a PyMongo doc, removing fields that were only fetched
for the sake of MongoEngine's implementation, and return it.
"""
# Always remove _cls as a MongoEngine's implementation detail.
if '_cls' in doc:
del doc['_cls']
def _get_as_pymongo(self, row):
# Extract which fields paths we should follow if .fields(...) was
# used. If not, handle all fields.
if not getattr(self, '__as_pymongo_fields', None):
self.__as_pymongo_fields = []
# If the _id was not included in a .only or was excluded in a .exclude,
# remove it from the doc (we always fetch it so that we can properly
# construct documents).
fields = self._loaded_fields
if fields and '_id' in doc and (
(fields.value == QueryFieldList.ONLY and '_id' not in fields.fields) or
(fields.value == QueryFieldList.EXCLUDE and '_id' in fields.fields)
):
del doc['_id']
for field in self._loaded_fields.fields - set(['_cls']):
self.__as_pymongo_fields.append(field)
while '.' in field:
field, _ = field.rsplit('.', 1)
self.__as_pymongo_fields.append(field)
return doc
all_fields = not self.__as_pymongo_fields
def clean(data, path=None):
path = path or ''
if isinstance(data, dict):
new_data = {}
for key, value in data.iteritems():
new_path = '%s.%s' % (path, key) if path else key
if all_fields:
include_field = True
elif self._loaded_fields.value == QueryFieldList.ONLY:
include_field = new_path in self.__as_pymongo_fields
else:
include_field = new_path not in self.__as_pymongo_fields
if include_field:
new_data[key] = clean(value, path=new_path)
data = new_data
elif isinstance(data, list):
data = [clean(d, path=path) for d in data]
else:
if self._as_pymongo_coerce:
# If we need to coerce types, we need to determine the
# type of this field and use the corresponding
# .to_python(...)
EmbeddedDocumentField = _import_class('EmbeddedDocumentField')
obj = self._document
for chunk in path.split('.'):
obj = getattr(obj, chunk, None)
if obj is None:
break
elif isinstance(obj, EmbeddedDocumentField):
obj = obj.document_type
if obj and data is not None:
data = obj.to_python(data)
return data
return clean(row)
def _sub_js_fields(self, code):
"""When fields are specified with [~fieldname] syntax, where

View File

@@ -1,5 +1,3 @@
import six
from mongoengine.errors import OperationError
from mongoengine.queryset.base import (BaseQuerySet, CASCADE, DENY, DO_NOTHING,
NULLIFY, PULL)
@@ -114,7 +112,7 @@ class QuerySet(BaseQuerySet):
# Pull in ITER_CHUNK_SIZE docs from the database and store them in
# the result cache.
try:
for _ in six.moves.range(ITER_CHUNK_SIZE):
for _ in xrange(ITER_CHUNK_SIZE):
self._result_cache.append(self.next())
except StopIteration:
# Getting this exception means there are no more docs in the
@@ -168,7 +166,7 @@ class QuerySetNoCache(BaseQuerySet):
return '.. queryset mid-iteration ..'
data = []
for _ in six.moves.range(REPR_OUTPUT_SIZE + 1):
for _ in xrange(REPR_OUTPUT_SIZE + 1):
try:
data.append(self.next())
except StopIteration:

View File

@@ -284,9 +284,7 @@ def update(_doc_cls=None, **update):
if isinstance(field, GeoJsonBaseField):
value = field.to_mongo(value)
if op == 'push' and isinstance(value, (list, tuple, set)):
value = [field.prepare_query_value(op, v) for v in value]
elif op in (None, 'set', 'push', 'pull'):
if op in (None, 'set', 'push', 'pull'):
if field.required or value is not None:
value = field.prepare_query_value(op, value)
elif op in ('pushAll', 'pullAll'):
@@ -335,22 +333,10 @@ def update(_doc_cls=None, **update):
value = {key: value}
elif op == 'addToSet' and isinstance(value, list):
value = {key: {'$each': value}}
elif op == 'push':
if parts[-1].isdigit():
key = parts[0]
position = int(parts[-1])
# $position expects an iterable. If pushing a single value,
# wrap it in a list.
if not isinstance(value, (set, tuple, list)):
value = [value]
value = {key: {'$each': value, '$position': position}}
elif isinstance(value, list):
value = {key: {'$each': value}}
else:
value = {key: value}
else:
value = {key: value}
key = '$' + op
if key not in mongo_update:
mongo_update[key] = value
elif key in mongo_update and isinstance(mongo_update[key], dict):

View File

@@ -22,8 +22,6 @@ from mongoengine.queryset import NULLIFY, Q
from mongoengine.context_managers import switch_db, query_counter
from mongoengine import signals
from tests.utils import needs_mongodb_v26
TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__),
'../fields/mongoengine.png')
@@ -828,22 +826,6 @@ class InstanceTest(unittest.TestCase):
self.assertDbEqual([dict(other_doc.to_mongo()), dict(doc.to_mongo())])
@needs_mongodb_v26
def test_modify_with_positional_push(self):
class BlogPost(Document):
tags = ListField(StringField())
post = BlogPost.objects.create(tags=['python'])
self.assertEqual(post.tags, ['python'])
post.modify(push__tags__0=['code', 'mongo'])
self.assertEqual(post.tags, ['code', 'mongo', 'python'])
# Assert same order of the list items is maintained in the db
self.assertEqual(
BlogPost._get_collection().find_one({'_id': post.pk})['tags'],
['code', 'mongo', 'python']
)
def test_save(self):
"""Ensure that a document may be saved in the database."""
@@ -3167,22 +3149,6 @@ class InstanceTest(unittest.TestCase):
person.update(set__height=2.0)
@needs_mongodb_v26
def test_push_with_position(self):
"""Ensure that push with position works properly for an instance."""
class BlogPost(Document):
slug = StringField()
tags = ListField(StringField())
blog = BlogPost()
blog.slug = "ABC"
blog.tags = ["python"]
blog.save()
blog.update(push__tags__0=["mongodb", "code"])
blog.reload()
self.assertEqual(blog.tags, ['mongodb', 'code', 'python'])
if __name__ == '__main__':
unittest.main()

View File

@@ -5,9 +5,11 @@ import uuid
import math
import itertools
import re
import pymongo
import sys
from nose.plugins.skip import SkipTest
from collections import OrderedDict
import six
try:
@@ -26,37 +28,18 @@ except ImportError:
from mongoengine import *
from mongoengine.connection import get_db
from mongoengine.base import (BaseDict, BaseField, EmbeddedDocumentList,
_document_registry)
_document_registry, TopLevelDocumentMetaclass)
from tests.utils import MongoDBTestCase
from tests.utils import MongoDBTestCase, MONGO_TEST_DB
from mongoengine.python_support import IS_PYMONGO_3
if IS_PYMONGO_3:
from bson import CodecOptions
__all__ = ("FieldTest", "EmbeddedDocumentListFieldTestCase")
class FieldTest(MongoDBTestCase):
def test_datetime_from_empty_string(self):
"""
Ensure an exception is raised when trying to
cast an empty string to datetime.
"""
class MyDoc(Document):
dt = DateTimeField()
md = MyDoc(dt='')
self.assertRaises(ValidationError, md.save)
def test_datetime_from_whitespace_string(self):
"""
Ensure an exception is raised when trying to
cast a whitespace-only string to datetime.
"""
class MyDoc(Document):
dt = DateTimeField()
md = MyDoc(dt=' ')
self.assertRaises(ValidationError, md.save)
def test_default_values_nothing_set(self):
"""Ensure that default field values are used when creating
a document.
@@ -4205,6 +4188,67 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase):
self.assertTrue(hasattr(CustomData.c_field, 'custom_data'))
self.assertEqual(custom_data['a'], CustomData.c_field.custom_data['a'])
def test_dynamicfield_with_container_class(self):
"""
Tests that object can be stored in order by DynamicField class
with container_class parameter.
"""
raw_data = [('d', 1), ('c', 2), ('b', 3), ('a', 4)]
class Doc(Document):
ordered_data = DynamicField(container_class=OrderedDict)
unordered_data = DynamicField()
Doc.drop_collection()
doc = Doc(ordered_data=OrderedDict(raw_data), unordered_data=dict(raw_data)).save()
# checks that the data is in order
self.assertEqual(type(doc.ordered_data), OrderedDict)
self.assertEqual(type(doc.unordered_data), dict)
self.assertEqual(','.join(doc.ordered_data.keys()), 'd,c,b,a')
# checks that the data is stored to the database in order
pymongo_db = pymongo.MongoClient()[MONGO_TEST_DB]
if IS_PYMONGO_3:
codec_option = CodecOptions(document_class=OrderedDict)
db_doc = pymongo_db.doc.with_options(codec_options=codec_option).find_one()
else:
db_doc = pymongo_db.doc.find_one(as_class=OrderedDict)
self.assertEqual(','.join(doc.ordered_data.keys()), 'd,c,b,a')
def test_dynamicfield_with_wrong_container_class(self):
with self.assertRaises(ValidationError):
class DocWithInvalidField:
data = DynamicField(container_class=list)
def test_dynamicfield_with_wrong_container_class_and_reload_docuemnt(self):
# This is because 'codec_options' is supported on pymongo3 or later
if IS_PYMONGO_3:
class OrderedDocument(Document):
my_metaclass = TopLevelDocumentMetaclass
__metaclass__ = TopLevelDocumentMetaclass
@classmethod
def _get_collection(cls):
collection = super(OrderedDocument, cls)._get_collection()
opts = CodecOptions(document_class=OrderedDict)
return collection.with_options(codec_options=opts)
raw_data = [('d', 1), ('c', 2), ('b', 3), ('a', 4)]
class Doc(OrderedDocument):
data = DynamicField(container_class=OrderedDict)
Doc.drop_collection()
doc = Doc(data=OrderedDict(raw_data)).save()
doc.reload()
self.assertEqual(type(doc.data), OrderedDict)
self.assertEqual(','.join(doc.data.keys()), 'd,c,b,a')
class CachedReferenceFieldTest(MongoDBTestCase):

View File

@@ -197,18 +197,14 @@ class OnlyExcludeAllTest(unittest.TestCase):
title = StringField()
text = StringField()
class VariousData(EmbeddedDocument):
some = BooleanField()
class BlogPost(Document):
content = StringField()
author = EmbeddedDocumentField(User)
comments = ListField(EmbeddedDocumentField(Comment))
various = MapField(field=EmbeddedDocumentField(VariousData))
BlogPost.drop_collection()
post = BlogPost(content='Had a good coffee today...', various={'test_dynamic':{'some': True}})
post = BlogPost(content='Had a good coffee today...')
post.author = User(name='Test User')
post.comments = [Comment(title='I aggree', text='Great post!'), Comment(title='Coffee', text='I hate coffee')]
post.save()
@@ -219,9 +215,6 @@ class OnlyExcludeAllTest(unittest.TestCase):
self.assertEqual(obj.author.name, 'Test User')
self.assertEqual(obj.comments, [])
obj = BlogPost.objects.only('various.test_dynamic.some').get()
self.assertEqual(obj.various["test_dynamic"].some, True)
obj = BlogPost.objects.only('content', 'comments.title',).get()
self.assertEqual(obj.content, 'Had a good coffee today...')
self.assertEqual(obj.author, None)

View File

@@ -510,24 +510,6 @@ class GeoQueriesTest(MongoDBTestCase):
roads = Road.objects.filter(poly__geo_intersects={"$geometry": polygon}).count()
self.assertEqual(1, roads)
def test_aspymongo_with_only(self):
"""Ensure as_pymongo works with only"""
class Place(Document):
location = PointField()
Place.drop_collection()
p = Place(location=[24.946861267089844, 60.16311983618494])
p.save()
qs = Place.objects().only('location')
self.assertDictEqual(
qs.as_pymongo()[0]['location'],
{u'type': u'Point',
u'coordinates': [
24.946861267089844,
60.16311983618494]
}
)
def test_2dsphere_point_sets_correctly(self):
class Location(Document):
loc = PointField()

View File

@@ -1,8 +1,6 @@
import unittest
from mongoengine import connect, Document, IntField, StringField, ListField
from tests.utils import needs_mongodb_v26
from mongoengine import connect, Document, IntField
__all__ = ("FindAndModifyTest",)
@@ -96,37 +94,6 @@ class FindAndModifyTest(unittest.TestCase):
self.assertEqual(old_doc.to_mongo(), {"_id": 1})
self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}])
@needs_mongodb_v26
def test_modify_with_push(self):
class BlogPost(Document):
tags = ListField(StringField())
BlogPost.drop_collection()
blog = BlogPost.objects.create()
# Push a new tag via modify with new=False (default).
BlogPost(id=blog.id).modify(push__tags='code')
self.assertEqual(blog.tags, [])
blog.reload()
self.assertEqual(blog.tags, ['code'])
# Push a new tag via modify with new=True.
blog = BlogPost.objects(id=blog.id).modify(push__tags='java', new=True)
self.assertEqual(blog.tags, ['code', 'java'])
# Push a new tag with a positional argument.
blog = BlogPost.objects(id=blog.id).modify(
push__tags__0='python',
new=True)
self.assertEqual(blog.tags, ['python', 'code', 'java'])
# Push multiple new tags with a positional argument.
blog = BlogPost.objects(id=blog.id).modify(
push__tags__1=['go', 'rust'],
new=True)
self.assertEqual(blog.tags, ['python', 'go', 'rust', 'code', 'java'])
if __name__ == '__main__':
unittest.main()

View File

@@ -1903,32 +1903,6 @@ class QuerySetTest(unittest.TestCase):
BlogPost.drop_collection()
@needs_mongodb_v26
def test_update_push_with_position(self):
"""Ensure that the 'push' update with position works properly.
"""
class BlogPost(Document):
slug = StringField()
tags = ListField(StringField())
BlogPost.drop_collection()
post = BlogPost.objects.create(slug="test")
BlogPost.objects.filter(id=post.id).update(push__tags="code")
BlogPost.objects.filter(id=post.id).update(push__tags__0=["mongodb", "python"])
post.reload()
self.assertEqual(post.tags, ['mongodb', 'python', 'code'])
BlogPost.objects.filter(id=post.id).update(set__tags__2="java")
post.reload()
self.assertEqual(post.tags, ['mongodb', 'python', 'java'])
#test push with singular value
BlogPost.objects.filter(id=post.id).update(push__tags__0='scala')
post.reload()
self.assertEqual(post.tags, ['scala', 'mongodb', 'python', 'java'])
def test_update_push_and_pull_add_to_set(self):
"""Ensure that the 'pull' update operation works correctly.
"""
@@ -4073,35 +4047,6 @@ class QuerySetTest(unittest.TestCase):
plist = list(Person.objects.scalar('name', 'state'))
self.assertEqual(plist, [(u'Wilson JR', s1)])
def test_generic_reference_field_with_only_and_as_pymongo(self):
class TestPerson(Document):
name = StringField()
class TestActivity(Document):
name = StringField()
owner = GenericReferenceField()
TestPerson.drop_collection()
TestActivity.drop_collection()
person = TestPerson(name='owner')
person.save()
a1 = TestActivity(name='a1', owner=person)
a1.save()
activity = TestActivity.objects(owner=person).scalar('id', 'owner').no_dereference().first()
self.assertEqual(activity[0], a1.pk)
self.assertEqual(activity[1]['_ref'], DBRef('test_person', person.pk))
activity = TestActivity.objects(owner=person).only('id', 'owner')[0]
self.assertEqual(activity.pk, a1.pk)
self.assertEqual(activity.owner, person)
activity = TestActivity.objects(owner=person).only('id', 'owner').as_pymongo().first()
self.assertEqual(activity['_id'], a1.pk)
self.assertTrue(activity['owner']['_ref'], DBRef('test_person', person.pk))
def test_scalar_db_field(self):
class TestDoc(Document):
@@ -4447,44 +4392,21 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual(doc_objects, Doc.objects.from_json(json_data))
def test_as_pymongo(self):
from decimal import Decimal
class LastLogin(EmbeddedDocument):
location = StringField()
ip = StringField()
from decimal import Decimal
class User(Document):
id = ObjectIdField('_id')
name = StringField()
age = IntField()
price = DecimalField()
last_login = EmbeddedDocumentField(LastLogin)
User.drop_collection()
User.objects.create(name="Bob Dole", age=89, price=Decimal('1.11'))
User.objects.create(
name="Barack Obama",
age=51,
price=Decimal('2.22'),
last_login=LastLogin(
location='White House',
ip='104.107.108.116'
)
)
results = User.objects.as_pymongo()
self.assertEqual(
set(results[0].keys()),
set(['_id', 'name', 'age', 'price'])
)
self.assertEqual(
set(results[1].keys()),
set(['_id', 'name', 'age', 'price', 'last_login'])
)
User(name="Bob Dole", age=89, price=Decimal('1.11')).save()
User(name="Barack Obama", age=51, price=Decimal('2.22')).save()
results = User.objects.only('id', 'name').as_pymongo()
self.assertEqual(set(results[0].keys()), set(['_id', 'name']))
self.assertEqual(sorted(results[0].keys()), sorted(['_id', 'name']))
users = User.objects.only('name', 'price').as_pymongo()
results = list(users)
@@ -4495,20 +4417,16 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual(results[1]['name'], 'Barack Obama')
self.assertEqual(results[1]['price'], 2.22)
users = User.objects.only('name', 'last_login').as_pymongo()
# Test coerce_types
users = User.objects.only(
'name', 'price').as_pymongo(coerce_types=True)
results = list(users)
self.assertTrue(isinstance(results[0], dict))
self.assertTrue(isinstance(results[1], dict))
self.assertEqual(results[0], {
'name': 'Bob Dole'
})
self.assertEqual(results[1], {
'name': 'Barack Obama',
'last_login': {
'location': 'White House',
'ip': '104.107.108.116'
}
})
self.assertEqual(results[0]['name'], 'Bob Dole')
self.assertEqual(results[0]['price'], Decimal('1.11'))
self.assertEqual(results[1]['name'], 'Barack Obama')
self.assertEqual(results[1]['price'], Decimal('2.22'))
def test_as_pymongo_json_limit_fields(self):
@@ -4672,6 +4590,7 @@ class QuerySetTest(unittest.TestCase):
def test_no_cache(self):
"""Ensure you can add meta data to file"""
class Noddy(Document):
fields = DictField()
@@ -4689,19 +4608,15 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual(len(list(docs)), 100)
# Can't directly get a length of a no-cache queryset.
with self.assertRaises(TypeError):
len(docs)
# Another iteration over the queryset should result in another db op.
with query_counter() as q:
self.assertEqual(q, 0)
list(docs)
self.assertEqual(q, 1)
# ... and another one to double-check.
with query_counter() as q:
list(docs)
self.assertEqual(q, 1)
self.assertEqual(q, 2)
def test_nested_queryset_iterator(self):
# Try iterating the same queryset twice, nested.

View File

@@ -1,6 +1,6 @@
import unittest
from mongoengine.base.datastructures import StrictDict
from mongoengine.base.datastructures import StrictDict, SemiStrictDict
class TestStrictDict(unittest.TestCase):
@@ -76,5 +76,44 @@ class TestStrictDict(unittest.TestCase):
assert dict(**d) == {'a': 1, 'b': 2}
class TestSemiSrictDict(TestStrictDict):
def strict_dict_class(self, *args, **kwargs):
return SemiStrictDict.create(*args, **kwargs)
def test_init_fails_on_nonexisting_attrs(self):
# disable irrelevant test
pass
def test_setattr_raises_on_nonexisting_attr(self):
# disable irrelevant test
pass
def test_setattr_getattr_nonexisting_attr_succeeds(self):
d = self.dtype()
d.x = 1
self.assertEqual(d.x, 1)
def test_init_succeeds_with_nonexisting_attrs(self):
d = self.dtype(a=1, b=1, c=1, x=2)
self.assertEqual((d.a, d.b, d.c, d.x), (1, 1, 1, 2))
def test_iter_with_nonexisting_attrs(self):
d = self.dtype(a=1, b=1, c=1, x=2)
self.assertEqual(list(d), ['a', 'b', 'c', 'x'])
def test_iteritems_with_nonexisting_attrs(self):
d = self.dtype(a=1, b=1, c=1, x=2)
self.assertEqual(list(d.iteritems()), [('a', 1), ('b', 1), ('c', 1), ('x', 2)])
def tets_cmp_with_strict_dicts(self):
d = self.dtype(a=1, b=1, c=1)
dd = StrictDict.create(("a", "b", "c"))(a=1, b=1, c=1)
self.assertEqual(d, dd)
def test_cmp_with_strict_dict_with_nonexisting_attrs(self):
d = self.dtype(a=1, b=1, c=1, x=2)
dd = StrictDict.create(("a", "b", "c", "x"))(a=1, b=1, c=1, x=2)
self.assertEqual(d, dd)
if __name__ == '__main__':
unittest.main()

View File

@@ -2,10 +2,15 @@
import unittest
from bson import DBRef, ObjectId
from collections import OrderedDict
from mongoengine import *
from mongoengine.connection import get_db
from mongoengine.context_managers import query_counter
from mongoengine.python_support import IS_PYMONGO_3
from mongoengine.base import TopLevelDocumentMetaclass
if IS_PYMONGO_3:
from bson import CodecOptions
class FieldTest(unittest.TestCase):
@@ -1287,5 +1292,70 @@ class FieldTest(unittest.TestCase):
self.assertEqual(q, 2)
def test_dynamic_field_dereference(self):
class Merchandise(Document):
name = StringField()
price = IntField()
class Store(Document):
merchandises = DynamicField()
Merchandise.drop_collection()
Store.drop_collection()
merchandises = {
'#1': Merchandise(name='foo', price=100).save(),
'#2': Merchandise(name='bar', price=120).save(),
'#3': Merchandise(name='baz', price=110).save(),
}
Store(merchandises=merchandises).save()
store = Store.objects().first()
for obj in store.merchandises.values():
self.assertFalse(isinstance(obj, Merchandise))
store.select_related()
for obj in store.merchandises.values():
self.assertTrue(isinstance(obj, Merchandise))
def test_dynamic_field_dereference_with_ordering_guarantee_on_pymongo3(self):
# This is because 'codec_options' is supported on pymongo3 or later
if IS_PYMONGO_3:
class OrderedDocument(Document):
my_metaclass = TopLevelDocumentMetaclass
__metaclass__ = TopLevelDocumentMetaclass
@classmethod
def _get_collection(cls):
collection = super(OrderedDocument, cls)._get_collection()
opts = CodecOptions(document_class=OrderedDict)
return collection.with_options(codec_options=opts)
class Merchandise(Document):
name = StringField()
price = IntField()
class Store(OrderedDocument):
merchandises = DynamicField(container_class=OrderedDict)
Merchandise.drop_collection()
Store.drop_collection()
merchandises = OrderedDict()
merchandises['#1'] = Merchandise(name='foo', price=100).save()
merchandises['#2'] = Merchandise(name='bar', price=120).save()
merchandises['#3'] = Merchandise(name='baz', price=110).save()
Store(merchandises=merchandises).save()
store = Store.objects().first()
store.select_related()
# confirms that the load data order is same with the one at storing
self.assertTrue(type(store.merchandises), OrderedDict)
self.assertEqual(','.join(store.merchandises.keys()), '#1,#2,#3')
if __name__ == '__main__':
unittest.main()