Merge pull request #980 from MRigal/fix/various-fixes

Pep8, code clean-up and 0.10.0 changelog finalisation
This commit is contained in:
Matthieu Rigal 2015-06-24 10:20:42 +02:00
commit 45cb991254
32 changed files with 196 additions and 209 deletions

View File

@ -12,7 +12,7 @@ Laine Herron https://github.com/LaineHerron
CONTRIBUTORS
Dervived from the git logs, inevitably incomplete but all of whom and others
Derived from the git logs, inevitably incomplete but all of whom and others
have submitted patches, reported bugs and generally helped make MongoEngine
that much better:

View File

@ -29,7 +29,10 @@ Style Guide
-----------
MongoEngine aims to follow `PEP8 <http://www.python.org/dev/peps/pep-0008/>`_
including 4 space indents and 79 character line limits.
including 4 space indents. When possible we try to stick to 79 character line
limits. However, screens have grown and an ORM has a strong focus on
readability, so we accept a maximum line length of 119 characters, in a similar
way as `django does <https://docs.djangoproject.com/en/dev/internals/contributing/writing-code/coding-style/#python-style>`_.
Testing
-------
@ -38,6 +41,10 @@ All tests are run on `Travis <http://travis-ci.org/MongoEngine/mongoengine>`_
and any pull requests are automatically tested by Travis. Any pull requests
without tests will take longer to be integrated and might be refused.
You may also submit a simple failing test as a pull request if you don't know
how to fix it; that makes it easier for other people to work on it and it may
get fixed faster (see the sketch below for what such a test can look like).
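As an illustration only (the document and field names below are made up), such a
failing test can be as small as::

    import unittest
    from mongoengine import Document, StringField, connect

    class Article(Document):
        title = StringField()

    class ReproTest(unittest.TestCase):
        def setUp(self):
            # the test suite assumes a local mongod, and so does this sketch
            connect('mongoenginetest')
            Article.drop_collection()

        def test_title_roundtrip(self):
            Article(title='hello').save()
            # state the behaviour you expect; the failing assertion documents the bug
            self.assertEqual(Article.objects.first().title, 'hello')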
General Guidelines
------------------
@ -48,6 +55,7 @@ General Guidelines
from the cmd line to run the test suite).
- Ensure tests pass on every Python and PyMongo versions.
You can test on these versions locally by executing ``tox``
- Add enhancements or problematic bug fixes to docs/changelog.rst
- Add yourself to AUTHORS :)
Documentation

View File

@ -26,7 +26,9 @@ a `tutorial <http://readthedocs.org/docs/mongoengine-odm/en/latest/tutorial.html
Installation
============
If you have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_
We recommend the use of `virtualenv <https://virtualenv.pypa.io/>`_ and of
`pip <https://pip.pypa.io/>`_. You can then use ``pip install -U mongoengine``.
If you have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_,
you can also use ``easy_install -U mongoengine``. Otherwise, you can download the
source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and run ``python
setup.py install``.
@ -114,7 +116,7 @@ Also use the -s argument if you want to print out whatever or access pdb while t
.. code-block:: shell
$ python setup.py nosetests --tests tests/test_django.py:QuerySetTest.test_get_document_or_404 -s
$ python setup.py nosetests --tests tests/fields/fields.py:FieldTest.test_cls_field -s
Community
=========

View File

@ -2,7 +2,7 @@
{% if next or prev %}
<div class="rst-footer-buttons">
{% if next %}
<a href="{{ next.link|e }}" class="btn btn-neutral float-right" title="{{ next.title|striptags|e }}"/>Next <span class="icon icon-circle-arrow-right"></span></a>
<a href="{{ next.link|e }}" class="btn btn-neutral float-right" title="{{ next.title|striptags|e }}">Next <span class="icon icon-circle-arrow-right"></span></a>
{% endif %}
{% if prev %}
<a href="{{ prev.link|e }}" class="btn btn-neutral" title="{{ prev.title|striptags|e }}"><span class="icon icon-circle-arrow-left"></span> Previous</a>

View File

@ -2,9 +2,15 @@
Changelog
=========
Changes in 0.10.1 - DEV
=======================
Changes in 0.9.X - DEV
======================
Changes in 0.10.0
=================
- Django support was removed and will be available as a separate extension. #958
- Allow to load undeclared field with meta attribute 'strict': False #957
- Support for PyMongo 3+ #946
- Removed get_or_create() deprecated since 0.8.0. #300
- Improve Document._created status when switch collection and db #1020
- Queryset update doesn't go through field validation #453
- Added support for specifying authentication source as option `authSource` in URI. #967
@ -14,17 +20,14 @@ Changes in 0.9.X - DEV
- Use sets for populating dbrefs to dereference
- Fixed unpickled documents replacing the global field's list. #888
- Fixed storage of microseconds in ComplexDateTimeField and unused separator option. #910
- Django support was removed and will be available as a separate extension. #958
- Don't send a "cls" option to ensureIndex (related to https://jira.mongodb.org/browse/SERVER-769)
- Fix for updating sorting in SortedListField. #978
- Added __ support to escape field name in fields lookup keywords that match operators names #949
- Support for PyMongo 3+ #946
- Fix for issue where FileField deletion did not free space in GridFS.
- No_dereference() not respected on embedded docs containing reference. #517
- Document save raise an exception if save_condition fails #1005
- Fixes some internal _id handling issue. #961
- Updated URL and Email Field regex validators, added schemes argument to URLField validation. #652
- Removed get_or_create() deprecated since 0.8.0. #300
- Capped collection multiple of 256. #1011
- Added `BaseQuerySet.aggregate_sum` and `BaseQuerySet.aggregate_average` methods.
- Fix for delete with write_concern {'w': 0}. #1008

View File

@ -15,5 +15,5 @@ The MongoEngine team is looking for help contributing and maintaining a new
Django extension for MongoEngine! If you have Django experience and would like
to help contribute to the project, please get in touch on the
`mailing list <http://groups.google.com/group/mongoengine-users>`_ or by
simpily contributing on
simply contributing on
`GitHub <https://github.com/MongoEngine/django-mongoengine>`_.

View File

@ -598,7 +598,7 @@ Some variables are made available in the scope of the Javascript function:
The following example demonstrates the intended usage of
:meth:`~mongoengine.queryset.QuerySet.exec_js` by defining a function that sums
over a field on a document (this functionality is already available throught
over a field on a document (this functionality is already available through
:meth:`~mongoengine.queryset.QuerySet.sum` but is shown here for sake of
example)::
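A rough, hedged sketch of that pattern (not the documentation's exact code; the
``Page`` document and ``hits`` field are illustrative)::

    from mongoengine import Document, IntField, StringField, connect

    class Page(Document):
        title = StringField()
        hits = IntField()

    # `collection`, `query` and `options` are made available in the JS scope;
    # fields passed to exec_js are handed to the function as arguments
    sum_hits = """
    function(sumField) {
        var total = 0.0;
        db[collection].find(query).forEach(function(doc) {
            total += doc[sumField];
        });
        return total;
    }
    """

    connect('example_db')  # assumes a local mongod
    total = Page.objects.exec_js(sum_hits, 'hits')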

View File

@ -1,5 +1,6 @@
import weakref
import itertools
from mongoengine.common import _import_class
from mongoengine.errors import DoesNotExist, MultipleObjectsReturned
@ -20,7 +21,7 @@ class BaseDict(dict):
if isinstance(instance, (Document, EmbeddedDocument)):
self._instance = weakref.proxy(instance)
self._name = name
return super(BaseDict, self).__init__(dict_items)
super(BaseDict, self).__init__(dict_items)
def __getitem__(self, key, *args, **kwargs):
value = super(BaseDict, self).__getitem__(key)
@ -65,7 +66,7 @@ class BaseDict(dict):
def clear(self, *args, **kwargs):
self._mark_as_changed()
return super(BaseDict, self).clear(*args, **kwargs)
return super(BaseDict, self).clear()
def pop(self, *args, **kwargs):
self._mark_as_changed()
@ -73,7 +74,7 @@ class BaseDict(dict):
def popitem(self, *args, **kwargs):
self._mark_as_changed()
return super(BaseDict, self).popitem(*args, **kwargs)
return super(BaseDict, self).popitem()
def setdefault(self, *args, **kwargs):
self._mark_as_changed()
@ -189,7 +190,7 @@ class BaseList(list):
def reverse(self, *args, **kwargs):
self._mark_as_changed()
return super(BaseList, self).reverse(*args, **kwargs)
return super(BaseList, self).reverse()
def sort(self, *args, **kwargs):
self._mark_as_changed()
@ -368,25 +369,31 @@ class StrictDict(object):
__slots__ = ()
_special_fields = set(['get', 'pop', 'iteritems', 'items', 'keys', 'create'])
_classes = {}
def __init__(self, **kwargs):
for k,v in kwargs.iteritems():
for k, v in kwargs.iteritems():
setattr(self, k, v)
def __getitem__(self, key):
key = '_reserved_' + key if key in self._special_fields else key
try:
return getattr(self, key)
except AttributeError:
raise KeyError(key)
def __setitem__(self, key, value):
key = '_reserved_' + key if key in self._special_fields else key
return setattr(self, key, value)
def __contains__(self, key):
return hasattr(self, key)
def get(self, key, default=None):
try:
return self[key]
except KeyError:
return default
def pop(self, key, default=None):
v = self.get(key, default)
try:
@ -394,19 +401,29 @@ class StrictDict(object):
except AttributeError:
pass
return v
def iteritems(self):
for key in self:
yield key, self[key]
def items(self):
return [(k, self[k]) for k in iter(self)]
def iterkeys(self):
return iter(self)
def keys(self):
return list(iter(self))
def __iter__(self):
return (key for key in self.__slots__ if hasattr(self, key))
def __len__(self):
return len(list(self.iteritems()))
def __eq__(self, other):
return self.items() == other.items()
def __neq__(self, other):
return self.items() != other.items()
@ -417,15 +434,18 @@ class StrictDict(object):
if allowed_keys not in cls._classes:
class SpecificStrictDict(cls):
__slots__ = allowed_keys_tuple
def __repr__(self):
return "{%s}" % ', '.join('"{0!s}": {0!r}'.format(k,v) for (k,v) in self.iteritems())
return "{%s}" % ', '.join('"{0!s}": {0!r}'.format(k) for k in self.iterkeys())
cls._classes[allowed_keys] = SpecificStrictDict
return cls._classes[allowed_keys]
class SemiStrictDict(StrictDict):
__slots__ = ('_extras')
__slots__ = ('_extras', )
_classes = {}
def __getattr__(self, attr):
try:
super(SemiStrictDict, self).__getattr__(attr)
@ -434,6 +454,7 @@ class SemiStrictDict(StrictDict):
return self.__getattribute__('_extras')[attr]
except KeyError as e:
raise AttributeError(e)
def __setattr__(self, attr, value):
try:
super(SemiStrictDict, self).__setattr__(attr, value)
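A short usage sketch of the strict container above, mirroring the test file
further down in this change set (the allowed keys are chosen arbitrarily):

    from mongoengine.base.datastructures import StrictDict

    Attrs = StrictDict.create(('a', 'b', 'c'))   # a dict-like class limited to these keys
    d = Attrs(a=1, b=2)
    d['c'] = 3                                    # item access is backed by attributes
    assert (d.a, d['b'], d.c) == (1, 2, 3)
    assert sorted(d.keys()) == ['a', 'b', 'c']
    # d.x = 4 would raise AttributeError: 'x' is not one of the allowed keys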

View File

@ -14,7 +14,6 @@ from mongoengine.common import _import_class
from mongoengine.errors import (ValidationError, InvalidDocumentError,
LookUpError, FieldDoesNotExist)
from mongoengine.python_support import PY3, txt_type
from mongoengine.base.common import get_document, ALLOW_INHERITANCE
from mongoengine.base.datastructures import (
BaseDict,
@ -150,7 +149,6 @@ class BaseDocument(object):
# Handle dynamic data only if an initialised dynamic document
if self._dynamic and not self._dynamic_lock:
field = None
if not hasattr(self, name) and not name.startswith('_'):
DynamicField = _import_class("DynamicField")
field = DynamicField(db_field=name)
@ -183,8 +181,8 @@ class BaseDocument(object):
except AttributeError:
self__initialised = False
# Check if the user has created a new instance of a class
if (self._is_document and self__initialised
and self__created and name == self._meta.get('id_field')):
if (self._is_document and self__initialised and
self__created and name == self._meta.get('id_field')):
super(BaseDocument, self).__setattr__('_created', False)
super(BaseDocument, self).__setattr__(name, value)
@ -328,7 +326,7 @@ class BaseDocument(object):
if value is not None:
if isinstance(field, (EmbeddedDocumentField)):
if isinstance(field, EmbeddedDocumentField):
if fields:
key = '%s.' % field_name
embedded_fields = [
@ -417,7 +415,8 @@ class BaseDocument(object):
def to_json(self, *args, **kwargs):
"""Converts a document to JSON.
:param use_db_field: Set to True by default but enables the output of the json structure with the field names and not the mongodb store db_names in case of set to False
:param use_db_field: True by default, which makes the generated JSON use the
MongoDB db_field names; set it to False to output the document's field names instead.
"""
use_db_field = kwargs.pop('use_db_field', True)
return json_util.dumps(self.to_mongo(use_db_field), *args, **kwargs)
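A hedged illustration of that flag (hypothetical document, no database needed):

    from mongoengine import Document, StringField

    class Page(Document):
        title = StringField(db_field='t')      # stored under 't' in MongoDB

    page = Page(title='Hello')
    page.to_json()                    # default use_db_field=True -> roughly {"t": "Hello"}
    page.to_json(use_db_field=False)  # -> roughly {"title": "Hello"}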
@ -578,7 +577,7 @@ class BaseDocument(object):
if (hasattr(field, 'field') and
isinstance(field.field, ReferenceField)):
continue
elif (isinstance(field, SortedListField) and field._ordering):
elif isinstance(field, SortedListField) and field._ordering:
# if ordering is affected whole list is changed
if any(map(lambda d: field._ordering in d._changed_fields, data)):
changed_fields.append(db_field_name)
@ -621,18 +620,18 @@ class BaseDocument(object):
else:
set_data = doc
if '_id' in set_data:
del(set_data['_id'])
del set_data['_id']
# Determine if any changed items were actually unset.
for path, value in set_data.items():
if value or isinstance(value, (numbers.Number, bool)):
continue
# If we've set a value that ain't the default value dont unset it.
# If we've set a value that ain't the default value don't unset it.
default = None
if (self._dynamic and len(parts) and parts[0] in
self._dynamic_fields):
del(set_data[path])
del set_data[path]
unset_data[path] = 1
continue
elif path in self._fields:
@ -666,7 +665,7 @@ class BaseDocument(object):
if default != value:
continue
del(set_data[path])
del set_data[path]
unset_data[path] = 1
return set_data, unset_data
@ -821,7 +820,6 @@ class BaseDocument(object):
parts = key.split('.')
if parts in (['pk'], ['id'], ['_id']):
key = '_id'
fields = []
else:
fields = cls._lookup_field(parts)
parts = []

View File

@ -7,7 +7,6 @@ import pymongo
from mongoengine.common import _import_class
from mongoengine.errors import ValidationError
from mongoengine.base.common import ALLOW_INHERITANCE
from mongoengine.base.datastructures import (
BaseDict, BaseList, EmbeddedDocumentList
@ -23,7 +22,6 @@ UPDATE_OPERATORS = set(['set', 'unset', 'inc', 'dec', 'pop', 'push',
class BaseField(object):
"""A base class for fields in a MongoDB document. Instances of this class
may be added to subclasses of `Document` to define a document's schema.
@ -114,7 +112,7 @@ class BaseField(object):
"""Descriptor for assigning a value to a field in a document.
"""
# If setting to None and theres a default
# If setting to None and there is a default
# Then set the value to the default value
if value is None:
if self.null:
@ -212,7 +210,6 @@ class BaseField(object):
class ComplexBaseField(BaseField):
"""Handles complex fields, such as lists / dictionaries.
Allows for nesting of embedded documents inside complex types.
@ -262,8 +259,8 @@ class ComplexBaseField(BaseField):
instance._data[self.name] = value
if (self._auto_dereference and instance._initialised and
isinstance(value, (BaseList, BaseDict))
and not value._dereferenced):
isinstance(value, (BaseList, BaseDict)) and
not value._dereferenced):
value = _dereference(
value, max_depth=1, instance=instance, name=self.name
)
@ -330,8 +327,8 @@ class ComplexBaseField(BaseField):
return GenericReferenceField().to_mongo(value)
cls = value.__class__
val = value.to_mongo()
# If we its a document thats not inherited add _cls
if (isinstance(value, EmbeddedDocument)):
# If it's a document that is not inherited add _cls
if isinstance(value, EmbeddedDocument):
val['_cls'] = cls.__name__
return val
@ -370,8 +367,8 @@ class ComplexBaseField(BaseField):
elif hasattr(v, 'to_mongo'):
cls = v.__class__
val = v.to_mongo()
# If we its a document thats not inherited add _cls
if (isinstance(v, (Document, EmbeddedDocument))):
# If it's a document that is not inherited add _cls
if isinstance(v, (Document, EmbeddedDocument)):
val['_cls'] = cls.__name__
value_dict[k] = val
else:
@ -422,7 +419,6 @@ class ComplexBaseField(BaseField):
class ObjectIdField(BaseField):
"""A field wrapper around MongoDB's ObjectIds.
"""
@ -454,7 +450,6 @@ class ObjectIdField(BaseField):
class GeoJsonBaseField(BaseField):
"""A geo json field storing a geojson style object.
.. versionadded:: 0.8

View File

@ -14,7 +14,6 @@ __all__ = ('DocumentMetaclass', 'TopLevelDocumentMetaclass')
class DocumentMetaclass(type):
"""Metaclass for all documents.
"""
@ -111,7 +110,7 @@ class DocumentMetaclass(type):
for base in flattened_bases:
if (not getattr(base, '_is_base_cls', True) and
not getattr(base, '_meta', {}).get('abstract', True)):
# Collate heirarchy for _cls and _subclasses
# Collate hierarchy for _cls and _subclasses
class_name.append(base.__name__)
if hasattr(base, '_meta'):
@ -184,7 +183,7 @@ class DocumentMetaclass(type):
"CachedReferenceFields is not allowed in EmbeddedDocuments")
if not f.document_type:
raise InvalidDocumentError(
"Document is not avaiable to sync")
"Document is not available to sync")
if f.auto_sync:
f.start_listener()
@ -246,11 +245,10 @@ class DocumentMetaclass(type):
EmbeddedDocument = _import_class('EmbeddedDocument')
DictField = _import_class('DictField')
CachedReferenceField = _import_class('CachedReferenceField')
return (Document, EmbeddedDocument, DictField, CachedReferenceField)
return Document, EmbeddedDocument, DictField, CachedReferenceField
class TopLevelDocumentMetaclass(DocumentMetaclass):
"""Metaclass for top-level documents (i.e. documents that have their own
collection in the database.
"""
@ -260,7 +258,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
super_new = super(TopLevelDocumentMetaclass, cls).__new__
# Set default _meta data if base class, otherwise get user defined meta
if (attrs.get('my_metaclass') == TopLevelDocumentMetaclass):
if attrs.get('my_metaclass') == TopLevelDocumentMetaclass:
# defaults
attrs['_meta'] = {
'abstract': True,
@ -279,7 +277,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
attrs['_meta'].update(attrs.get('meta', {}))
else:
attrs['_meta'] = attrs.get('meta', {})
# Explictly set abstract to false unless set
# Explicitly set abstract to false unless set
attrs['_meta']['abstract'] = attrs['_meta'].get('abstract', False)
attrs['_is_base_cls'] = False
@ -294,7 +292,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
# Clean up top level meta
if 'meta' in attrs:
del(attrs['meta'])
del attrs['meta']
# Find the parent document class
parent_doc_cls = [b for b in flattened_bases
@ -303,11 +301,11 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
# Prevent classes setting collection different to their parents
# If parent wasn't an abstract class
if (parent_doc_cls and 'collection' in attrs.get('_meta', {})
and not parent_doc_cls._meta.get('abstract', True)):
if (parent_doc_cls and 'collection' in attrs.get('_meta', {}) and
not parent_doc_cls._meta.get('abstract', True)):
msg = "Trying to set a collection on a subclass (%s)" % name
warnings.warn(msg, SyntaxWarning)
del(attrs['_meta']['collection'])
del attrs['_meta']['collection']
# Ensure abstract documents have abstract bases
if attrs.get('_is_base_cls') or attrs['_meta'].get('abstract'):
@ -410,14 +408,15 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
return new_class
def get_auto_id_names(self):
@classmethod
def get_auto_id_names(cls, new_class):
id_name, id_db_name = ('id', '_id')
if id_name not in self._fields and \
id_db_name not in (v.db_field for v in self._fields.values()):
if id_name not in new_class._fields and \
id_db_name not in (v.db_field for v in new_class._fields.values()):
return id_name, id_db_name
id_basename, id_db_basename, i = 'auto_id', '_auto_id', 0
while id_name in self._fields or \
id_db_name in (v.db_field for v in self._fields.values()):
while id_name in new_class._fields or \
id_db_name in (v.db_field for v in new_class._fields.values()):
id_name = '{0}_{1}'.format(id_basename, i)
id_db_name = '{0}_{1}'.format(id_db_basename, i)
i += 1
@ -425,7 +424,6 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
class MetaDict(dict):
"""Custom dictionary for meta classes.
Handles the merging of set indexes
"""
@ -440,6 +438,5 @@ class MetaDict(dict):
class BasesTuple(tuple):
"""Special class to handle introspection of bases tuple in __new__"""
pass

View File

@ -120,7 +120,8 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
try:
connection = None
# check for shared connections
connection_settings_iterator = ((db_alias, settings.copy()) for db_alias, settings in _connection_settings.iteritems())
connection_settings_iterator = (
(db_alias, settings.copy()) for db_alias, settings in _connection_settings.iteritems())
for db_alias, connection_settings in connection_settings_iterator:
connection_settings.pop('name', None)
connection_settings.pop('username', None)

View File

@ -11,7 +11,6 @@ from document import Document, EmbeddedDocument
class DeReference(object):
def __call__(self, items, max_depth=1, instance=None, name=None):
"""
Cheaply dereferences the items to a set depth.
@ -49,8 +48,8 @@ class DeReference(object):
if is_list and all([i.__class__ == doc_type for i in items]):
return items
elif not is_list and all([i.__class__ == doc_type
for i in items.values()]):
elif not is_list and all(
[i.__class__ == doc_type for i in items.values()]):
return items
elif not field.dbref:
if not hasattr(items, 'items'):
@ -101,7 +100,7 @@ class DeReference(object):
if isinstance(item, (Document, EmbeddedDocument)):
for field_name, field in item._fields.iteritems():
v = item._data.get(field_name, None)
if isinstance(v, (DBRef)):
if isinstance(v, DBRef):
reference_map.setdefault(field.document_type, set()).add(v.id)
elif isinstance(v, (dict, SON)) and '_ref' in v:
reference_map.setdefault(get_document(v['_cls']), set()).add(v['_ref'].id)
@ -112,7 +111,7 @@ class DeReference(object):
if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)):
key = field_cls
reference_map.setdefault(key, set()).update(refs)
elif isinstance(item, (DBRef)):
elif isinstance(item, DBRef):
reference_map.setdefault(item.collection, set()).add(item.id)
elif isinstance(item, (dict, SON)) and '_ref' in item:
reference_map.setdefault(get_document(item['_cls']), set()).add(item['_ref'].id)
@ -220,12 +219,12 @@ class DeReference(object):
elif isinstance(v, (Document, EmbeddedDocument)):
for field_name, field in v._fields.iteritems():
v = data[k]._data.get(field_name, None)
if isinstance(v, (DBRef)):
if isinstance(v, DBRef):
data[k]._data[field_name] = self.object_map.get(
(v.collection, v.id), v)
elif isinstance(v, (dict, SON)) and '_ref' in v:
data[k]._data[field_name] = self.object_map.get(
(v['_ref'].collection , v['_ref'].id), v)
(v['_ref'].collection, v['_ref'].id), v)
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
item_name = "{0}.{1}.{2}".format(name, k, field_name)
data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=item_name)

View File

@ -46,7 +46,6 @@ class InvalidCollectionError(Exception):
class EmbeddedDocument(BaseDocument):
"""A :class:`~mongoengine.Document` that isn't stored in its own
collection. :class:`~mongoengine.EmbeddedDocument`\ s should be used as
fields on :class:`~mongoengine.Document`\ s through the
@ -61,7 +60,7 @@ class EmbeddedDocument(BaseDocument):
dictionary.
"""
__slots__ = ('_instance')
__slots__ = ('_instance', )
# The __metaclass__ attribute is removed by 2to3 when running with Python3
# my_metaclass is defined so that metaclass can be queried in Python 2 & 3
@ -89,7 +88,6 @@ class EmbeddedDocument(BaseDocument):
class Document(BaseDocument):
"""The base class used for defining the structure and properties of
collections of documents stored in MongoDB. Inherit from this class, and
add fields as class attributes to define a document's structure.
@ -160,7 +158,9 @@ class Document(BaseDocument):
def fset(self, value):
return setattr(self, self._meta['id_field'], value)
return property(fget, fset)
pk = pk()
@classmethod
@ -455,7 +455,7 @@ class Document(BaseDocument):
if kwargs.get('upsert', False):
query = self.to_mongo()
if "_cls" in query:
del(query["_cls"])
del query["_cls"]
return self._qs.filter(**query).update_one(**kwargs)
else:
raise OperationError(
@ -580,8 +580,8 @@ class Document(BaseDocument):
if not self.pk:
raise self.DoesNotExist("Document does not exist")
obj = self._qs.read_preference(ReadPreference.PRIMARY).filter(
**self._object_key).only(*fields).limit(1
).select_related(max_depth=max_depth)
**self._object_key).only(*fields).limit(
1).select_related(max_depth=max_depth)
if obj:
obj = obj[0]
@ -640,11 +640,11 @@ class Document(BaseDocument):
for class_name in document_cls._subclasses
if class_name != document_cls.__name__] + [document_cls]
for cls in classes:
for klass in classes:
for document_cls in documents:
delete_rules = cls._meta.get('delete_rules') or {}
delete_rules = klass._meta.get('delete_rules') or {}
delete_rules[(document_cls, field_name)] = rule
cls._meta['delete_rules'] = delete_rules
klass._meta['delete_rules'] = delete_rules
@classmethod
def drop_collection(cls):
@ -769,7 +769,7 @@ class Document(BaseDocument):
**index_opts)
@classmethod
def list_indexes(cls, go_up=True, go_down=True):
def list_indexes(cls):
""" Lists all of the indexes that should be created for given
collection. It includes all the indexes from super- and sub-classes.
"""
@ -816,8 +816,8 @@ class Document(BaseDocument):
return indexes
indexes = []
for cls in classes:
for index in get_indexes_spec(cls):
for klass in classes:
for index in get_indexes_spec(klass):
if index not in indexes:
indexes.append(index)
@ -856,7 +856,6 @@ class Document(BaseDocument):
class DynamicDocument(Document):
"""A Dynamic Document class allowing flexible, expandable and uncontrolled
schemas. As a :class:`~mongoengine.Document` subclass, acts in the same
way as an ordinary document but has expando style properties. Any data
@ -888,7 +887,6 @@ class DynamicDocument(Document):
class DynamicEmbeddedDocument(EmbeddedDocument):
"""A Dynamic Embedded Document class allowing flexible, expandable and
uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more
information about dynamic documents.
@ -915,7 +913,6 @@ class DynamicEmbeddedDocument(EmbeddedDocument):
class MapReduceDocument(object):
"""A document returned from a map/reduce query.
:param collection: An instance of :class:`~pymongo.Collection`

View File

@ -115,6 +115,7 @@ class ValidationError(AssertionError):
else:
return unicode(source)
return errors_dict
if not self.errors:
return {}
return build_dict(self.errors)
@ -125,7 +126,7 @@ class ValidationError(AssertionError):
def generate_key(value, prefix=''):
if isinstance(value, list):
value = ' '.join([generate_key(k) for k in value])
if isinstance(value, dict):
elif isinstance(value, dict):
value = ' '.join(
[generate_key(v, k) for k, v in value.iteritems()])

View File

@ -47,12 +47,10 @@ __all__ = [
'SequenceField', 'UUIDField', 'MultiPointField', 'MultiLineStringField',
'MultiPolygonField', 'GeoJsonBaseField']
RECURSIVE_REFERENCE_CONSTANT = 'self'
class StringField(BaseField):
"""A unicode string field.
"""
@ -112,7 +110,6 @@ class StringField(BaseField):
class URLField(StringField):
"""A field that validates input as an URL.
.. versionadded:: 0.3
@ -159,7 +156,6 @@ class URLField(StringField):
class EmailField(StringField):
"""A field that validates input as an E-Mail-Address.
.. versionadded:: 0.4
@ -181,7 +177,6 @@ class EmailField(StringField):
class IntField(BaseField):
"""An 32-bit integer field.
"""
@ -216,7 +211,6 @@ class IntField(BaseField):
class LongField(BaseField):
"""An 64-bit integer field.
"""
@ -251,7 +245,6 @@ class LongField(BaseField):
class FloatField(BaseField):
"""An floating point number field.
"""
@ -286,7 +279,6 @@ class FloatField(BaseField):
class DecimalField(BaseField):
"""A fixed-point decimal number field.
.. versionchanged:: 0.8
@ -360,7 +352,6 @@ class DecimalField(BaseField):
class BooleanField(BaseField):
"""A boolean field type.
.. versionadded:: 0.1.2
@ -379,7 +370,6 @@ class BooleanField(BaseField):
class DateTimeField(BaseField):
"""A datetime field.
Uses the python-dateutil library if available alternatively use time.strptime
@ -447,7 +437,6 @@ class DateTimeField(BaseField):
class ComplexDateTimeField(StringField):
"""
ComplexDateTimeField handles microseconds exactly instead of rounding
like DateTimeField does.
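To make that difference concrete, a small hedged sketch (illustrative class,
no database needed just to declare it):

    import datetime
    from mongoengine import Document, DateTimeField, ComplexDateTimeField

    class Event(Document):
        rough = DateTimeField()          # BSON datetimes only keep millisecond precision
        exact = ComplexDateTimeField()   # stored as a string, microseconds preserved

    ts = datetime.datetime(2015, 6, 24, 10, 20, 42, 123456)
    event = Event(rough=ts, exact=ts)    # after a save/reload only `exact` keeps 123456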
@ -531,7 +520,6 @@ class ComplexDateTimeField(StringField):
class EmbeddedDocumentField(BaseField):
"""An embedded document field - with a declared document_type.
Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`.
"""
@ -585,7 +573,6 @@ class EmbeddedDocumentField(BaseField):
class GenericEmbeddedDocumentField(BaseField):
"""A generic embedded document field - allows any
:class:`~mongoengine.EmbeddedDocument` to be stored.
@ -624,7 +611,6 @@ class GenericEmbeddedDocumentField(BaseField):
class DynamicField(BaseField):
"""A truly dynamic field type capable of handling different and varying
types of data.
@ -641,9 +627,9 @@ class DynamicField(BaseField):
cls = value.__class__
val = value.to_mongo()
# If we its a document thats not inherited add _cls
if (isinstance(value, Document)):
if isinstance(value, Document):
val = {"_ref": value.to_dbref(), "_cls": cls.__name__}
if (isinstance(value, EmbeddedDocument)):
if isinstance(value, EmbeddedDocument):
val['_cls'] = cls.__name__
return val
@ -678,18 +664,15 @@ class DynamicField(BaseField):
def prepare_query_value(self, op, value):
if isinstance(value, basestring):
from mongoengine.fields import StringField
return StringField().prepare_query_value(op, value)
return super(DynamicField, self).prepare_query_value(op, self.to_mongo(value))
def validate(self, value, clean=True):
if hasattr(value, "validate"):
value.validate(clean=clean)
class ListField(ComplexBaseField):
"""A list field that wraps a standard field, allowing multiple instances
of the field to be used as a list in the database.
@ -714,9 +697,10 @@ class ListField(ComplexBaseField):
def prepare_query_value(self, op, value):
if self.field:
if op in ('set', 'unset') and (not isinstance(value, basestring)
and not isinstance(value, BaseDocument)
and hasattr(value, '__iter__')):
if op in ('set', 'unset') and (
not isinstance(value, basestring) and
not isinstance(value, BaseDocument) and
hasattr(value, '__iter__')):
return [self.field.prepare_query_value(op, v) for v in value]
return self.field.prepare_query_value(op, value)
return super(ListField, self).prepare_query_value(op, value)
@ -734,12 +718,10 @@ class EmbeddedDocumentListField(ListField):
"""
def __init__(self, document_type, *args, **kwargs):
def __init__(self, document_type, **kwargs):
"""
:param document_type: The type of
:class:`~mongoengine.EmbeddedDocument` the list will hold.
:param args: Arguments passed directly into the parent
:class:`~mongoengine.ListField`.
:param kwargs: Keyword arguments passed directly into the parent
:class:`~mongoengine.ListField`.
"""
@ -749,7 +731,6 @@ class EmbeddedDocumentListField(ListField):
class SortedListField(ListField):
"""A ListField that sorts the contents of its list before writing to
the database in order to ensure that a sorted list is always
retrieved.
@ -801,7 +782,6 @@ def key_has_dot_or_dollar(d):
class DictField(ComplexBaseField):
"""A dictionary field that wraps a standard Python dictionary. This is
similar to an embedded document, but the structure is not defined.
@ -857,7 +837,6 @@ class DictField(ComplexBaseField):
class MapField(DictField):
"""A field that maps a name to a specified field type. Similar to
a DictField, except the 'value' of each item must match the specified
field type.
@ -873,7 +852,6 @@ class MapField(DictField):
class ReferenceField(BaseField):
"""A reference to a document that will be automatically dereferenced on
access (lazily).
@ -995,7 +973,6 @@ class ReferenceField(BaseField):
super(ReferenceField, self).prepare_query_value(op, value)
return self.to_mongo(value)
def validate(self, value):
if not isinstance(value, (self.document_type, DBRef)):
@ -1010,7 +987,6 @@ class ReferenceField(BaseField):
class CachedReferenceField(BaseField):
"""
A referencefield with cache fields to purpose pseudo-joins
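Roughly, the field is used like this (illustrative documents; ``fields`` lists
the attributes to cache on the referencing document):

    from mongoengine import Document, StringField, CachedReferenceField

    class Author(Document):
        name = StringField()

    class Book(Document):
        title = StringField()
        # a copy of the referenced author's `name` is stored on each Book,
        # so listing books does not require a second query (a pseudo-join)
        author = CachedReferenceField(Author, fields=['name'])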
@ -1025,7 +1001,6 @@ class CachedReferenceField(BaseField):
"""
if not isinstance(document_type, basestring) and \
not issubclass(document_type, (Document, basestring)):
self.error('Argument to CachedReferenceField constructor must be a'
' document class or a string')
@ -1036,6 +1011,7 @@ class CachedReferenceField(BaseField):
def start_listener(self):
from mongoengine import signals
signals.post_save.connect(self.on_document_pre_save,
sender=self.document_type)
@ -1089,7 +1065,6 @@ class CachedReferenceField(BaseField):
def to_mongo(self, document):
id_field_name = self.document_type._meta['id_field']
id_field = self.document_type._fields[id_field_name]
doc_tipe = self.document_type
if isinstance(document, Document):
# We need the id from the saved object to create the DBRef
@ -1099,6 +1074,7 @@ class CachedReferenceField(BaseField):
' been saved to the database')
else:
self.error('Only accept a document object')
# TODO: should raise here or will fail next statement
value = SON((
("_id", id_field.to_mongo(id_)),
@ -1121,7 +1097,7 @@ class CachedReferenceField(BaseField):
def validate(self, value):
if not isinstance(value, (self.document_type)):
if not isinstance(value, self.document_type):
self.error("A CachedReferenceField only accepts documents")
if isinstance(value, Document) and value.id is None:
@ -1150,7 +1126,6 @@ class CachedReferenceField(BaseField):
class GenericReferenceField(BaseField):
"""A reference to *any* :class:`~mongoengine.document.Document` subclass
that will be automatically dereferenced on access (lazily).
@ -1232,7 +1207,6 @@ class GenericReferenceField(BaseField):
class BinaryField(BaseField):
"""A binary data field.
"""
@ -1264,7 +1238,6 @@ class GridFSError(Exception):
class GridFSProxy(object):
"""Proxy object to handle writing and reading of files to and from GridFS
.. versionadded:: 0.4
@ -1410,7 +1383,6 @@ class GridFSProxy(object):
class FileField(BaseField):
"""A GridFS storage field.
.. versionadded:: 0.4
@ -1494,7 +1466,6 @@ class FileField(BaseField):
class ImageGridFsProxy(GridFSProxy):
"""
Proxy for ImageField
@ -1518,6 +1489,7 @@ class ImageGridFsProxy(GridFSProxy):
raise ValidationError('Invalid image: %s' % e)
# Progressive JPEG
# TODO: fixme, at least unused, at worst bad implementation
progressive = img.info.get('progressive') or False
if (kwargs.get('progressive') and
@ -1578,7 +1550,7 @@ class ImageGridFsProxy(GridFSProxy):
if out and out.thumbnail_id:
self.fs.delete(out.thumbnail_id)
return super(ImageGridFsProxy, self).delete(*args, **kwargs)
return super(ImageGridFsProxy, self).delete()
def _put_thumbnail(self, thumbnail, format, progressive, **kwargs):
w, h = thumbnail.size
@ -1633,7 +1605,6 @@ class ImproperlyConfigured(Exception):
class ImageField(FileField):
"""
A Image File storage field.
@ -1672,7 +1643,6 @@ class ImageField(FileField):
class SequenceField(BaseField):
"""Provides a sequential counter see:
http://www.mongodb.org/display/DOCS/Object+IDs#ObjectIDs-SequenceNumbers
@ -1796,7 +1766,6 @@ class SequenceField(BaseField):
class UUIDField(BaseField):
"""A UUID field.
.. versionadded:: 0.6
@ -1843,13 +1812,12 @@ class UUIDField(BaseField):
if not isinstance(value, basestring):
value = str(value)
try:
value = uuid.UUID(value)
uuid.UUID(value)
except Exception, exc:
self.error('Could not convert to UUID: %s' % exc)
class GeoPointField(BaseField):
"""A list storing a longitude and latitude coordinate.
.. note:: this represents a generic point in a 2D plane and a legacy way of
@ -1879,7 +1847,6 @@ class GeoPointField(BaseField):
class PointField(GeoJsonBaseField):
"""A GeoJSON field storing a longitude and latitude coordinate.
The data is represented as:
@ -1900,7 +1867,6 @@ class PointField(GeoJsonBaseField):
class LineStringField(GeoJsonBaseField):
"""A GeoJSON field storing a line of longitude and latitude coordinates.
The data is represented as:
@ -1920,7 +1886,6 @@ class LineStringField(GeoJsonBaseField):
class PolygonField(GeoJsonBaseField):
"""A GeoJSON field storing a polygon of longitude and latitude coordinates.
The data is represented as:
@ -1943,7 +1908,6 @@ class PolygonField(GeoJsonBaseField):
class MultiPointField(GeoJsonBaseField):
"""A GeoJSON field storing a list of Points.
The data is represented as:
@ -1964,7 +1928,6 @@ class MultiPointField(GeoJsonBaseField):
class MultiLineStringField(GeoJsonBaseField):
"""A GeoJSON field storing a list of LineStrings.
The data is represented as:
@ -1985,7 +1948,6 @@ class MultiLineStringField(GeoJsonBaseField):
class MultiPolygonField(GeoJsonBaseField):
"""A GeoJSON field storing list of Polygons.
The data is represented as:

View File

@ -14,6 +14,7 @@ PY3 = sys.version_info[0] == 3
if PY3:
import codecs
from io import BytesIO as StringIO
# return s converted to binary. b('test') should be equivalent to b'test'
def b(s):
return codecs.latin_1_encode(s)[0]

View File

@ -43,7 +43,6 @@ RE_TYPE = type(re.compile(''))
class BaseQuerySet(object):
"""A set of results returned from a query. Wraps a MongoDB cursor,
providing :class:`~mongoengine.Document` objects as the results.
"""
@ -87,8 +86,8 @@ class BaseQuerySet(object):
self.only_fields = []
self._max_time_ms = None
def __call__(self, q_obj=None, class_check=True, slave_okay=False,
read_preference=None, **query):
def __call__(self, q_obj=None, class_check=True, read_preference=None,
**query):
"""Filter the selected documents by calling the
:class:`~mongoengine.queryset.QuerySet` with a query.
@ -98,9 +97,7 @@ class BaseQuerySet(object):
objects, only the last one will be used
:param class_check: If set to False bypass class name check when
querying collection
:param slave_okay: if True, allows this query to be run against a
replica secondary.
:params read_preference: if set, overrides connection-level
:param read_preference: if set, overrides connection-level
read_preference from `ReplicaSetConnection`.
:param query: Django-style query keyword arguments
"""
@ -205,7 +202,8 @@ class BaseQuerySet(object):
:param language: The language that determines the list of stop words
for the search and the rules for the stemmer and tokenizer.
If not specified, the search uses the default language of the index.
For supported languages, see `Text Search Languages <http://docs.mongodb.org/manual/reference/text-search-languages/#text-search-languages>`.
For supported languages, see
`Text Search Languages <http://docs.mongodb.org/manual/reference/text-search-languages/#text-search-languages>`.
"""
queryset = self.clone()
if queryset._search_text:
@ -271,7 +269,7 @@ class BaseQuerySet(object):
def insert(self, doc_or_docs, load_bulk=True, write_concern=None):
"""bulk insert documents
:param docs_or_doc: a document or list of documents to be inserted
:param doc_or_docs: a document or list of documents to be inserted
:param load_bulk (optional): If True returns the list of document
instances
:param write_concern: Extra keyword arguments are passed down to
@ -406,8 +404,8 @@ class BaseQuerySet(object):
if rule == CASCADE:
ref_q = document_cls.objects(**{field_name + '__in': self})
ref_q_count = ref_q.count()
if (doc != document_cls and ref_q_count > 0
or (doc == document_cls and ref_q_count > 0)):
if (doc != document_cls and ref_q_count > 0 or
(doc == document_cls and ref_q_count > 0)):
ref_q.delete(write_concern=write_concern)
elif rule == NULLIFY:
document_cls.objects(**{field_name + '__in': self}).update(
@ -528,7 +526,7 @@ class BaseQuerySet(object):
try:
if IS_PYMONGO_3:
if full_response:
msg = ("With PyMongo 3+, it is not possible anymore to get the full response.")
msg = "With PyMongo 3+, it is not possible anymore to get the full response."
warnings.warn(msg, DeprecationWarning)
if remove:
result = queryset._collection.find_one_and_delete(
@ -597,7 +595,8 @@ class BaseQuerySet(object):
doc_map[doc['_id']] = self._get_as_pymongo(doc)
else:
for doc in docs:
doc_map[doc['_id']] = self._document._from_son(doc,
doc_map[doc['_id']] = self._document._from_son(
doc,
only_fields=self.only_fields,
_auto_dereference=self._auto_dereference)
@ -619,7 +618,8 @@ class BaseQuerySet(object):
return self
def using(self, alias):
"""This method is for controlling which database the QuerySet will be evaluated against if you are using more than one database.
"""This method is for controlling which database the QuerySet will be
evaluated against if you are using more than one database.
:param alias: The database alias
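A hedged usage sketch (aliases and database names are illustrative):

    from mongoengine import Document, StringField, connect

    connect('default_db')                    # default alias
    connect('archive_db', alias='archive')   # a second, named connection

    class Page(Document):
        title = StringField()

    Page.objects.using('archive').count()    # evaluate this queryset against 'archive'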
@ -830,7 +830,6 @@ class BaseQuerySet(object):
cleaned_fields = []
for key, value in kwargs.items():
parts = key.split('__')
op = None
if parts[0] in operators:
op = parts.pop(0)
value = {'$' + op: value}
@ -967,7 +966,7 @@ class BaseQuerySet(object):
"""Instead of returning Document instances, return raw values from
pymongo.
:param coerce_type: Field types (if applicable) would be use to
:param coerce_types: Field types (if applicable) would be use to
coerce types.
"""
queryset = self.clone()
@ -1259,8 +1258,8 @@ class BaseQuerySet(object):
the aggregation framework instead of map-reduce.
"""
result = self._document._get_collection().aggregate([
{ '$match': self._query },
{ '$group': { '_id': 'sum', 'total': { '$sum': '$' + field } } }
{'$match': self._query},
{'$group': {'_id': 'sum', 'total': {'$sum': '$' + field}}}
])
if IS_PYMONGO_3:
result = list(result)
@ -1335,8 +1334,8 @@ class BaseQuerySet(object):
uses the aggregation framework instead of map-reduce.
"""
result = self._document._get_collection().aggregate([
{ '$match': self._query },
{ '$group': { '_id': 'avg', 'total': { '$avg': '$' + field } } }
{'$match': self._query},
{'$group': {'_id': 'avg', 'total': {'$avg': '$' + field}}}
])
if IS_PYMONGO_3:
result = list(result)
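For reference, both helpers touched above are used the same way (illustrative
document; assumes a local mongod):

    from mongoengine import Document, IntField, connect

    class Order(Document):
        amount = IntField()

    connect('example_db')
    total = Order.objects.aggregate_sum('amount')        # $sum over matched documents
    average = Order.objects.aggregate_average('amount')  # $avg over matched documents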
@ -1616,7 +1615,7 @@ class BaseQuerySet(object):
return frequencies
def _fields_to_dbfields(self, fields, subdoc=False):
def _fields_to_dbfields(self, fields):
"""Translate fields paths to its db equivalents"""
ret = []
subclasses = []
@ -1638,7 +1637,7 @@ class BaseQuerySet(object):
ret.append(subfield)
found = True
break
except LookUpError, e:
except LookUpError:
pass
if not found:

View File

@ -1,4 +1,3 @@
__all__ = ('QueryFieldList',)

View File

@ -61,7 +61,6 @@ class QuerySet(BaseQuerySet):
data[-1] = "...(remaining elements truncated)..."
return repr(data)
def _iter_results(self):
"""A generator for iterating over the result cache.
@ -74,7 +73,7 @@ class QuerySet(BaseQuerySet):
upper = len(self._result_cache)
while pos < upper:
yield self._result_cache[pos]
pos = pos + 1
pos += 1
if not self._has_more:
raise StopIteration
if len(self._result_cache) <= pos:

View File

@ -11,7 +11,6 @@ from mongoengine.python_support import IS_PYMONGO_3
__all__ = ('query', 'update')
COMPARISON_OPERATORS = ('ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod',
'all', 'size', 'exists', 'not', 'elemMatch', 'type')
GEO_OPERATORS = ('within_distance', 'within_spherical_distance',
@ -27,7 +26,7 @@ MATCH_OPERATORS = (COMPARISON_OPERATORS + GEO_OPERATORS +
STRING_OPERATORS + CUSTOM_OPERATORS)
def query(_doc_cls=None, _field_operation=False, **query):
def query(_doc_cls=None, **query):
"""Transform a query from Django-style format to Mongo format.
"""
mongo_query = {}
@ -45,8 +44,8 @@ def query(_doc_cls=None, _field_operation=False, **query):
if len(parts) > 1 and parts[-1] in MATCH_OPERATORS:
op = parts.pop()
#if user escape field name by __
if len(parts) > 1 and parts[-1]=="":
# Allow escaping operator-like field names with a trailing __
if len(parts) > 1 and parts[-1] == "":
parts.pop()
negate = False
@ -359,6 +358,7 @@ def _infer_geometry(value):
raise InvalidQueryError("Invalid $geometry dictionary should have "
"type and coordinates keys")
elif isinstance(value, (list, set)):
# TODO: shouldn't we test value[0][0][0][0] to see if it is MultiPolygon?
try:
value[0][0][0]
return {"$geometry": {"type": "Polygon", "coordinates": value}}

View File

@ -6,6 +6,7 @@ __all__ = ['pre_init', 'post_init', 'pre_save', 'pre_save_post_validation',
signals_available = False
try:
from blinker import Namespace
signals_available = True
except ImportError:
class Namespace(object):
@ -27,6 +28,7 @@ except ImportError:
raise RuntimeError('signalling support is unavailable '
'because the blinker library is '
'not installed.')
send = lambda *a, **kw: None
connect = disconnect = has_receivers_for = receivers_for = \
temporarily_connected_to = _fail
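When blinker is installed, the signals are used roughly like this (handler and
document are illustrative):

    from mongoengine import Document, StringField, signals

    class Page(Document):
        title = StringField()

    def log_save(sender, document, **kwargs):
        print('saved', document.title)

    signals.post_save.connect(log_save, sender=Page)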

View File

@ -52,12 +52,13 @@ CLASSIFIERS = [
extra_opts = {"packages": find_packages(exclude=["tests", "tests.*"])}
if sys.version_info[0] == 3:
extra_opts['use_2to3'] = True
extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'Pillow>=2.0.0']
extra_opts['tests_require'] = ['nose', 'coverage==3.7.1', 'blinker', 'Pillow>=2.0.0']
if "test" in sys.argv or "nosetests" in sys.argv:
extra_opts['packages'] = find_packages()
extra_opts['package_data'] = {"tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]}
else:
extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'Pillow>=2.0.0', 'python-dateutil']
# coverage 4 does not support Python 3.2 anymore
extra_opts['tests_require'] = ['nose', 'coverage==3.7.1', 'blinker', 'Pillow>=2.0.0', 'python-dateutil']
if sys.version_info[0] == 2 and sys.version_info[1] == 6:
extra_opts['tests_require'].append('unittest2')

View File

@ -253,13 +253,13 @@ class DeltaTest(unittest.TestCase):
self.assertEqual(doc.embedded_field.list_field[2].list_field,
[1, 2, {'hello': 'world'}])
del(doc.embedded_field.list_field[2].list_field[2]['hello'])
del doc.embedded_field.list_field[2].list_field[2]['hello']
self.assertEqual(doc._delta(),
({}, {'embedded_field.list_field.2.list_field.2.hello': 1}))
doc.save()
doc = doc.reload(10)
del(doc.embedded_field.list_field[2].list_field)
del doc.embedded_field.list_field[2].list_field
self.assertEqual(doc._delta(),
({}, {'embedded_field.list_field.2.list_field': 1}))
@ -593,13 +593,13 @@ class DeltaTest(unittest.TestCase):
self.assertEqual(doc.embedded_field.list_field[2].list_field,
[1, 2, {'hello': 'world'}])
del(doc.embedded_field.list_field[2].list_field[2]['hello'])
del doc.embedded_field.list_field[2].list_field[2]['hello']
self.assertEqual(doc._delta(),
({}, {'db_embedded_field.db_list_field.2.db_list_field.2.hello': 1}))
doc.save()
doc = doc.reload(10)
del(doc.embedded_field.list_field[2].list_field)
del doc.embedded_field.list_field[2].list_field
self.assertEqual(doc._delta(), ({},
{'db_embedded_field.db_list_field.2.db_list_field': 1}))
@ -615,7 +615,7 @@ class DeltaTest(unittest.TestCase):
SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))
p.doc = 123
del(p.doc)
del p.doc
self.assertEqual(p._delta(), (
SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))

View File

@ -72,7 +72,7 @@ class DynamicTest(unittest.TestCase):
obj = collection.find_one()
self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'misc', 'name'])
del(p.misc)
del p.misc
p.save()
p = self.Person.objects.get()
@ -356,7 +356,6 @@ class DynamicTest(unittest.TestCase):
self.assertEqual(Person.objects.first().address.city, "Londinium")
person = Person.objects.first()
person["age"] = 35
person.save()

View File

@ -1897,11 +1897,11 @@ class InstanceTest(unittest.TestCase):
self.assertEqual(BlogPost.objects.count(), 0)
def test_reverse_delete_rule_cascade_triggers_pre_delete_signal(self):
''' ensure the pre_delete signal is triggered upon a cascading deletion
""" ensure the pre_delete signal is triggered upon a cascading deletion
setup a blog post with content, an author and editor
delete the author which triggers deletion of blogpost via cascade
blog post's pre_delete signal alters an editor attribute
'''
"""
class Editor(self.Person):
review_queue = IntField(default=0)

View File

@ -946,7 +946,7 @@ class FieldTest(unittest.TestCase):
BlogPost.drop_collection()
def test_reverse_list_sorting(self):
'''Ensure that a reverse sorted list field properly sorts values'''
"""Ensure that a reverse sorted list field properly sorts values"""
class Category(EmbeddedDocument):
count = IntField()
@ -1334,7 +1334,6 @@ class FieldTest(unittest.TestCase):
def test_atomic_update_dict_field(self):
"""Ensure that the entire DictField can be atomically updated."""
class Simple(Document):
mapping = DictField(field=ListField(IntField(required=True)))
@ -1349,7 +1348,7 @@ class FieldTest(unittest.TestCase):
self.assertEqual({"ints": [3, 4]}, e.mapping)
def create_invalid_mapping():
e.update(set__mapping={"somestrings": ["foo", "bar",]})
e.update(set__mapping={"somestrings": ["foo", "bar", ]})
self.assertRaises(ValueError, create_invalid_mapping)
@ -3774,7 +3773,7 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase):
class A(Document):
my_list = ListField(EmbeddedDocumentField(EmbeddedWithUnique))
a1 = A(my_list=[]).save()
A(my_list=[]).save()
self.assertRaises(NotUniqueError, lambda: A(my_list=[]).save())
class EmbeddedWithSparseUnique(EmbeddedDocument):
@ -3783,9 +3782,8 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase):
class B(Document):
my_list = ListField(EmbeddedDocumentField(EmbeddedWithSparseUnique))
b1 = B(my_list=[]).save()
b2 = B(my_list=[]).save()
B(my_list=[]).save()
B(my_list=[]).save()
def test_filtered_delete(self):
"""
@ -3824,6 +3822,7 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase):
and doesn't interfere with the rest of field functionalities.
"""
custom_data = {'a': 'a_value', 'b': [1, 2]}
class CustomData(Document):
a_field = IntField()
c_field = IntField(custom_data=custom_data)

View File

@ -12,7 +12,7 @@ import gridfs
from nose.plugins.skip import SkipTest
from mongoengine import *
from mongoengine.connection import get_db
from mongoengine.python_support import PY3, b, StringIO
from mongoengine.python_support import b, StringIO
try:
from PIL import Image
@ -112,7 +112,7 @@ class FileTest(unittest.TestCase):
result.the_file.delete()
# Ensure deleted file returns None
self.assertTrue(result.the_file.read() == None)
self.assertTrue(result.the_file.read() is None)
def test_file_fields_stream_after_none(self):
"""Ensure that a file field can be written to after it has been saved as
@ -138,7 +138,7 @@ class FileTest(unittest.TestCase):
result = StreamFile.objects.first()
self.assertTrue(streamfile == result)
self.assertEqual(result.the_file.read(), text + more_text)
#self.assertEqual(result.the_file.content_type, content_type)
# self.assertEqual(result.the_file.content_type, content_type)
result.the_file.seek(0)
self.assertEqual(result.the_file.tell(), 0)
self.assertEqual(result.the_file.read(len(text)), text)
@ -148,7 +148,7 @@ class FileTest(unittest.TestCase):
result.the_file.delete()
# Ensure deleted file returns None
self.assertTrue(result.the_file.read() == None)
self.assertTrue(result.the_file.read() is None)
def test_file_fields_set(self):

View File

@ -115,7 +115,7 @@ class GeoFieldTest(unittest.TestCase):
expected = "Invalid LineString:\nBoth values (%s) in point must be float or int" % repr(coord[0])
self._test_for_expected_error(Location, coord, expected)
Location(loc=[[1, 2], [3, 4], [5, 6], [1,2]]).validate()
Location(loc=[[1, 2], [3, 4], [5, 6], [1, 2]]).validate()
def test_polygon_validation(self):
class Location(Document):
@ -226,7 +226,7 @@ class GeoFieldTest(unittest.TestCase):
expected = "Invalid MultiLineString:\nBoth values (%s) in point must be float or int" % repr(coord[0][0])
self._test_for_expected_error(Location, coord, expected)
Location(loc=[[[1, 2], [3, 4], [5, 6], [1,2]]]).validate()
Location(loc=[[[1, 2], [3, 4], [5, 6], [1, 2]]]).validate()
def test_multipolygon_validation(self):
class Location(Document):

View File

@ -1,11 +1,14 @@
import unittest
from mongoengine.base.datastructures import StrictDict, SemiStrictDict
class TestStrictDict(unittest.TestCase):
def strict_dict_class(self, *args, **kwargs):
return StrictDict.create(*args, **kwargs)
def setUp(self):
self.dtype = self.strict_dict_class(("a", "b", "c"))
def test_init(self):
d = self.dtype(a=1, b=1, c=1)
self.assertEqual((d.a, d.b, d.c), (1, 1, 1))
@ -38,8 +41,9 @@ class TestStrictDict(unittest.TestCase):
def test_setattr_raises_on_nonexisting_attr(self):
d = self.dtype()
def _f():
d.x=1
d.x = 1
self.assertRaises(AttributeError, _f)
def test_setattr_getattr_special(self):