Various fixes again

This commit is contained in:
Matthieu Rigal 2015-06-24 00:50:36 +02:00
parent cd76a906f4
commit 0aeb1ca408
22 changed files with 88 additions and 89 deletions

View File

@ -2,7 +2,7 @@
{% if next or prev %} {% if next or prev %}
<div class="rst-footer-buttons"> <div class="rst-footer-buttons">
{% if next %} {% if next %}
<a href="{{ next.link|e }}" class="btn btn-neutral float-right" title="{{ next.title|striptags|e }}"/>Next <span class="icon icon-circle-arrow-right"></span></a> <a href="{{ next.link|e }}" class="btn btn-neutral float-right" title="{{ next.title|striptags|e }}">Next <span class="icon icon-circle-arrow-right"></span></a>
{% endif %} {% endif %}
{% if prev %} {% if prev %}
<a href="{{ prev.link|e }}" class="btn btn-neutral" title="{{ prev.title|striptags|e }}"><span class="icon icon-circle-arrow-left"></span> Previous</a> <a href="{{ prev.link|e }}" class="btn btn-neutral" title="{{ prev.title|striptags|e }}"><span class="icon icon-circle-arrow-left"></span> Previous</a>

View File

@ -598,7 +598,7 @@ Some variables are made available in the scope of the Javascript function:
The following example demonstrates the intended usage of The following example demonstrates the intended usage of
:meth:`~mongoengine.queryset.QuerySet.exec_js` by defining a function that sums :meth:`~mongoengine.queryset.QuerySet.exec_js` by defining a function that sums
over a field on a document (this functionality is already available throught over a field on a document (this functionality is already available through
:meth:`~mongoengine.queryset.QuerySet.sum` but is shown here for sake of :meth:`~mongoengine.queryset.QuerySet.sum` but is shown here for sake of
example):: example)::

View File

@ -443,7 +443,7 @@ class StrictDict(object):
class SemiStrictDict(StrictDict): class SemiStrictDict(StrictDict):
__slots__ = ('_extras') __slots__ = ('_extras', )
_classes = {} _classes = {}
def __getattr__(self, attr): def __getattr__(self, attr):

View File

@ -149,7 +149,6 @@ class BaseDocument(object):
# Handle dynamic data only if an initialised dynamic document # Handle dynamic data only if an initialised dynamic document
if self._dynamic and not self._dynamic_lock: if self._dynamic and not self._dynamic_lock:
field = None
if not hasattr(self, name) and not name.startswith('_'): if not hasattr(self, name) and not name.startswith('_'):
DynamicField = _import_class("DynamicField") DynamicField = _import_class("DynamicField")
field = DynamicField(db_field=name) field = DynamicField(db_field=name)
@ -182,8 +181,8 @@ class BaseDocument(object):
except AttributeError: except AttributeError:
self__initialised = False self__initialised = False
# Check if the user has created a new instance of a class # Check if the user has created a new instance of a class
if (self._is_document and self__initialised if (self._is_document and self__initialised and
and self__created and name == self._meta.get('id_field')): self__created and name == self._meta.get('id_field')):
super(BaseDocument, self).__setattr__('_created', False) super(BaseDocument, self).__setattr__('_created', False)
super(BaseDocument, self).__setattr__(name, value) super(BaseDocument, self).__setattr__(name, value)
@ -327,7 +326,7 @@ class BaseDocument(object):
if value is not None: if value is not None:
if isinstance(field, (EmbeddedDocumentField)): if isinstance(field, EmbeddedDocumentField):
if fields: if fields:
key = '%s.' % field_name key = '%s.' % field_name
embedded_fields = [ embedded_fields = [
@ -416,7 +415,8 @@ class BaseDocument(object):
def to_json(self, *args, **kwargs): def to_json(self, *args, **kwargs):
"""Converts a document to JSON. """Converts a document to JSON.
:param use_db_field: Set to True by default but enables the output of the json structure with the field names and not the mongodb store db_names in case of set to False :param use_db_field: Set to True by default but enables the output of the json structure with the field names
and not the mongodb store db_names in case of set to False
""" """
use_db_field = kwargs.pop('use_db_field', True) use_db_field = kwargs.pop('use_db_field', True)
return json_util.dumps(self.to_mongo(use_db_field), *args, **kwargs) return json_util.dumps(self.to_mongo(use_db_field), *args, **kwargs)
@ -577,7 +577,7 @@ class BaseDocument(object):
if (hasattr(field, 'field') and if (hasattr(field, 'field') and
isinstance(field.field, ReferenceField)): isinstance(field.field, ReferenceField)):
continue continue
elif (isinstance(field, SortedListField) and field._ordering): elif isinstance(field, SortedListField) and field._ordering:
# if ordering is affected whole list is changed # if ordering is affected whole list is changed
if any(map(lambda d: field._ordering in d._changed_fields, data)): if any(map(lambda d: field._ordering in d._changed_fields, data)):
changed_fields.append(db_field_name) changed_fields.append(db_field_name)
@ -627,7 +627,7 @@ class BaseDocument(object):
if value or isinstance(value, (numbers.Number, bool)): if value or isinstance(value, (numbers.Number, bool)):
continue continue
# If we've set a value that ain't the default value dont unset it. # If we've set a value that ain't the default value don't unset it.
default = None default = None
if (self._dynamic and len(parts) and parts[0] in if (self._dynamic and len(parts) and parts[0] in
self._dynamic_fields): self._dynamic_fields):
@ -979,7 +979,7 @@ class BaseDocument(object):
if hasattr(getattr(field, 'field', None), 'lookup_member'): if hasattr(getattr(field, 'field', None), 'lookup_member'):
new_field = field.field.lookup_member(field_name) new_field = field.field.lookup_member(field_name)
elif cls._dynamic and (isinstance(field, DynamicField) or elif cls._dynamic and (isinstance(field, DynamicField) or
getattr(getattr(field, 'document_type'), '_dynamic')): getattr(getattr(field, 'document_type'), '_dynamic')):
new_field = DynamicField(db_field=field_name) new_field = DynamicField(db_field=field_name)
else: else:
# Look up subfield on the previous field or raise # Look up subfield on the previous field or raise

View File

@ -112,7 +112,7 @@ class BaseField(object):
"""Descriptor for assigning a value to a field in a document. """Descriptor for assigning a value to a field in a document.
""" """
# If setting to None and theres a default # If setting to None and there is a default
# Then set the value to the default value # Then set the value to the default value
if value is None: if value is None:
if self.null: if self.null:
@ -259,8 +259,8 @@ class ComplexBaseField(BaseField):
instance._data[self.name] = value instance._data[self.name] = value
if (self._auto_dereference and instance._initialised and if (self._auto_dereference and instance._initialised and
isinstance(value, (BaseList, BaseDict)) isinstance(value, (BaseList, BaseDict)) and
and not value._dereferenced): not value._dereferenced):
value = _dereference( value = _dereference(
value, max_depth=1, instance=instance, name=self.name value, max_depth=1, instance=instance, name=self.name
) )

View File

@ -110,7 +110,7 @@ class DocumentMetaclass(type):
for base in flattened_bases: for base in flattened_bases:
if (not getattr(base, '_is_base_cls', True) and if (not getattr(base, '_is_base_cls', True) and
not getattr(base, '_meta', {}).get('abstract', True)): not getattr(base, '_meta', {}).get('abstract', True)):
# Collate heirarchy for _cls and _subclasses # Collate hierarchy for _cls and _subclasses
class_name.append(base.__name__) class_name.append(base.__name__)
if hasattr(base, '_meta'): if hasattr(base, '_meta'):
@ -183,7 +183,7 @@ class DocumentMetaclass(type):
"CachedReferenceFields is not allowed in EmbeddedDocuments") "CachedReferenceFields is not allowed in EmbeddedDocuments")
if not f.document_type: if not f.document_type:
raise InvalidDocumentError( raise InvalidDocumentError(
"Document is not avaiable to sync") "Document is not available to sync")
if f.auto_sync: if f.auto_sync:
f.start_listener() f.start_listener()
@ -245,7 +245,7 @@ class DocumentMetaclass(type):
EmbeddedDocument = _import_class('EmbeddedDocument') EmbeddedDocument = _import_class('EmbeddedDocument')
DictField = _import_class('DictField') DictField = _import_class('DictField')
CachedReferenceField = _import_class('CachedReferenceField') CachedReferenceField = _import_class('CachedReferenceField')
return (Document, EmbeddedDocument, DictField, CachedReferenceField) return Document, EmbeddedDocument, DictField, CachedReferenceField
class TopLevelDocumentMetaclass(DocumentMetaclass): class TopLevelDocumentMetaclass(DocumentMetaclass):
@ -301,8 +301,8 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
# Prevent classes setting collection different to their parents # Prevent classes setting collection different to their parents
# If parent wasn't an abstract class # If parent wasn't an abstract class
if (parent_doc_cls and 'collection' in attrs.get('_meta', {}) if (parent_doc_cls and 'collection' in attrs.get('_meta', {}) and
and not parent_doc_cls._meta.get('abstract', True)): not parent_doc_cls._meta.get('abstract', True)):
msg = "Trying to set a collection on a subclass (%s)" % name msg = "Trying to set a collection on a subclass (%s)" % name
warnings.warn(msg, SyntaxWarning) warnings.warn(msg, SyntaxWarning)
del attrs['_meta']['collection'] del attrs['_meta']['collection']
@ -408,14 +408,15 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
return new_class return new_class
def get_auto_id_names(self): @classmethod
def get_auto_id_names(cls, new_class):
id_name, id_db_name = ('id', '_id') id_name, id_db_name = ('id', '_id')
if id_name not in self._fields and \ if id_name not in new_class._fields and \
id_db_name not in (v.db_field for v in self._fields.values()): id_db_name not in (v.db_field for v in new_class._fields.values()):
return id_name, id_db_name return id_name, id_db_name
id_basename, id_db_basename, i = 'auto_id', '_auto_id', 0 id_basename, id_db_basename, i = 'auto_id', '_auto_id', 0
while id_name in self._fields or \ while id_name in new_class._fields or \
id_db_name in (v.db_field for v in self._fields.values()): id_db_name in (v.db_field for v in new_class._fields.values()):
id_name = '{0}_{1}'.format(id_basename, i) id_name = '{0}_{1}'.format(id_basename, i)
id_db_name = '{0}_{1}'.format(id_db_basename, i) id_db_name = '{0}_{1}'.format(id_db_basename, i)
i += 1 i += 1

View File

@ -100,7 +100,7 @@ class DeReference(object):
if isinstance(item, (Document, EmbeddedDocument)): if isinstance(item, (Document, EmbeddedDocument)):
for field_name, field in item._fields.iteritems(): for field_name, field in item._fields.iteritems():
v = item._data.get(field_name, None) v = item._data.get(field_name, None)
if isinstance(v, (DBRef)): if isinstance(v, DBRef):
reference_map.setdefault(field.document_type, set()).add(v.id) reference_map.setdefault(field.document_type, set()).add(v.id)
elif isinstance(v, (dict, SON)) and '_ref' in v: elif isinstance(v, (dict, SON)) and '_ref' in v:
reference_map.setdefault(get_document(v['_cls']), set()).add(v['_ref'].id) reference_map.setdefault(get_document(v['_cls']), set()).add(v['_ref'].id)
@ -111,7 +111,7 @@ class DeReference(object):
if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)): if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)):
key = field_cls key = field_cls
reference_map.setdefault(key, set()).update(refs) reference_map.setdefault(key, set()).update(refs)
elif isinstance(item, (DBRef)): elif isinstance(item, DBRef):
reference_map.setdefault(item.collection, set()).add(item.id) reference_map.setdefault(item.collection, set()).add(item.id)
elif isinstance(item, (dict, SON)) and '_ref' in item: elif isinstance(item, (dict, SON)) and '_ref' in item:
reference_map.setdefault(get_document(item['_cls']), set()).add(item['_ref'].id) reference_map.setdefault(get_document(item['_cls']), set()).add(item['_ref'].id)
@ -219,12 +219,12 @@ class DeReference(object):
elif isinstance(v, (Document, EmbeddedDocument)): elif isinstance(v, (Document, EmbeddedDocument)):
for field_name, field in v._fields.iteritems(): for field_name, field in v._fields.iteritems():
v = data[k]._data.get(field_name, None) v = data[k]._data.get(field_name, None)
if isinstance(v, (DBRef)): if isinstance(v, DBRef):
data[k]._data[field_name] = self.object_map.get( data[k]._data[field_name] = self.object_map.get(
(v.collection, v.id), v) (v.collection, v.id), v)
elif isinstance(v, (dict, SON)) and '_ref' in v: elif isinstance(v, (dict, SON)) and '_ref' in v:
data[k]._data[field_name] = self.object_map.get( data[k]._data[field_name] = self.object_map.get(
(v['_ref'].collection , v['_ref'].id), v) (v['_ref'].collection, v['_ref'].id), v)
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
item_name = "{0}.{1}.{2}".format(name, k, field_name) item_name = "{0}.{1}.{2}".format(name, k, field_name)
data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=item_name) data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=item_name)

View File

@ -3,7 +3,6 @@ import pymongo
import re import re
from pymongo.read_preferences import ReadPreference from pymongo.read_preferences import ReadPreference
from bson import ObjectId
from bson.dbref import DBRef from bson.dbref import DBRef
from mongoengine import signals from mongoengine import signals
from mongoengine.common import _import_class from mongoengine.common import _import_class
@ -61,7 +60,7 @@ class EmbeddedDocument(BaseDocument):
dictionary. dictionary.
""" """
__slots__ = ('_instance') __slots__ = ('_instance', )
# The __metaclass__ attribute is removed by 2to3 when running with Python3 # The __metaclass__ attribute is removed by 2to3 when running with Python3
# my_metaclass is defined so that metaclass can be queried in Python 2 & 3 # my_metaclass is defined so that metaclass can be queried in Python 2 & 3

View File

@ -126,7 +126,7 @@ class ValidationError(AssertionError):
def generate_key(value, prefix=''): def generate_key(value, prefix=''):
if isinstance(value, list): if isinstance(value, list):
value = ' '.join([generate_key(k) for k in value]) value = ' '.join([generate_key(k) for k in value])
if isinstance(value, dict): elif isinstance(value, dict):
value = ' '.join( value = ' '.join(
[generate_key(v, k) for k, v in value.iteritems()]) [generate_key(v, k) for k, v in value.iteritems()])

View File

@ -667,7 +667,6 @@ class DynamicField(BaseField):
return StringField().prepare_query_value(op, value) return StringField().prepare_query_value(op, value)
return super(DynamicField, self).prepare_query_value(op, self.to_mongo(value)) return super(DynamicField, self).prepare_query_value(op, self.to_mongo(value))
def validate(self, value, clean=True): def validate(self, value, clean=True):
if hasattr(value, "validate"): if hasattr(value, "validate"):
value.validate(clean=clean) value.validate(clean=clean)
@ -698,9 +697,10 @@ class ListField(ComplexBaseField):
def prepare_query_value(self, op, value): def prepare_query_value(self, op, value):
if self.field: if self.field:
if op in ('set', 'unset') and (not isinstance(value, basestring) if op in ('set', 'unset') and (
and not isinstance(value, BaseDocument) not isinstance(value, basestring) and
and hasattr(value, '__iter__')): not isinstance(value, BaseDocument) and
hasattr(value, '__iter__')):
return [self.field.prepare_query_value(op, v) for v in value] return [self.field.prepare_query_value(op, v) for v in value]
return self.field.prepare_query_value(op, value) return self.field.prepare_query_value(op, value)
return super(ListField, self).prepare_query_value(op, value) return super(ListField, self).prepare_query_value(op, value)
@ -718,12 +718,10 @@ class EmbeddedDocumentListField(ListField):
""" """
def __init__(self, document_type, *args, **kwargs): def __init__(self, document_type, **kwargs):
""" """
:param document_type: The type of :param document_type: The type of
:class:`~mongoengine.EmbeddedDocument` the list will hold. :class:`~mongoengine.EmbeddedDocument` the list will hold.
:param args: Arguments passed directly into the parent
:class:`~mongoengine.ListField`.
:param kwargs: Keyword arguments passed directly into the parent :param kwargs: Keyword arguments passed directly into the parent
:class:`~mongoengine.ListField`. :class:`~mongoengine.ListField`.
""" """
@ -975,7 +973,6 @@ class ReferenceField(BaseField):
super(ReferenceField, self).prepare_query_value(op, value) super(ReferenceField, self).prepare_query_value(op, value)
return self.to_mongo(value) return self.to_mongo(value)
def validate(self, value): def validate(self, value):
if not isinstance(value, (self.document_type, DBRef)): if not isinstance(value, (self.document_type, DBRef)):
@ -1100,7 +1097,7 @@ class CachedReferenceField(BaseField):
def validate(self, value): def validate(self, value):
if not isinstance(value, (self.document_type)): if not isinstance(value, self.document_type):
self.error("A CachedReferenceField only accepts documents") self.error("A CachedReferenceField only accepts documents")
if isinstance(value, Document) and value.id is None: if isinstance(value, Document) and value.id is None:
@ -1419,7 +1416,7 @@ class FileField(BaseField):
def __set__(self, instance, value): def __set__(self, instance, value):
key = self.name key = self.name
if ((hasattr(value, 'read') and not if ((hasattr(value, 'read') and not
isinstance(value, GridFSProxy)) or isinstance(value, str_types)): isinstance(value, GridFSProxy)) or isinstance(value, str_types)):
# using "FileField() = file/string" notation # using "FileField() = file/string" notation
grid_file = instance._data.get(self.name) grid_file = instance._data.get(self.name)
# If a file already exists, delete it # If a file already exists, delete it
@ -1553,7 +1550,7 @@ class ImageGridFsProxy(GridFSProxy):
if out and out.thumbnail_id: if out and out.thumbnail_id:
self.fs.delete(out.thumbnail_id) self.fs.delete(out.thumbnail_id)
return super(ImageGridFsProxy, self).delete(*args, **kwargs) return super(ImageGridFsProxy, self).delete()
def _put_thumbnail(self, thumbnail, format, progressive, **kwargs): def _put_thumbnail(self, thumbnail, format, progressive, **kwargs):
w, h = thumbnail.size w, h = thumbnail.size

View File

@ -86,8 +86,8 @@ class BaseQuerySet(object):
self.only_fields = [] self.only_fields = []
self._max_time_ms = None self._max_time_ms = None
def __call__(self, q_obj=None, class_check=True, slave_okay=False, def __call__(self, q_obj=None, class_check=True, read_preference=None,
read_preference=None, **query): **query):
"""Filter the selected documents by calling the """Filter the selected documents by calling the
:class:`~mongoengine.queryset.QuerySet` with a query. :class:`~mongoengine.queryset.QuerySet` with a query.
@ -97,9 +97,7 @@ class BaseQuerySet(object):
objects, only the last one will be used objects, only the last one will be used
:param class_check: If set to False bypass class name check when :param class_check: If set to False bypass class name check when
querying collection querying collection
:param slave_okay: if True, allows this query to be run against a :param read_preference: if set, overrides connection-level
replica secondary.
:params read_preference: if set, overrides connection-level
read_preference from `ReplicaSetConnection`. read_preference from `ReplicaSetConnection`.
:param query: Django-style query keyword arguments :param query: Django-style query keyword arguments
""" """
@ -204,7 +202,8 @@ class BaseQuerySet(object):
:param language: The language that determines the list of stop words :param language: The language that determines the list of stop words
for the search and the rules for the stemmer and tokenizer. for the search and the rules for the stemmer and tokenizer.
If not specified, the search uses the default language of the index. If not specified, the search uses the default language of the index.
For supported languages, see `Text Search Languages <http://docs.mongodb.org/manual/reference/text-search-languages/#text-search-languages>`. For supported languages, see
`Text Search Languages <http://docs.mongodb.org/manual/reference/text-search-languages/#text-search-languages>`.
""" """
queryset = self.clone() queryset = self.clone()
if queryset._search_text: if queryset._search_text:
@ -270,7 +269,7 @@ class BaseQuerySet(object):
def insert(self, doc_or_docs, load_bulk=True, write_concern=None): def insert(self, doc_or_docs, load_bulk=True, write_concern=None):
"""bulk insert documents """bulk insert documents
:param docs_or_doc: a document or list of documents to be inserted :param doc_or_docs: a document or list of documents to be inserted
:param load_bulk (optional): If True returns the list of document :param load_bulk (optional): If True returns the list of document
instances instances
:param write_concern: Extra keyword arguments are passed down to :param write_concern: Extra keyword arguments are passed down to
@ -405,8 +404,8 @@ class BaseQuerySet(object):
if rule == CASCADE: if rule == CASCADE:
ref_q = document_cls.objects(**{field_name + '__in': self}) ref_q = document_cls.objects(**{field_name + '__in': self})
ref_q_count = ref_q.count() ref_q_count = ref_q.count()
if (doc != document_cls and ref_q_count > 0 if (doc != document_cls and ref_q_count > 0 or
or (doc == document_cls and ref_q_count > 0)): (doc == document_cls and ref_q_count > 0)):
ref_q.delete(write_concern=write_concern) ref_q.delete(write_concern=write_concern)
elif rule == NULLIFY: elif rule == NULLIFY:
document_cls.objects(**{field_name + '__in': self}).update( document_cls.objects(**{field_name + '__in': self}).update(
@ -527,7 +526,7 @@ class BaseQuerySet(object):
try: try:
if IS_PYMONGO_3: if IS_PYMONGO_3:
if full_response: if full_response:
msg = ("With PyMongo 3+, it is not possible anymore to get the full response.") msg = "With PyMongo 3+, it is not possible anymore to get the full response."
warnings.warn(msg, DeprecationWarning) warnings.warn(msg, DeprecationWarning)
if remove: if remove:
result = queryset._collection.find_one_and_delete( result = queryset._collection.find_one_and_delete(
@ -619,7 +618,8 @@ class BaseQuerySet(object):
return self return self
def using(self, alias): def using(self, alias):
"""This method is for controlling which database the QuerySet will be evaluated against if you are using more than one database. """This method is for controlling which database the QuerySet will be
evaluated against if you are using more than one database.
:param alias: The database alias :param alias: The database alias
@ -966,7 +966,7 @@ class BaseQuerySet(object):
"""Instead of returning Document instances, return raw values from """Instead of returning Document instances, return raw values from
pymongo. pymongo.
:param coerce_type: Field types (if applicable) would be use to :param coerce_types: Field types (if applicable) would be use to
coerce types. coerce types.
""" """
queryset = self.clone() queryset = self.clone()
@ -1258,8 +1258,8 @@ class BaseQuerySet(object):
the aggregation framework instead of map-reduce. the aggregation framework instead of map-reduce.
""" """
result = self._document._get_collection().aggregate([ result = self._document._get_collection().aggregate([
{ '$match': self._query }, {'$match': self._query},
{ '$group': { '_id': 'sum', 'total': { '$sum': '$' + field } } } {'$group': {'_id': 'sum', 'total': {'$sum': '$' + field}}}
]) ])
if IS_PYMONGO_3: if IS_PYMONGO_3:
result = list(result) result = list(result)
@ -1334,8 +1334,8 @@ class BaseQuerySet(object):
uses the aggregation framework instead of map-reduce. uses the aggregation framework instead of map-reduce.
""" """
result = self._document._get_collection().aggregate([ result = self._document._get_collection().aggregate([
{ '$match': self._query }, {'$match': self._query},
{ '$group': { '_id': 'avg', 'total': { '$avg': '$' + field } } } {'$group': {'_id': 'avg', 'total': {'$avg': '$' + field}}}
]) ])
if IS_PYMONGO_3: if IS_PYMONGO_3:
result = list(result) result = list(result)
@ -1637,7 +1637,7 @@ class BaseQuerySet(object):
ret.append(subfield) ret.append(subfield)
found = True found = True
break break
except LookUpError, e: except LookUpError:
pass pass
if not found: if not found:

View File

@ -44,8 +44,8 @@ def query(_doc_cls=None, **query):
if len(parts) > 1 and parts[-1] in MATCH_OPERATORS: if len(parts) > 1 and parts[-1] in MATCH_OPERATORS:
op = parts.pop() op = parts.pop()
#if user escape field name by __ # Allow to escape operator-like field name by __
if len(parts) > 1 and parts[-1]=="": if len(parts) > 1 and parts[-1] == "":
parts.pop() parts.pop()
negate = False negate = False

View File

@ -253,13 +253,13 @@ class DeltaTest(unittest.TestCase):
self.assertEqual(doc.embedded_field.list_field[2].list_field, self.assertEqual(doc.embedded_field.list_field[2].list_field,
[1, 2, {'hello': 'world'}]) [1, 2, {'hello': 'world'}])
del(doc.embedded_field.list_field[2].list_field[2]['hello']) del doc.embedded_field.list_field[2].list_field[2]['hello']
self.assertEqual(doc._delta(), self.assertEqual(doc._delta(),
({}, {'embedded_field.list_field.2.list_field.2.hello': 1})) ({}, {'embedded_field.list_field.2.list_field.2.hello': 1}))
doc.save() doc.save()
doc = doc.reload(10) doc = doc.reload(10)
del(doc.embedded_field.list_field[2].list_field) del doc.embedded_field.list_field[2].list_field
self.assertEqual(doc._delta(), self.assertEqual(doc._delta(),
({}, {'embedded_field.list_field.2.list_field': 1})) ({}, {'embedded_field.list_field.2.list_field': 1}))
@ -593,13 +593,13 @@ class DeltaTest(unittest.TestCase):
self.assertEqual(doc.embedded_field.list_field[2].list_field, self.assertEqual(doc.embedded_field.list_field[2].list_field,
[1, 2, {'hello': 'world'}]) [1, 2, {'hello': 'world'}])
del(doc.embedded_field.list_field[2].list_field[2]['hello']) del doc.embedded_field.list_field[2].list_field[2]['hello']
self.assertEqual(doc._delta(), self.assertEqual(doc._delta(),
({}, {'db_embedded_field.db_list_field.2.db_list_field.2.hello': 1})) ({}, {'db_embedded_field.db_list_field.2.db_list_field.2.hello': 1}))
doc.save() doc.save()
doc = doc.reload(10) doc = doc.reload(10)
del(doc.embedded_field.list_field[2].list_field) del doc.embedded_field.list_field[2].list_field
self.assertEqual(doc._delta(), ({}, self.assertEqual(doc._delta(), ({},
{'db_embedded_field.db_list_field.2.db_list_field': 1})) {'db_embedded_field.db_list_field.2.db_list_field': 1}))
@ -615,7 +615,7 @@ class DeltaTest(unittest.TestCase):
SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {})) SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))
p.doc = 123 p.doc = 123
del(p.doc) del p.doc
self.assertEqual(p._delta(), ( self.assertEqual(p._delta(), (
SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {})) SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))

View File

@ -72,7 +72,7 @@ class DynamicTest(unittest.TestCase):
obj = collection.find_one() obj = collection.find_one()
self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'misc', 'name']) self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'misc', 'name'])
del(p.misc) del p.misc
p.save() p.save()
p = self.Person.objects.get() p = self.Person.objects.get()
@ -340,7 +340,7 @@ class DynamicTest(unittest.TestCase):
person = Person.objects.first() person = Person.objects.first()
person.attrval = "This works" person.attrval = "This works"
person["phone"] = "555-1212" # but this should too person["phone"] = "555-1212" # but this should too
# Same thing two levels deep # Same thing two levels deep
person["address"]["city"] = "Lundenne" person["address"]["city"] = "Lundenne"
@ -356,7 +356,6 @@ class DynamicTest(unittest.TestCase):
self.assertEqual(Person.objects.first().address.city, "Londinium") self.assertEqual(Person.objects.first().address.city, "Londinium")
person = Person.objects.first() person = Person.objects.first()
person["age"] = 35 person["age"] = 35
person.save() person.save()

View File

@ -143,7 +143,7 @@ class IndexesTest(unittest.TestCase):
meta = { meta = {
'indexes': [ 'indexes': [
{ {
'fields': ('title',), 'fields': ('title',),
}, },
], ],
'allow_inheritance': True, 'allow_inheritance': True,

View File

@ -1897,11 +1897,11 @@ class InstanceTest(unittest.TestCase):
self.assertEqual(BlogPost.objects.count(), 0) self.assertEqual(BlogPost.objects.count(), 0)
def test_reverse_delete_rule_cascade_triggers_pre_delete_signal(self): def test_reverse_delete_rule_cascade_triggers_pre_delete_signal(self):
''' ensure the pre_delete signal is triggered upon a cascading deletion """ ensure the pre_delete signal is triggered upon a cascading deletion
setup a blog post with content, an author and editor setup a blog post with content, an author and editor
delete the author which triggers deletion of blogpost via cascade delete the author which triggers deletion of blogpost via cascade
blog post's pre_delete signal alters an editor attribute blog post's pre_delete signal alters an editor attribute
''' """
class Editor(self.Person): class Editor(self.Person):
review_queue = IntField(default=0) review_queue = IntField(default=0)

View File

@ -946,7 +946,7 @@ class FieldTest(unittest.TestCase):
BlogPost.drop_collection() BlogPost.drop_collection()
def test_reverse_list_sorting(self): def test_reverse_list_sorting(self):
'''Ensure that a reverse sorted list field properly sorts values''' """Ensure that a reverse sorted list field properly sorts values"""
class Category(EmbeddedDocument): class Category(EmbeddedDocument):
count = IntField() count = IntField()
@ -1334,7 +1334,6 @@ class FieldTest(unittest.TestCase):
def test_atomic_update_dict_field(self): def test_atomic_update_dict_field(self):
"""Ensure that the entire DictField can be atomically updated.""" """Ensure that the entire DictField can be atomically updated."""
class Simple(Document): class Simple(Document):
mapping = DictField(field=ListField(IntField(required=True))) mapping = DictField(field=ListField(IntField(required=True)))
@ -1349,7 +1348,7 @@ class FieldTest(unittest.TestCase):
self.assertEqual({"ints": [3, 4]}, e.mapping) self.assertEqual({"ints": [3, 4]}, e.mapping)
def create_invalid_mapping(): def create_invalid_mapping():
e.update(set__mapping={"somestrings": ["foo", "bar",]}) e.update(set__mapping={"somestrings": ["foo", "bar", ]})
self.assertRaises(ValueError, create_invalid_mapping) self.assertRaises(ValueError, create_invalid_mapping)
@ -1460,7 +1459,7 @@ class FieldTest(unittest.TestCase):
class Action(EmbeddedDocument): class Action(EmbeddedDocument):
operation = StringField() operation = StringField()
object = StringField() object = StringField()
class Log(Document): class Log(Document):
name = StringField() name = StringField()
@ -3774,7 +3773,7 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase):
class A(Document): class A(Document):
my_list = ListField(EmbeddedDocumentField(EmbeddedWithUnique)) my_list = ListField(EmbeddedDocumentField(EmbeddedWithUnique))
a1 = A(my_list=[]).save() A(my_list=[]).save()
self.assertRaises(NotUniqueError, lambda: A(my_list=[]).save()) self.assertRaises(NotUniqueError, lambda: A(my_list=[]).save())
class EmbeddedWithSparseUnique(EmbeddedDocument): class EmbeddedWithSparseUnique(EmbeddedDocument):
@ -3783,9 +3782,8 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase):
class B(Document): class B(Document):
my_list = ListField(EmbeddedDocumentField(EmbeddedWithSparseUnique)) my_list = ListField(EmbeddedDocumentField(EmbeddedWithSparseUnique))
b1 = B(my_list=[]).save() B(my_list=[]).save()
b2 = B(my_list=[]).save() B(my_list=[]).save()
def test_filtered_delete(self): def test_filtered_delete(self):
""" """
@ -3824,6 +3822,7 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase):
and doesn't interfere with the rest of field functionalities. and doesn't interfere with the rest of field functionalities.
""" """
custom_data = {'a': 'a_value', 'b': [1, 2]} custom_data = {'a': 'a_value', 'b': [1, 2]}
class CustomData(Document): class CustomData(Document):
a_field = IntField() a_field = IntField()
c_field = IntField(custom_data=custom_data) c_field = IntField(custom_data=custom_data)

View File

@ -12,7 +12,7 @@ import gridfs
from nose.plugins.skip import SkipTest from nose.plugins.skip import SkipTest
from mongoengine import * from mongoengine import *
from mongoengine.connection import get_db from mongoengine.connection import get_db
from mongoengine.python_support import PY3, b, StringIO from mongoengine.python_support import b, StringIO
try: try:
from PIL import Image from PIL import Image
@ -112,7 +112,7 @@ class FileTest(unittest.TestCase):
result.the_file.delete() result.the_file.delete()
# Ensure deleted file returns None # Ensure deleted file returns None
self.assertTrue(result.the_file.read() == None) self.assertTrue(result.the_file.read() is None)
def test_file_fields_stream_after_none(self): def test_file_fields_stream_after_none(self):
"""Ensure that a file field can be written to after it has been saved as """Ensure that a file field can be written to after it has been saved as
@ -138,7 +138,7 @@ class FileTest(unittest.TestCase):
result = StreamFile.objects.first() result = StreamFile.objects.first()
self.assertTrue(streamfile == result) self.assertTrue(streamfile == result)
self.assertEqual(result.the_file.read(), text + more_text) self.assertEqual(result.the_file.read(), text + more_text)
#self.assertEqual(result.the_file.content_type, content_type) # self.assertEqual(result.the_file.content_type, content_type)
result.the_file.seek(0) result.the_file.seek(0)
self.assertEqual(result.the_file.tell(), 0) self.assertEqual(result.the_file.tell(), 0)
self.assertEqual(result.the_file.read(len(text)), text) self.assertEqual(result.the_file.read(len(text)), text)
@ -148,7 +148,7 @@ class FileTest(unittest.TestCase):
result.the_file.delete() result.the_file.delete()
# Ensure deleted file returns None # Ensure deleted file returns None
self.assertTrue(result.the_file.read() == None) self.assertTrue(result.the_file.read() is None)
def test_file_fields_set(self): def test_file_fields_set(self):

View File

@ -115,7 +115,7 @@ class GeoFieldTest(unittest.TestCase):
expected = "Invalid LineString:\nBoth values (%s) in point must be float or int" % repr(coord[0]) expected = "Invalid LineString:\nBoth values (%s) in point must be float or int" % repr(coord[0])
self._test_for_expected_error(Location, coord, expected) self._test_for_expected_error(Location, coord, expected)
Location(loc=[[1, 2], [3, 4], [5, 6], [1,2]]).validate() Location(loc=[[1, 2], [3, 4], [5, 6], [1, 2]]).validate()
def test_polygon_validation(self): def test_polygon_validation(self):
class Location(Document): class Location(Document):
@ -226,7 +226,7 @@ class GeoFieldTest(unittest.TestCase):
expected = "Invalid MultiLineString:\nBoth values (%s) in point must be float or int" % repr(coord[0][0]) expected = "Invalid MultiLineString:\nBoth values (%s) in point must be float or int" % repr(coord[0][0])
self._test_for_expected_error(Location, coord, expected) self._test_for_expected_error(Location, coord, expected)
Location(loc=[[[1, 2], [3, 4], [5, 6], [1,2]]]).validate() Location(loc=[[[1, 2], [3, 4], [5, 6], [1, 2]]]).validate()
def test_multipolygon_validation(self): def test_multipolygon_validation(self):
class Location(Document): class Location(Document):

View File

@ -1,11 +1,14 @@
import unittest import unittest
from mongoengine.base.datastructures import StrictDict, SemiStrictDict from mongoengine.base.datastructures import StrictDict, SemiStrictDict
class TestStrictDict(unittest.TestCase): class TestStrictDict(unittest.TestCase):
def strict_dict_class(self, *args, **kwargs): def strict_dict_class(self, *args, **kwargs):
return StrictDict.create(*args, **kwargs) return StrictDict.create(*args, **kwargs)
def setUp(self): def setUp(self):
self.dtype = self.strict_dict_class(("a", "b", "c")) self.dtype = self.strict_dict_class(("a", "b", "c"))
def test_init(self): def test_init(self):
d = self.dtype(a=1, b=1, c=1) d = self.dtype(a=1, b=1, c=1)
self.assertEqual((d.a, d.b, d.c), (1, 1, 1)) self.assertEqual((d.a, d.b, d.c), (1, 1, 1))
@ -38,8 +41,9 @@ class TestStrictDict(unittest.TestCase):
def test_setattr_raises_on_nonexisting_attr(self): def test_setattr_raises_on_nonexisting_attr(self):
d = self.dtype() d = self.dtype()
def _f(): def _f():
d.x=1 d.x = 1
self.assertRaises(AttributeError, _f) self.assertRaises(AttributeError, _f)
def test_setattr_getattr_special(self): def test_setattr_getattr_special(self):