Various fixes again

Matthieu Rigal 2015-06-24 00:50:36 +02:00
parent cd76a906f4
commit 0aeb1ca408
22 changed files with 88 additions and 89 deletions

@ -2,7 +2,7 @@
{% if next or prev %}
<div class="rst-footer-buttons">
{% if next %}
<a href="{{ next.link|e }}" class="btn btn-neutral float-right" title="{{ next.title|striptags|e }}"/>Next <span class="icon icon-circle-arrow-right"></span></a>
<a href="{{ next.link|e }}" class="btn btn-neutral float-right" title="{{ next.title|striptags|e }}">Next <span class="icon icon-circle-arrow-right"></span></a>
{% endif %}
{% if prev %}
<a href="{{ prev.link|e }}" class="btn btn-neutral" title="{{ prev.title|striptags|e }}"><span class="icon icon-circle-arrow-left"></span> Previous</a>

@ -598,7 +598,7 @@ Some variables are made available in the scope of the Javascript function:
The following example demonstrates the intended usage of
:meth:`~mongoengine.queryset.QuerySet.exec_js` by defining a function that sums
over a field on a document (this functionality is already available throught
over a field on a document (this functionality is already available through
:meth:`~mongoengine.queryset.QuerySet.sum` but is shown here for sake of
example)::
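
The guide's actual snippet is elided from this hunk; as a rough, hedged sketch of the idea (assuming a hypothetical BlogPost document with an IntField named hits, and that the server-side scope exposes collection and query as the guide describes):

    # Not the guide's own example -- a simplified sketch under the assumptions above.
    sum_hits_js = """
    function() {
        var total = 0.0;
        db[collection].find(query).forEach(function(doc) {
            total += doc.hits;
        });
        return total;
    }
    """
    total = BlogPost.objects.exec_js(sum_hits_js)
    # For this particular task the equivalent, without server-side JS, would be:
    # BlogPost.objects.sum('hits')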

@ -443,7 +443,7 @@ class StrictDict(object):
class SemiStrictDict(StrictDict):
__slots__ = ('_extras')
__slots__ = ('_extras', )
_classes = {}
def __getattr__(self, attr):
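
For context, the hunk above turns __slots__ from a parenthesised string into a real one-element tuple; a minimal standalone illustration (not taken from the MongoEngine source):

    class WithoutComma(object):
        __slots__ = ('_extras')     # parentheses alone: this is just the string '_extras'

    class WithComma(object):
        __slots__ = ('_extras', )   # trailing comma: a one-element tuple

    print(type(WithoutComma.__slots__).__name__)   # str
    print(type(WithComma.__slots__).__name__)      # tuple

A bare string is still accepted by Python as a single slot name, so behaviour does not change here; the tuple form simply states the intent and stays correct if another slot is added later.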

@ -149,7 +149,6 @@ class BaseDocument(object):
# Handle dynamic data only if an initialised dynamic document
if self._dynamic and not self._dynamic_lock:
field = None
if not hasattr(self, name) and not name.startswith('_'):
DynamicField = _import_class("DynamicField")
field = DynamicField(db_field=name)
@ -182,8 +181,8 @@ class BaseDocument(object):
except AttributeError:
self__initialised = False
# Check if the user has created a new instance of a class
if (self._is_document and self__initialised
and self__created and name == self._meta.get('id_field')):
if (self._is_document and self__initialised and
self__created and name == self._meta.get('id_field')):
super(BaseDocument, self).__setattr__('_created', False)
super(BaseDocument, self).__setattr__(name, value)
@ -327,7 +326,7 @@ class BaseDocument(object):
if value is not None:
if isinstance(field, (EmbeddedDocumentField)):
if isinstance(field, EmbeddedDocumentField):
if fields:
key = '%s.' % field_name
embedded_fields = [
@ -416,7 +415,8 @@ class BaseDocument(object):
def to_json(self, *args, **kwargs):
"""Converts a document to JSON.
:param use_db_field: Set to True by default but enables the output of the json structure with the field names and not the mongodb store db_names in case of set to False
:param use_db_field: Set to True by default but enables the output of the json structure with the field names
and not the mongodb store db_names in case of set to False
"""
use_db_field = kwargs.pop('use_db_field', True)
return json_util.dumps(self.to_mongo(use_db_field), *args, **kwargs)
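
A hedged illustration of what the use_db_field flag switches; the Person document below is hypothetical and not part of this changeset:

    class Person(Document):
        name = StringField(db_field='n')    # stored in MongoDB under the key 'n'

    p = Person(name='Alice')
    p.to_json()                    # keys use the stored db_field, e.g. {"n": "Alice"}
    p.to_json(use_db_field=False)  # keys use the Python field names, e.g. {"name": "Alice"}
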
@ -577,7 +577,7 @@ class BaseDocument(object):
if (hasattr(field, 'field') and
isinstance(field.field, ReferenceField)):
continue
elif (isinstance(field, SortedListField) and field._ordering):
elif isinstance(field, SortedListField) and field._ordering:
# if ordering is affected whole list is changed
if any(map(lambda d: field._ordering in d._changed_fields, data)):
changed_fields.append(db_field_name)
@ -627,7 +627,7 @@ class BaseDocument(object):
if value or isinstance(value, (numbers.Number, bool)):
continue
# If we've set a value that ain't the default value dont unset it.
# If we've set a value that ain't the default value don't unset it.
default = None
if (self._dynamic and len(parts) and parts[0] in
self._dynamic_fields):

@ -112,7 +112,7 @@ class BaseField(object):
"""Descriptor for assigning a value to a field in a document.
"""
# If setting to None and theres a default
# If setting to None and there is a default
# Then set the value to the default value
if value is None:
if self.null:
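
Put differently, assigning None to a non-nullable field that defines a default falls back to that default, while null=True keeps the explicit None. A small hypothetical example:

    class Article(Document):
        status = StringField(default='draft')          # null defaults to False
        note = StringField(null=True, default='n/a')

    a = Article()
    a.status = None
    print(a.status)   # 'draft' -- the default is restored
    a.note = None
    print(a.note)     # None    -- null=True preserves the explicit None
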
@ -259,8 +259,8 @@ class ComplexBaseField(BaseField):
instance._data[self.name] = value
if (self._auto_dereference and instance._initialised and
isinstance(value, (BaseList, BaseDict))
and not value._dereferenced):
isinstance(value, (BaseList, BaseDict)) and
not value._dereferenced):
value = _dereference(
value, max_depth=1, instance=instance, name=self.name
)

@ -110,7 +110,7 @@ class DocumentMetaclass(type):
for base in flattened_bases:
if (not getattr(base, '_is_base_cls', True) and
not getattr(base, '_meta', {}).get('abstract', True)):
# Collate heirarchy for _cls and _subclasses
# Collate hierarchy for _cls and _subclasses
class_name.append(base.__name__)
if hasattr(base, '_meta'):
@ -183,7 +183,7 @@ class DocumentMetaclass(type):
"CachedReferenceFields is not allowed in EmbeddedDocuments")
if not f.document_type:
raise InvalidDocumentError(
"Document is not avaiable to sync")
"Document is not available to sync")
if f.auto_sync:
f.start_listener()
@ -245,7 +245,7 @@ class DocumentMetaclass(type):
EmbeddedDocument = _import_class('EmbeddedDocument')
DictField = _import_class('DictField')
CachedReferenceField = _import_class('CachedReferenceField')
return (Document, EmbeddedDocument, DictField, CachedReferenceField)
return Document, EmbeddedDocument, DictField, CachedReferenceField
class TopLevelDocumentMetaclass(DocumentMetaclass):
@ -301,8 +301,8 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
# Prevent classes setting collection different to their parents
# If parent wasn't an abstract class
if (parent_doc_cls and 'collection' in attrs.get('_meta', {})
and not parent_doc_cls._meta.get('abstract', True)):
if (parent_doc_cls and 'collection' in attrs.get('_meta', {}) and
not parent_doc_cls._meta.get('abstract', True)):
msg = "Trying to set a collection on a subclass (%s)" % name
warnings.warn(msg, SyntaxWarning)
del attrs['_meta']['collection']
@ -408,14 +408,15 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
return new_class
def get_auto_id_names(self):
@classmethod
def get_auto_id_names(cls, new_class):
id_name, id_db_name = ('id', '_id')
if id_name not in self._fields and \
id_db_name not in (v.db_field for v in self._fields.values()):
if id_name not in new_class._fields and \
id_db_name not in (v.db_field for v in new_class._fields.values()):
return id_name, id_db_name
id_basename, id_db_basename, i = 'auto_id', '_auto_id', 0
while id_name in self._fields or \
id_db_name in (v.db_field for v in self._fields.values()):
while id_name in new_class._fields or \
id_db_name in (v.db_field for v in new_class._fields.values()):
id_name = '{0}_{1}'.format(id_basename, i)
id_db_name = '{0}_{1}'.format(id_db_basename, i)
i += 1

@ -100,7 +100,7 @@ class DeReference(object):
if isinstance(item, (Document, EmbeddedDocument)):
for field_name, field in item._fields.iteritems():
v = item._data.get(field_name, None)
if isinstance(v, (DBRef)):
if isinstance(v, DBRef):
reference_map.setdefault(field.document_type, set()).add(v.id)
elif isinstance(v, (dict, SON)) and '_ref' in v:
reference_map.setdefault(get_document(v['_cls']), set()).add(v['_ref'].id)
@ -111,7 +111,7 @@ class DeReference(object):
if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)):
key = field_cls
reference_map.setdefault(key, set()).update(refs)
elif isinstance(item, (DBRef)):
elif isinstance(item, DBRef):
reference_map.setdefault(item.collection, set()).add(item.id)
elif isinstance(item, (dict, SON)) and '_ref' in item:
reference_map.setdefault(get_document(item['_cls']), set()).add(item['_ref'].id)
@ -219,7 +219,7 @@ class DeReference(object):
elif isinstance(v, (Document, EmbeddedDocument)):
for field_name, field in v._fields.iteritems():
v = data[k]._data.get(field_name, None)
if isinstance(v, (DBRef)):
if isinstance(v, DBRef):
data[k]._data[field_name] = self.object_map.get(
(v.collection, v.id), v)
elif isinstance(v, (dict, SON)) and '_ref' in v:

@ -3,7 +3,6 @@ import pymongo
import re
from pymongo.read_preferences import ReadPreference
from bson import ObjectId
from bson.dbref import DBRef
from mongoengine import signals
from mongoengine.common import _import_class
@ -61,7 +60,7 @@ class EmbeddedDocument(BaseDocument):
dictionary.
"""
__slots__ = ('_instance')
__slots__ = ('_instance', )
# The __metaclass__ attribute is removed by 2to3 when running with Python3
# my_metaclass is defined so that metaclass can be queried in Python 2 & 3

@ -126,7 +126,7 @@ class ValidationError(AssertionError):
def generate_key(value, prefix=''):
if isinstance(value, list):
value = ' '.join([generate_key(k) for k in value])
if isinstance(value, dict):
elif isinstance(value, dict):
value = ' '.join(
[generate_key(v, k) for k, v in value.iteritems()])

@ -667,7 +667,6 @@ class DynamicField(BaseField):
return StringField().prepare_query_value(op, value)
return super(DynamicField, self).prepare_query_value(op, self.to_mongo(value))
def validate(self, value, clean=True):
if hasattr(value, "validate"):
value.validate(clean=clean)
@ -698,9 +697,10 @@ class ListField(ComplexBaseField):
def prepare_query_value(self, op, value):
if self.field:
if op in ('set', 'unset') and (not isinstance(value, basestring)
and not isinstance(value, BaseDocument)
and hasattr(value, '__iter__')):
if op in ('set', 'unset') and (
not isinstance(value, basestring) and
not isinstance(value, BaseDocument) and
hasattr(value, '__iter__')):
return [self.field.prepare_query_value(op, v) for v in value]
return self.field.prepare_query_value(op, value)
return super(ListField, self).prepare_query_value(op, value)
@ -718,12 +718,10 @@ class EmbeddedDocumentListField(ListField):
"""
def __init__(self, document_type, *args, **kwargs):
def __init__(self, document_type, **kwargs):
"""
:param document_type: The type of
:class:`~mongoengine.EmbeddedDocument` the list will hold.
:param args: Arguments passed directly into the parent
:class:`~mongoengine.ListField`.
:param kwargs: Keyword arguments passed directly into the parent
:class:`~mongoengine.ListField`.
"""
@ -975,7 +973,6 @@ class ReferenceField(BaseField):
super(ReferenceField, self).prepare_query_value(op, value)
return self.to_mongo(value)
def validate(self, value):
if not isinstance(value, (self.document_type, DBRef)):
@ -1100,7 +1097,7 @@ class CachedReferenceField(BaseField):
def validate(self, value):
if not isinstance(value, (self.document_type)):
if not isinstance(value, self.document_type):
self.error("A CachedReferenceField only accepts documents")
if isinstance(value, Document) and value.id is None:
@ -1553,7 +1550,7 @@ class ImageGridFsProxy(GridFSProxy):
if out and out.thumbnail_id:
self.fs.delete(out.thumbnail_id)
return super(ImageGridFsProxy, self).delete(*args, **kwargs)
return super(ImageGridFsProxy, self).delete()
def _put_thumbnail(self, thumbnail, format, progressive, **kwargs):
w, h = thumbnail.size

@ -86,8 +86,8 @@ class BaseQuerySet(object):
self.only_fields = []
self._max_time_ms = None
def __call__(self, q_obj=None, class_check=True, slave_okay=False,
read_preference=None, **query):
def __call__(self, q_obj=None, class_check=True, read_preference=None,
**query):
"""Filter the selected documents by calling the
:class:`~mongoengine.queryset.QuerySet` with a query.
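
A hedged usage sketch of such a call, now that slave_okay is dropped by this commit; the Page document is hypothetical:

    from pymongo import ReadPreference
    from mongoengine import Document, IntField, Q, StringField

    class Page(Document):
        title = StringField()
        rating = IntField()

    # Q object combined with Django-style keyword arguments; class_check=False
    # skips the automatic _cls filter on inherited collections.
    pages = Page.objects(Q(title__icontains='mongo') | Q(rating__gte=4),
                         class_check=False)

    # read_preference overrides the connection-level setting for this queryset only.
    secondary_pages = Page.objects(read_preference=ReadPreference.SECONDARY_PREFERRED)
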
@ -97,9 +97,7 @@ class BaseQuerySet(object):
objects, only the last one will be used
:param class_check: If set to False bypass class name check when
querying collection
:param slave_okay: if True, allows this query to be run against a
replica secondary.
:params read_preference: if set, overrides connection-level
:param read_preference: if set, overrides connection-level
read_preference from `ReplicaSetConnection`.
:param query: Django-style query keyword arguments
"""
@ -204,7 +202,8 @@ class BaseQuerySet(object):
:param language: The language that determines the list of stop words
for the search and the rules for the stemmer and tokenizer.
If not specified, the search uses the default language of the index.
For supported languages, see `Text Search Languages <http://docs.mongodb.org/manual/reference/text-search-languages/#text-search-languages>`.
For supported languages, see
`Text Search Languages <http://docs.mongodb.org/manual/reference/text-search-languages/#text-search-languages>`.
"""
queryset = self.clone()
if queryset._search_text:
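
For reference, a hedged example of the language parameter in use; the News document and its text index are hypothetical:

    class News(Document):
        title = StringField()
        content = StringField()
        meta = {'indexes': [
            {'fields': ['$title', '$content'],
             'default_language': 'english'}
        ]}

    results = News.objects.search_text('coffee', language='portuguese')
    best = results.order_by('$text_score').first()
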
@ -270,7 +269,7 @@ class BaseQuerySet(object):
def insert(self, doc_or_docs, load_bulk=True, write_concern=None):
"""bulk insert documents
:param docs_or_doc: a document or list of documents to be inserted
:param doc_or_docs: a document or list of documents to be inserted
:param load_bulk (optional): If True returns the list of document
instances
:param write_concern: Extra keyword arguments are passed down to
@ -405,8 +404,8 @@ class BaseQuerySet(object):
if rule == CASCADE:
ref_q = document_cls.objects(**{field_name + '__in': self})
ref_q_count = ref_q.count()
if (doc != document_cls and ref_q_count > 0
or (doc == document_cls and ref_q_count > 0)):
if (doc != document_cls and ref_q_count > 0 or
(doc == document_cls and ref_q_count > 0)):
ref_q.delete(write_concern=write_concern)
elif rule == NULLIFY:
document_cls.objects(**{field_name + '__in': self}).update(
@ -527,7 +526,7 @@ class BaseQuerySet(object):
try:
if IS_PYMONGO_3:
if full_response:
msg = ("With PyMongo 3+, it is not possible anymore to get the full response.")
msg = "With PyMongo 3+, it is not possible anymore to get the full response."
warnings.warn(msg, DeprecationWarning)
if remove:
result = queryset._collection.find_one_and_delete(
@ -619,7 +618,8 @@ class BaseQuerySet(object):
return self
def using(self, alias):
"""This method is for controlling which database the QuerySet will be evaluated against if you are using more than one database.
"""This method is for controlling which database the QuerySet will be
evaluated against if you are using more than one database.
:param alias: The database alias
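
A hedged sketch of switching databases per queryset; the alias, database name and BlogPost document are made up for illustration:

    from mongoengine import register_connection

    register_connection('archive', name='blog_archive_db')   # hypothetical second database
    old_posts = BlogPost.objects(rating__lt=2).using('archive')
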
@ -966,7 +966,7 @@ class BaseQuerySet(object):
"""Instead of returning Document instances, return raw values from
pymongo.
:param coerce_type: Field types (if applicable) would be use to
:param coerce_types: Field types (if applicable) would be use to
coerce types.
"""
queryset = self.clone()
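
A hedged illustration of the difference; the User document is hypothetical:

    users = User.objects(age__gte=18).only('name', 'age')
    users.first()               # a User Document instance
    users.as_pymongo().first()  # a plain dict straight from pymongo,
                                # e.g. {'_id': ObjectId('...'), 'name': u'Ada', 'age': 36}
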
@ -1637,7 +1637,7 @@ class BaseQuerySet(object):
ret.append(subfield)
found = True
break
except LookUpError, e:
except LookUpError:
pass
if not found:

@ -44,7 +44,7 @@ def query(_doc_cls=None, **query):
if len(parts) > 1 and parts[-1] in MATCH_OPERATORS:
op = parts.pop()
#if user escape field name by __
# Allow to escape operator-like field name by __
if len(parts) > 1 and parts[-1] == "":
parts.pop()
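
A hedged illustration of the escape this comment describes; the documents are hypothetical. When a field shares its name with a match operator such as size, a trailing __ tells the parser to read it as a field rather than an operator:

    class Box(EmbeddedDocument):
        size = IntField()

    class Shipment(Document):
        boxes = ListField(EmbeddedDocumentField(Box))

    Shipment.objects(boxes__size=2)    # 'size' parsed as the $size operator: shipments with 2 boxes
    Shipment.objects(boxes__size__=2)  # trailing '__' escapes it: any box whose 'size' field equals 2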

@ -253,13 +253,13 @@ class DeltaTest(unittest.TestCase):
self.assertEqual(doc.embedded_field.list_field[2].list_field,
[1, 2, {'hello': 'world'}])
del(doc.embedded_field.list_field[2].list_field[2]['hello'])
del doc.embedded_field.list_field[2].list_field[2]['hello']
self.assertEqual(doc._delta(),
({}, {'embedded_field.list_field.2.list_field.2.hello': 1}))
doc.save()
doc = doc.reload(10)
del(doc.embedded_field.list_field[2].list_field)
del doc.embedded_field.list_field[2].list_field
self.assertEqual(doc._delta(),
({}, {'embedded_field.list_field.2.list_field': 1}))
@ -593,13 +593,13 @@ class DeltaTest(unittest.TestCase):
self.assertEqual(doc.embedded_field.list_field[2].list_field,
[1, 2, {'hello': 'world'}])
del(doc.embedded_field.list_field[2].list_field[2]['hello'])
del doc.embedded_field.list_field[2].list_field[2]['hello']
self.assertEqual(doc._delta(),
({}, {'db_embedded_field.db_list_field.2.db_list_field.2.hello': 1}))
doc.save()
doc = doc.reload(10)
del(doc.embedded_field.list_field[2].list_field)
del doc.embedded_field.list_field[2].list_field
self.assertEqual(doc._delta(), ({},
{'db_embedded_field.db_list_field.2.db_list_field': 1}))
@ -615,7 +615,7 @@ class DeltaTest(unittest.TestCase):
SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))
p.doc = 123
del(p.doc)
del p.doc
self.assertEqual(p._delta(), (
SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))

@ -72,7 +72,7 @@ class DynamicTest(unittest.TestCase):
obj = collection.find_one()
self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'misc', 'name'])
del(p.misc)
del p.misc
p.save()
p = self.Person.objects.get()
@ -356,7 +356,6 @@ class DynamicTest(unittest.TestCase):
self.assertEqual(Person.objects.first().address.city, "Londinium")
person = Person.objects.first()
person["age"] = 35
person.save()

@ -1897,11 +1897,11 @@ class InstanceTest(unittest.TestCase):
self.assertEqual(BlogPost.objects.count(), 0)
def test_reverse_delete_rule_cascade_triggers_pre_delete_signal(self):
''' ensure the pre_delete signal is triggered upon a cascading deletion
""" ensure the pre_delete signal is triggered upon a cascading deletion
setup a blog post with content, an author and editor
delete the author which triggers deletion of blogpost via cascade
blog post's pre_delete signal alters an editor attribute
'''
"""
class Editor(self.Person):
review_queue = IntField(default=0)

@ -946,7 +946,7 @@ class FieldTest(unittest.TestCase):
BlogPost.drop_collection()
def test_reverse_list_sorting(self):
'''Ensure that a reverse sorted list field properly sorts values'''
"""Ensure that a reverse sorted list field properly sorts values"""
class Category(EmbeddedDocument):
count = IntField()
@ -1334,7 +1334,6 @@ class FieldTest(unittest.TestCase):
def test_atomic_update_dict_field(self):
"""Ensure that the entire DictField can be atomically updated."""
class Simple(Document):
mapping = DictField(field=ListField(IntField(required=True)))
@ -3774,7 +3773,7 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase):
class A(Document):
my_list = ListField(EmbeddedDocumentField(EmbeddedWithUnique))
a1 = A(my_list=[]).save()
A(my_list=[]).save()
self.assertRaises(NotUniqueError, lambda: A(my_list=[]).save())
class EmbeddedWithSparseUnique(EmbeddedDocument):
@ -3783,9 +3782,8 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase):
class B(Document):
my_list = ListField(EmbeddedDocumentField(EmbeddedWithSparseUnique))
b1 = B(my_list=[]).save()
b2 = B(my_list=[]).save()
B(my_list=[]).save()
B(my_list=[]).save()
def test_filtered_delete(self):
"""
@ -3824,6 +3822,7 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase):
and doesn't interfere with the rest of field functionalities.
"""
custom_data = {'a': 'a_value', 'b': [1, 2]}
class CustomData(Document):
a_field = IntField()
c_field = IntField(custom_data=custom_data)

@ -12,7 +12,7 @@ import gridfs
from nose.plugins.skip import SkipTest
from mongoengine import *
from mongoengine.connection import get_db
from mongoengine.python_support import PY3, b, StringIO
from mongoengine.python_support import b, StringIO
try:
from PIL import Image
@ -112,7 +112,7 @@ class FileTest(unittest.TestCase):
result.the_file.delete()
# Ensure deleted file returns None
self.assertTrue(result.the_file.read() == None)
self.assertTrue(result.the_file.read() is None)
def test_file_fields_stream_after_none(self):
"""Ensure that a file field can be written to after it has been saved as
@ -148,7 +148,7 @@ class FileTest(unittest.TestCase):
result.the_file.delete()
# Ensure deleted file returns None
self.assertTrue(result.the_file.read() == None)
self.assertTrue(result.the_file.read() is None)
def test_file_fields_set(self):

@ -1,11 +1,14 @@
import unittest
from mongoengine.base.datastructures import StrictDict, SemiStrictDict
class TestStrictDict(unittest.TestCase):
def strict_dict_class(self, *args, **kwargs):
return StrictDict.create(*args, **kwargs)
def setUp(self):
self.dtype = self.strict_dict_class(("a", "b", "c"))
def test_init(self):
d = self.dtype(a=1, b=1, c=1)
self.assertEqual((d.a, d.b, d.c), (1, 1, 1))
@ -38,6 +41,7 @@ class TestStrictDict(unittest.TestCase):
def test_setattr_raises_on_nonexisting_attr(self):
d = self.dtype()
def _f():
d.x = 1
self.assertRaises(AttributeError, _f)