Various fixes again
@@ -443,7 +443,7 @@ class StrictDict(object):


 class SemiStrictDict(StrictDict):
-    __slots__ = ('_extras')
+    __slots__ = ('_extras', )
     _classes = {}

     def __getattr__(self, attr):
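The hunk above turns a parenthesized string into a real one-element tuple. A quick standalone Python sketch of the difference (not part of the commit):

    slots_without_comma = ('_extras')   # parentheses alone do not make a tuple: this is just the string '_extras'
    slots_with_comma = ('_extras', )    # the trailing comma makes a one-element tuple, the intended form for __slots__
    print(type(slots_without_comma))    # <type 'str'>
    print(type(slots_with_comma))       # <type 'tuple'>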
@@ -149,7 +149,6 @@ class BaseDocument(object):
         # Handle dynamic data only if an initialised dynamic document
         if self._dynamic and not self._dynamic_lock:

-            field = None
             if not hasattr(self, name) and not name.startswith('_'):
                 DynamicField = _import_class("DynamicField")
                 field = DynamicField(db_field=name)
@@ -182,8 +181,8 @@ class BaseDocument(object):
         except AttributeError:
             self__initialised = False
         # Check if the user has created a new instance of a class
-        if (self._is_document and self__initialised
-                and self__created and name == self._meta.get('id_field')):
+        if (self._is_document and self__initialised and
+                self__created and name == self._meta.get('id_field')):
             super(BaseDocument, self).__setattr__('_created', False)

         super(BaseDocument, self).__setattr__(name, value)
@@ -327,7 +326,7 @@ class BaseDocument(object):

         if value is not None:

-            if isinstance(field, (EmbeddedDocumentField)):
+            if isinstance(field, EmbeddedDocumentField):
                 if fields:
                     key = '%s.' % field_name
                     embedded_fields = [
@@ -416,7 +415,8 @@ class BaseDocument(object):

     def to_json(self, *args, **kwargs):
         """Converts a document to JSON.
-        :param use_db_field: Set to True by default but enables the output of the json structure with the field names and not the mongodb store db_names in case of set to False
+        :param use_db_field: Set to True by default but enables the output of the json structure with the field names
+            and not the mongodb store db_names in case of set to False
         """
         use_db_field = kwargs.pop('use_db_field', True)
         return json_util.dumps(self.to_mongo(use_db_field), *args, **kwargs)
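A hedged usage sketch of the use_db_field behaviour documented above; the Person document and its custom db_field are hypothetical, not part of this commit:

    from mongoengine import Document, StringField

    class Person(Document):
        full_name = StringField(db_field='fn')

    p = Person(full_name='Alice')
    p.to_json()                     # default use_db_field=True: keys use the stored db names, e.g. "fn"
    p.to_json(use_db_field=False)   # keys use the Python field names, e.g. "full_name"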
@@ -577,7 +577,7 @@ class BaseDocument(object):
             if (hasattr(field, 'field') and
                     isinstance(field.field, ReferenceField)):
                 continue
-            elif (isinstance(field, SortedListField) and field._ordering):
+            elif isinstance(field, SortedListField) and field._ordering:
                 # if ordering is affected whole list is changed
                 if any(map(lambda d: field._ordering in d._changed_fields, data)):
                     changed_fields.append(db_field_name)
@@ -627,7 +627,7 @@ class BaseDocument(object):
             if value or isinstance(value, (numbers.Number, bool)):
                 continue

-            # If we've set a value that ain't the default value dont unset it.
+            # If we've set a value that ain't the default value don't unset it.
             default = None
             if (self._dynamic and len(parts) and parts[0] in
                     self._dynamic_fields):
@@ -979,7 +979,7 @@ class BaseDocument(object):
                 if hasattr(getattr(field, 'field', None), 'lookup_member'):
                     new_field = field.field.lookup_member(field_name)
                 elif cls._dynamic and (isinstance(field, DynamicField) or
-                        getattr(getattr(field, 'document_type'), '_dynamic')):
+                                       getattr(getattr(field, 'document_type'), '_dynamic')):
                     new_field = DynamicField(db_field=field_name)
                 else:
                     # Look up subfield on the previous field or raise
@@ -112,7 +112,7 @@ class BaseField(object):
         """Descriptor for assigning a value to a field in a document.
         """

-        # If setting to None and theres a default
+        # If setting to None and there is a default
         # Then set the value to the default value
         if value is None:
             if self.null:
@@ -259,8 +259,8 @@ class ComplexBaseField(BaseField):
             instance._data[self.name] = value

         if (self._auto_dereference and instance._initialised and
-                isinstance(value, (BaseList, BaseDict))
-                and not value._dereferenced):
+                isinstance(value, (BaseList, BaseDict)) and
+                not value._dereferenced):
             value = _dereference(
                 value, max_depth=1, instance=instance, name=self.name
             )
@@ -110,7 +110,7 @@ class DocumentMetaclass(type):
         for base in flattened_bases:
             if (not getattr(base, '_is_base_cls', True) and
                     not getattr(base, '_meta', {}).get('abstract', True)):
-                # Collate heirarchy for _cls and _subclasses
+                # Collate hierarchy for _cls and _subclasses
                 class_name.append(base.__name__)

             if hasattr(base, '_meta'):
@@ -183,7 +183,7 @@ class DocumentMetaclass(type):
                        "CachedReferenceFields is not allowed in EmbeddedDocuments")
                 if not f.document_type:
                     raise InvalidDocumentError(
-                        "Document is not avaiable to sync")
+                        "Document is not available to sync")

                 if f.auto_sync:
                     f.start_listener()
@@ -245,7 +245,7 @@ class DocumentMetaclass(type):
         EmbeddedDocument = _import_class('EmbeddedDocument')
         DictField = _import_class('DictField')
         CachedReferenceField = _import_class('CachedReferenceField')
-        return (Document, EmbeddedDocument, DictField, CachedReferenceField)
+        return Document, EmbeddedDocument, DictField, CachedReferenceField


 class TopLevelDocumentMetaclass(DocumentMetaclass):
@@ -301,8 +301,8 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):

         # Prevent classes setting collection different to their parents
         # If parent wasn't an abstract class
-        if (parent_doc_cls and 'collection' in attrs.get('_meta', {})
-                and not parent_doc_cls._meta.get('abstract', True)):
+        if (parent_doc_cls and 'collection' in attrs.get('_meta', {}) and
+                not parent_doc_cls._meta.get('abstract', True)):
             msg = "Trying to set a collection on a subclass (%s)" % name
             warnings.warn(msg, SyntaxWarning)
             del attrs['_meta']['collection']
@@ -408,14 +408,15 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):

         return new_class

-    def get_auto_id_names(self):
+    @classmethod
+    def get_auto_id_names(cls, new_class):
         id_name, id_db_name = ('id', '_id')
-        if id_name not in self._fields and \
-                id_db_name not in (v.db_field for v in self._fields.values()):
+        if id_name not in new_class._fields and \
+                id_db_name not in (v.db_field for v in new_class._fields.values()):
             return id_name, id_db_name
         id_basename, id_db_basename, i = 'auto_id', '_auto_id', 0
-        while id_name in self._fields or \
-                id_db_name in (v.db_field for v in self._fields.values()):
+        while id_name in new_class._fields or \
+                id_db_name in (v.db_field for v in new_class._fields.values()):
             id_name = '{0}_{1}'.format(id_basename, i)
             id_db_name = '{0}_{1}'.format(id_db_basename, i)
             i += 1
@@ -100,7 +100,7 @@ class DeReference(object):
             if isinstance(item, (Document, EmbeddedDocument)):
                 for field_name, field in item._fields.iteritems():
                     v = item._data.get(field_name, None)
-                    if isinstance(v, (DBRef)):
+                    if isinstance(v, DBRef):
                         reference_map.setdefault(field.document_type, set()).add(v.id)
                     elif isinstance(v, (dict, SON)) and '_ref' in v:
                         reference_map.setdefault(get_document(v['_cls']), set()).add(v['_ref'].id)
@@ -111,7 +111,7 @@ class DeReference(object):
                     if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)):
                         key = field_cls
                         reference_map.setdefault(key, set()).update(refs)
-            elif isinstance(item, (DBRef)):
+            elif isinstance(item, DBRef):
                 reference_map.setdefault(item.collection, set()).add(item.id)
             elif isinstance(item, (dict, SON)) and '_ref' in item:
                 reference_map.setdefault(get_document(item['_cls']), set()).add(item['_ref'].id)
@@ -219,12 +219,12 @@ class DeReference(object):
                 elif isinstance(v, (Document, EmbeddedDocument)):
                     for field_name, field in v._fields.iteritems():
                         v = data[k]._data.get(field_name, None)
-                        if isinstance(v, (DBRef)):
+                        if isinstance(v, DBRef):
                             data[k]._data[field_name] = self.object_map.get(
                                 (v.collection, v.id), v)
                         elif isinstance(v, (dict, SON)) and '_ref' in v:
                             data[k]._data[field_name] = self.object_map.get(
-                                (v['_ref'].collection , v['_ref'].id), v)
+                                (v['_ref'].collection, v['_ref'].id), v)
                         elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
                             item_name = "{0}.{1}.{2}".format(name, k, field_name)
                             data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=item_name)
@@ -3,7 +3,6 @@ import pymongo
 import re

 from pymongo.read_preferences import ReadPreference
 from bson import ObjectId
 from bson.dbref import DBRef
 from mongoengine import signals
 from mongoengine.common import _import_class
@@ -61,7 +60,7 @@ class EmbeddedDocument(BaseDocument):
     dictionary.
     """

-    __slots__ = ('_instance')
+    __slots__ = ('_instance', )

     # The __metaclass__ attribute is removed by 2to3 when running with Python3
     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
@@ -126,7 +126,7 @@ class ValidationError(AssertionError):
         def generate_key(value, prefix=''):
             if isinstance(value, list):
                 value = ' '.join([generate_key(k) for k in value])
-            if isinstance(value, dict):
+            elif isinstance(value, dict):
                 value = ' '.join(
                     [generate_key(v, k) for k, v in value.iteritems()])
@@ -667,7 +667,6 @@ class DynamicField(BaseField):
             return StringField().prepare_query_value(op, value)
         return super(DynamicField, self).prepare_query_value(op, self.to_mongo(value))

-
     def validate(self, value, clean=True):
         if hasattr(value, "validate"):
             value.validate(clean=clean)
@@ -698,9 +697,10 @@ class ListField(ComplexBaseField):

     def prepare_query_value(self, op, value):
         if self.field:
-            if op in ('set', 'unset') and (not isinstance(value, basestring)
-                    and not isinstance(value, BaseDocument)
-                    and hasattr(value, '__iter__')):
+            if op in ('set', 'unset') and (
+                    not isinstance(value, basestring) and
+                    not isinstance(value, BaseDocument) and
+                    hasattr(value, '__iter__')):
                 return [self.field.prepare_query_value(op, v) for v in value]
             return self.field.prepare_query_value(op, value)
         return super(ListField, self).prepare_query_value(op, value)
@@ -718,12 +718,10 @@ class EmbeddedDocumentListField(ListField):

     """

-    def __init__(self, document_type, *args, **kwargs):
+    def __init__(self, document_type, **kwargs):
         """
         :param document_type: The type of
          :class:`~mongoengine.EmbeddedDocument` the list will hold.
-        :param args: Arguments passed directly into the parent
-         :class:`~mongoengine.ListField`.
         :param kwargs: Keyword arguments passed directly into the parent
          :class:`~mongoengine.ListField`.
         """
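After this change the field no longer forwards positional arguments; only the document type plus keyword arguments are accepted. A minimal sketch (the Comment and Post documents are hypothetical):

    from mongoengine import (Document, EmbeddedDocument,
                             EmbeddedDocumentListField, StringField)

    class Comment(EmbeddedDocument):
        text = StringField()

    class Post(Document):
        # keyword arguments such as required are still passed through to ListField
        comments = EmbeddedDocumentListField(Comment, required=False)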
@@ -975,7 +973,6 @@ class ReferenceField(BaseField):
         super(ReferenceField, self).prepare_query_value(op, value)
         return self.to_mongo(value)

-
     def validate(self, value):

         if not isinstance(value, (self.document_type, DBRef)):
@@ -1100,7 +1097,7 @@ class CachedReferenceField(BaseField):

     def validate(self, value):

-        if not isinstance(value, (self.document_type)):
+        if not isinstance(value, self.document_type):
             self.error("A CachedReferenceField only accepts documents")

         if isinstance(value, Document) and value.id is None:
@@ -1419,7 +1416,7 @@ class FileField(BaseField):
     def __set__(self, instance, value):
         key = self.name
         if ((hasattr(value, 'read') and not
-            isinstance(value, GridFSProxy)) or isinstance(value, str_types)):
+             isinstance(value, GridFSProxy)) or isinstance(value, str_types)):
             # using "FileField() = file/string" notation
             grid_file = instance._data.get(self.name)
             # If a file already exists, delete it
@@ -1553,7 +1550,7 @@ class ImageGridFsProxy(GridFSProxy):
         if out and out.thumbnail_id:
             self.fs.delete(out.thumbnail_id)

-        return super(ImageGridFsProxy, self).delete(*args, **kwargs)
+        return super(ImageGridFsProxy, self).delete()

     def _put_thumbnail(self, thumbnail, format, progressive, **kwargs):
         w, h = thumbnail.size
@@ -86,8 +86,8 @@ class BaseQuerySet(object):
         self.only_fields = []
         self._max_time_ms = None

-    def __call__(self, q_obj=None, class_check=True, slave_okay=False,
-                 read_preference=None, **query):
+    def __call__(self, q_obj=None, class_check=True, read_preference=None,
+                 **query):
         """Filter the selected documents by calling the
         :class:`~mongoengine.queryset.QuerySet` with a query.
@@ -97,9 +97,7 @@ class BaseQuerySet(object):
             objects, only the last one will be used
         :param class_check: If set to False bypass class name check when
             querying collection
-        :param slave_okay: if True, allows this query to be run against a
-            replica secondary.
-        :params read_preference: if set, overrides connection-level
+        :param read_preference: if set, overrides connection-level
             read_preference from `ReplicaSetConnection`.
         :param query: Django-style query keyword arguments
         """
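With slave_okay gone, read routing goes through read_preference, as the updated docstring describes. A hedged sketch (the Article document is hypothetical):

    from mongoengine import Document, BooleanField
    from pymongo.read_preferences import ReadPreference

    class Article(Document):
        published = BooleanField()

    # Overrides the connection-level read preference for this query only.
    articles = Article.objects(published=True,
                               read_preference=ReadPreference.SECONDARY_PREFERRED)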
@@ -204,7 +202,8 @@ class BaseQuerySet(object):
         :param language: The language that determines the list of stop words
             for the search and the rules for the stemmer and tokenizer.
             If not specified, the search uses the default language of the index.
-            For supported languages, see `Text Search Languages <http://docs.mongodb.org/manual/reference/text-search-languages/#text-search-languages>`.
+            For supported languages, see
+            `Text Search Languages <http://docs.mongodb.org/manual/reference/text-search-languages/#text-search-languages>`.
         """
         queryset = self.clone()
         if queryset._search_text:
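A hedged sketch of the language parameter described above, assuming a document with a text index (the News document and its index definition are hypothetical; the method is MongoEngine's search_text):

    from mongoengine import Document, StringField

    class News(Document):
        content = StringField()
        meta = {'indexes': [{'fields': ['$content']}]}  # text index on content

    # Stop words and stemming rules follow the given language instead of the index default.
    results = News.objects.search_text('ouragan', language='french')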
@@ -270,7 +269,7 @@ class BaseQuerySet(object):
     def insert(self, doc_or_docs, load_bulk=True, write_concern=None):
         """bulk insert documents

-        :param docs_or_doc: a document or list of documents to be inserted
+        :param doc_or_docs: a document or list of documents to be inserted
         :param load_bulk (optional): If True returns the list of document
             instances
         :param write_concern: Extra keyword arguments are passed down to
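A hedged usage sketch of the corrected doc_or_docs parameter (the Person document is hypothetical):

    from mongoengine import Document, StringField

    class Person(Document):
        name = StringField()

    people = Person.objects.insert(
        [Person(name='Ada'), Person(name='Bob')],  # doc_or_docs: a single document or a list
        load_bulk=True)                            # returns the loaded Document instances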
@@ -405,8 +404,8 @@ class BaseQuerySet(object):
             if rule == CASCADE:
                 ref_q = document_cls.objects(**{field_name + '__in': self})
                 ref_q_count = ref_q.count()
-                if (doc != document_cls and ref_q_count > 0
-                        or (doc == document_cls and ref_q_count > 0)):
+                if (doc != document_cls and ref_q_count > 0 or
+                        (doc == document_cls and ref_q_count > 0)):
                     ref_q.delete(write_concern=write_concern)
             elif rule == NULLIFY:
                 document_cls.objects(**{field_name + '__in': self}).update(
@@ -527,7 +526,7 @@ class BaseQuerySet(object):
         try:
             if IS_PYMONGO_3:
                 if full_response:
-                    msg = ("With PyMongo 3+, it is not possible anymore to get the full response.")
+                    msg = "With PyMongo 3+, it is not possible anymore to get the full response."
                     warnings.warn(msg, DeprecationWarning)
                 if remove:
                     result = queryset._collection.find_one_and_delete(
@@ -619,7 +618,8 @@ class BaseQuerySet(object):
         return self

     def using(self, alias):
-        """This method is for controlling which database the QuerySet will be evaluated against if you are using more than one database.
+        """This method is for controlling which database the QuerySet will be
+        evaluated against if you are using more than one database.

         :param alias: The database alias
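A hedged sketch of using() with a second connection alias (the alias name and the Post document are hypothetical):

    from mongoengine import Document, StringField, connect

    class Post(Document):
        title = StringField()

    connect('main_db')                         # default alias
    connect('archive_db', alias='archive')     # hypothetical second database

    old_posts = Post.objects.using('archive')  # evaluate this QuerySet against 'archive'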
@@ -966,7 +966,7 @@ class BaseQuerySet(object):
         """Instead of returning Document instances, return raw values from
         pymongo.

-        :param coerce_type: Field types (if applicable) would be use to
+        :param coerce_types: Field types (if applicable) would be use to
            coerce types.
         """
         queryset = self.clone()
@@ -1258,8 +1258,8 @@ class BaseQuerySet(object):
         the aggregation framework instead of map-reduce.
         """
         result = self._document._get_collection().aggregate([
-            { '$match': self._query },
-            { '$group': { '_id': 'sum', 'total': { '$sum': '$' + field } } }
+            {'$match': self._query},
+            {'$group': {'_id': 'sum', 'total': {'$sum': '$' + field}}}
         ])
        if IS_PYMONGO_3:
             result = list(result)
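This hunk is the aggregation-based implementation behind the queryset's sum(). A hedged usage sketch (the Order document and its fields are hypothetical):

    from mongoengine import Document, IntField, StringField

    class Order(Document):
        status = StringField()
        amount = IntField()

    # Runs the $match/$group pipeline shown above with field='amount'.
    total = Order.objects(status='paid').sum('amount')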
@@ -1334,8 +1334,8 @@ class BaseQuerySet(object):
         uses the aggregation framework instead of map-reduce.
         """
         result = self._document._get_collection().aggregate([
-            { '$match': self._query },
-            { '$group': { '_id': 'avg', 'total': { '$avg': '$' + field } } }
+            {'$match': self._query},
+            {'$group': {'_id': 'avg', 'total': {'$avg': '$' + field}}}
         ])
         if IS_PYMONGO_3:
             result = list(result)
@@ -1637,7 +1637,7 @@ class BaseQuerySet(object):
                         ret.append(subfield)
                         found = True
                         break
-                    except LookUpError, e:
+                    except LookUpError:
                         pass

                 if not found:
@@ -160,4 +160,4 @@ class QuerySetNoDeRef(QuerySet):
     """Special no_dereference QuerySet"""

     def __dereference(items, max_depth=1, instance=None, name=None):
-        return items
+        return items
@@ -44,8 +44,8 @@ def query(_doc_cls=None, **query):
         if len(parts) > 1 and parts[-1] in MATCH_OPERATORS:
             op = parts.pop()

-        #if user escape field name by __
-        if len(parts) > 1 and parts[-1]=="":
+        # Allw to escape operator-like field name by __
+        if len(parts) > 1 and parts[-1] == "":
             parts.pop()

         negate = False
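A hedged sketch of the trailing-__ escape handled above, assuming an embedded field whose name collides with a query operator such as type (the Message document is hypothetical):

    from mongoengine import Document, DictField

    class Message(Document):
        attachment = DictField()

    # Without the trailing __, 'type' would be parsed as the $type query operator;
    # with it, the lookup targets the key literally named 'type' inside attachment.
    pdfs = Message.objects(attachment__type__='pdf')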