Improve the health of this package (#1428)

@@ -1,8 +1,28 @@
# Base module is split into several files for convenience. Files inside of
# this module should import from a specific submodule (e.g.
# `from mongoengine.base.document import BaseDocument`), but all of the
# other modules should import directly from the top-level module (e.g.
# `from mongoengine.base import BaseDocument`). This approach is cleaner and
# also helps with cyclical import errors.
from mongoengine.base.common import *
from mongoengine.base.datastructures import *
from mongoengine.base.document import *
from mongoengine.base.fields import *
from mongoengine.base.metaclasses import *

# Help with backwards compatibility
from mongoengine.errors import *

__all__ = (
    # common
    'UPDATE_OPERATORS', '_document_registry', 'get_document',

    # datastructures
    'BaseDict', 'BaseList', 'EmbeddedDocumentList',

    # document
    'BaseDocument',

    # fields
    'BaseField', 'ComplexBaseField', 'ObjectIdField', 'GeoJsonBaseField',

    # metaclasses
    'DocumentMetaclass', 'TopLevelDocumentMetaclass'
)
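For illustration, the import convention described in the comment above, as a minimal sketch (the usage sites are hypothetical, the module paths are real):

# Inside mongoengine/base/* submodules, import from the specific submodule:
from mongoengine.base.document import BaseDocument

# Everywhere else in the codebase, import from the top-level base module:
from mongoengine.base import BaseDocument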
@@ -1,13 +1,18 @@
from mongoengine.errors import NotRegistered

__all__ = ('ALLOW_INHERITANCE', 'get_document', '_document_registry')
__all__ = ('UPDATE_OPERATORS', 'get_document', '_document_registry')


UPDATE_OPERATORS = set(['set', 'unset', 'inc', 'dec', 'pop', 'push',
                        'push_all', 'pull', 'pull_all', 'add_to_set',
                        'set_on_insert', 'min', 'max'])

ALLOW_INHERITANCE = False

_document_registry = {}


def get_document(name):
    """Get a document class by name."""
    doc = _document_registry.get(name, None)
    if not doc:
        # Possible old style name
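For context, a minimal sketch of how the registry and `get_document` are used; the `Person` class is hypothetical, and registration normally happens in the document metaclass:

from mongoengine import Document, StringField
from mongoengine.base import get_document

class Person(Document):
    name = StringField()

# The metaclass registers Person in _document_registry under its name, so:
assert get_document('Person') is Person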
@@ -1,14 +1,16 @@
import itertools
import weakref

import six

from mongoengine.common import _import_class
from mongoengine.errors import DoesNotExist, MultipleObjectsReturned

__all__ = ("BaseDict", "BaseList", "EmbeddedDocumentList")
__all__ = ('BaseDict', 'BaseList', 'EmbeddedDocumentList')


class BaseDict(dict):
    """A special dict so we can watch any changes"""
    """A special dict so we can watch any changes."""

    _dereferenced = False
    _instance = None
@@ -93,8 +95,7 @@ class BaseDict(dict):


class BaseList(list):
    """A special list so we can watch any changes
    """
    """A special list so we can watch any changes."""

    _dereferenced = False
    _instance = None
@@ -209,17 +210,22 @@ class BaseList(list):
class EmbeddedDocumentList(BaseList):

    @classmethod
    def __match_all(cls, i, kwargs):
        items = kwargs.items()
        return all([
            getattr(i, k) == v or unicode(getattr(i, k)) == v for k, v in items
        ])
    def __match_all(cls, embedded_doc, kwargs):
        """Return True if a given embedded doc matches all the filter
        kwargs. If it doesn't return False.
        """
        for key, expected_value in kwargs.items():
            doc_val = getattr(embedded_doc, key)
            if doc_val != expected_value and six.text_type(doc_val) != expected_value:
                return False
        return True

    @classmethod
    def __only_matches(cls, obj, kwargs):
    def __only_matches(cls, embedded_docs, kwargs):
        """Return embedded docs that match the filter kwargs."""
        if not kwargs:
            return obj
        return filter(lambda i: cls.__match_all(i, kwargs), obj)
            return embedded_docs
        return [doc for doc in embedded_docs if cls.__match_all(doc, kwargs)]

    def __init__(self, list_items, instance, name):
        super(EmbeddedDocumentList, self).__init__(list_items, instance, name)
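A rough sketch of the matching semantics the rewritten helpers implement, assuming a hypothetical `post.comments` EmbeddedDocumentList: a doc matches when, for every kwarg, the attribute equals the expected value either directly or after coercion to text.

matches = [
    comment for comment in post.comments
    if comment.author == 'Ross' or six.text_type(comment.author) == 'Ross'
]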
@@ -285,18 +291,18 @@ class EmbeddedDocumentList(BaseList):
        values = self.__only_matches(self, kwargs)
        if len(values) == 0:
            raise DoesNotExist(
                "%s matching query does not exist." % self._name
                '%s matching query does not exist.' % self._name
            )
        elif len(values) > 1:
            raise MultipleObjectsReturned(
                "%d items returned, instead of 1" % len(values)
                '%d items returned, instead of 1' % len(values)
            )

        return values[0]

    def first(self):
        """
        Returns the first embedded document in the list, or ``None`` if empty.
        """Return the first embedded document in the list, or ``None``
        if empty.
        """
        if len(self) > 0:
            return self[0]
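The semantics shown above, summarized with a hedged example (the `post.comments` list is hypothetical):

post.comments.get(author='Ross')   # exactly one match -> that embedded document
# zero matches  -> raises DoesNotExist
# two+ matches  -> raises MultipleObjectsReturned
post.comments.first()              # first element, or None if the list is empty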
@@ -438,7 +444,7 @@ class StrictDict(object):
            __slots__ = allowed_keys_tuple

            def __repr__(self):
                return "{%s}" % ', '.join('"{0!s}": {1!r}'.format(k, v) for k, v in self.items())
                return '{%s}' % ', '.join('"{0!s}": {1!r}'.format(k, v) for k, v in self.items())

        cls._classes[allowed_keys] = SpecificStrictDict
        return cls._classes[allowed_keys]
@@ -1,6 +1,5 @@
import copy
import numbers
import operator
from collections import Hashable
from functools import partial

@@ -8,30 +7,27 @@ from bson import ObjectId, json_util
from bson.dbref import DBRef
from bson.son import SON
import pymongo
import six

from mongoengine import signals
from mongoengine.base.common import ALLOW_INHERITANCE, get_document
from mongoengine.base.datastructures import (
    BaseDict,
    BaseList,
    EmbeddedDocumentList,
    SemiStrictDict,
    StrictDict
)
from mongoengine.base.common import get_document
from mongoengine.base.datastructures import (BaseDict, BaseList,
                                             EmbeddedDocumentList,
                                             SemiStrictDict, StrictDict)
from mongoengine.base.fields import ComplexBaseField
from mongoengine.common import _import_class
from mongoengine.errors import (FieldDoesNotExist, InvalidDocumentError,
                                LookUpError, ValidationError)
from mongoengine.python_support import PY3, txt_type
                                LookUpError, OperationError, ValidationError)

__all__ = ('BaseDocument', 'NON_FIELD_ERRORS')
__all__ = ('BaseDocument',)

NON_FIELD_ERRORS = '__all__'


class BaseDocument(object):
    __slots__ = ('_changed_fields', '_initialised', '_created', '_data',
                 '_dynamic_fields', '_auto_id_field', '_db_field_map', '__weakref__')
                 '_dynamic_fields', '_auto_id_field', '_db_field_map',
                 '__weakref__')

    _dynamic = False
    _dynamic_lock = True
@@ -57,15 +53,15 @@ class BaseDocument(object):
                name = next(field)
                if name in values:
                    raise TypeError(
                        "Multiple values for keyword argument '" + name + "'")
                        'Multiple values for keyword argument "%s"' % name)
                values[name] = value

        __auto_convert = values.pop("__auto_convert", True)
        __auto_convert = values.pop('__auto_convert', True)

        # 399: set default values only to fields loaded from DB
        __only_fields = set(values.pop("__only_fields", values))
        __only_fields = set(values.pop('__only_fields', values))

        _created = values.pop("_created", True)
        _created = values.pop('_created', True)

        signals.pre_init.send(self.__class__, document=self, values=values)

@@ -76,7 +72,7 @@ class BaseDocument(object):
                self._fields.keys() + ['id', 'pk', '_cls', '_text_score'])
            if _undefined_fields:
                msg = (
                    "The fields '{0}' do not exist on the document '{1}'"
                    'The fields "{0}" do not exist on the document "{1}"'
                ).format(_undefined_fields, self._class_name)
                raise FieldDoesNotExist(msg)

@@ -95,7 +91,7 @@ class BaseDocument(object):
                value = getattr(self, key, None)
                setattr(self, key, value)

        if "_cls" not in values:
        if '_cls' not in values:
            self._cls = self._class_name

        # Set passed values after initialisation
@@ -150,7 +146,7 @@ class BaseDocument(object):
        if self._dynamic and not self._dynamic_lock:

            if not hasattr(self, name) and not name.startswith('_'):
                DynamicField = _import_class("DynamicField")
                DynamicField = _import_class('DynamicField')
                field = DynamicField(db_field=name)
                field.name = name
                self._dynamic_fields[name] = field
@@ -169,11 +165,13 @@ class BaseDocument(object):
        except AttributeError:
            self__created = True

        if (self._is_document and not self__created and
                name in self._meta.get('shard_key', tuple()) and
                self._data.get(name) != value):
            OperationError = _import_class('OperationError')
            msg = "Shard Keys are immutable. Tried to update %s" % name
        if (
            self._is_document and
            not self__created and
            name in self._meta.get('shard_key', tuple()) and
            self._data.get(name) != value
        ):
            msg = 'Shard Keys are immutable. Tried to update %s' % name
            raise OperationError(msg)

        try:
@@ -197,8 +195,8 @@ class BaseDocument(object):
        return data

    def __setstate__(self, data):
        if isinstance(data["_data"], SON):
            data["_data"] = self.__class__._from_son(data["_data"])._data
        if isinstance(data['_data'], SON):
            data['_data'] = self.__class__._from_son(data['_data'])._data
        for k in ('_changed_fields', '_initialised', '_created', '_data',
                  '_dynamic_fields'):
            if k in data:
@@ -212,7 +210,7 @@ class BaseDocument(object):

        dynamic_fields = data.get('_dynamic_fields') or SON()
        for k in dynamic_fields.keys():
            setattr(self, k, data["_data"].get(k))
            setattr(self, k, data['_data'].get(k))

    def __iter__(self):
        return iter(self._fields_ordered)
@@ -254,12 +252,13 @@ class BaseDocument(object):
        return repr_type('<%s: %s>' % (self.__class__.__name__, u))

    def __str__(self):
        # TODO this could be simpler?
        if hasattr(self, '__unicode__'):
            if PY3:
            if six.PY3:
                return self.__unicode__()
            else:
                return unicode(self).encode('utf-8')
        return txt_type('%s object' % self.__class__.__name__)
                return six.text_type(self).encode('utf-8')
        return six.text_type('%s object' % self.__class__.__name__)

    def __eq__(self, other):
        if isinstance(other, self.__class__) and hasattr(other, 'id') and other.id is not None:
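As context for the `six` migration above: `six.PY3` and `six.text_type` replace the package's own `PY3` / `txt_type` helpers, and `six.text_type` is `unicode` on Python 2 and `str` on Python 3. A trivial sketch:

import six

if six.PY3:
    assert six.text_type is str
else:
    assert six.text_type is unicode  # noqa: F821 (the name only exists on Python 2)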
@@ -308,7 +307,7 @@ class BaseDocument(object):
        fields = []

        data = SON()
        data["_id"] = None
        data['_id'] = None
        data['_cls'] = self._class_name

        # only root fields ['test1.a', 'test2'] => ['test1', 'test2']
@@ -351,18 +350,8 @@ class BaseDocument(object):
            else:
                data[field.name] = value

        # If "_id" has not been set, then try and set it
        Document = _import_class("Document")
        if isinstance(self, Document):
            if data["_id"] is None:
                data["_id"] = self._data.get("id", None)

        if data['_id'] is None:
            data.pop('_id')

        # Only add _cls if allow_inheritance is True
        if (not hasattr(self, '_meta') or
                not self._meta.get('allow_inheritance', ALLOW_INHERITANCE)):
        if not self._meta.get('allow_inheritance'):
            data.pop('_cls')

        return data
@@ -376,16 +365,16 @@ class BaseDocument(object):
        if clean:
            try:
                self.clean()
            except ValidationError, error:
            except ValidationError as error:
                errors[NON_FIELD_ERRORS] = error

        # Get a list of tuples of field names and their current values
        fields = [(self._fields.get(name, self._dynamic_fields.get(name)),
                   self._data.get(name)) for name in self._fields_ordered]

        EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
        EmbeddedDocumentField = _import_class('EmbeddedDocumentField')
        GenericEmbeddedDocumentField = _import_class(
            "GenericEmbeddedDocumentField")
            'GenericEmbeddedDocumentField')

        for field, value in fields:
            if value is not None:
@@ -395,21 +384,21 @@ class BaseDocument(object):
                        field._validate(value, clean=clean)
                    else:
                        field._validate(value)
                except ValidationError, error:
                except ValidationError as error:
                    errors[field.name] = error.errors or error
                except (ValueError, AttributeError, AssertionError), error:
                except (ValueError, AttributeError, AssertionError) as error:
                    errors[field.name] = error
            elif field.required and not getattr(field, '_auto_gen', False):
                errors[field.name] = ValidationError('Field is required',
                                                     field_name=field.name)

        if errors:
            pk = "None"
            pk = 'None'
            if hasattr(self, 'pk'):
                pk = self.pk
            elif self._instance and hasattr(self._instance, 'pk'):
                pk = self._instance.pk
            message = "ValidationError (%s:%s) " % (self._class_name, pk)
            message = 'ValidationError (%s:%s) ' % (self._class_name, pk)
            raise ValidationError(message, errors=errors)

    def to_json(self, *args, **kwargs):
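On the `except` rewrites in this hunk: `except E, err` is Python 2-only syntax, while `except E as err` parses on Python 2.6+ and Python 3 alike, which is what the change relies on. A trivial sketch:

try:
    int('not a number')
except ValueError as exc:  # valid on both Python 2.6+ and Python 3
    print(exc)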
@@ -426,33 +415,26 @@ class BaseDocument(object):
        return cls._from_son(json_util.loads(json_data), created=created)

    def __expand_dynamic_values(self, name, value):
        """expand any dynamic values to their correct types / values"""
        """Expand any dynamic values to their correct types / values."""
        if not isinstance(value, (dict, list, tuple)):
            return value

        EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField')

        is_list = False
        if not hasattr(value, 'items'):
            is_list = True
            value = dict([(k, v) for k, v in enumerate(value)])

        if not is_list and '_cls' in value:
        # If the value is a dict with '_cls' in it, turn it into a document
        is_dict = isinstance(value, dict)
        if is_dict and '_cls' in value:
            cls = get_document(value['_cls'])
            return cls(**value)

        data = {}
        for k, v in value.items():
            key = name if is_list else k
            data[k] = self.__expand_dynamic_values(key, v)

        if is_list:  # Convert back to a list
            data_items = sorted(data.items(), key=operator.itemgetter(0))
            value = [v for k, v in data_items]
        if is_dict:
            value = {
                k: self.__expand_dynamic_values(k, v)
                for k, v in value.items()
            }
        else:
            value = data
            value = [self.__expand_dynamic_values(name, v) for v in value]

        # Convert lists / values so we can watch for any changes on them
        EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField')
        if (isinstance(value, (list, tuple)) and
                not isinstance(value, BaseList)):
            if issubclass(type(self), EmbeddedDocumentListField):
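A rough summary of what the rewritten `__expand_dynamic_values` does with each shape of input (the values here are hypothetical):

# {'_cls': 'Comment', 'text': 'hi'} -> get_document('Comment')(**value), a document
# {'a': 1, 'b': {'c': 2}}           -> dict with each value expanded recursively
# [1, {'x': 2}]                     -> list with each element expanded recursively
# anything else                     -> returned unchanged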
@@ -465,8 +447,7 @@ class BaseDocument(object):
        return value

    def _mark_as_changed(self, key):
        """Marks a key as explicitly changed by the user
        """
        """Mark a key as explicitly changed by the user."""
        if not key:
            return

@@ -496,10 +477,11 @@ class BaseDocument(object):
                remove(field)

    def _clear_changed_fields(self):
        """Using get_changed_fields iterate and remove any fields that are
        marked as changed"""
        """Using _get_changed_fields iterate and remove any fields that
        are marked as changed.
        """
        for changed in self._get_changed_fields():
            parts = changed.split(".")
            parts = changed.split('.')
            data = self
            for part in parts:
                if isinstance(data, list):
@@ -511,10 +493,13 @@ class BaseDocument(object):
                    data = data.get(part, None)
                else:
                    data = getattr(data, part, None)
            if hasattr(data, "_changed_fields"):
                if hasattr(data, "_is_document") and data._is_document:

            if hasattr(data, '_changed_fields'):
                if getattr(data, '_is_document', False):
                    continue

                data._changed_fields = []

        self._changed_fields = []

    def _nestable_types_changed_fields(self, changed_fields, key, data, inspected):
@@ -526,26 +511,27 @@ class BaseDocument(object):
            iterator = data.iteritems()

        for index, value in iterator:
            list_key = "%s%s." % (key, index)
            list_key = '%s%s.' % (key, index)
            # don't check anything lower if this key is already marked
            # as changed.
            if list_key[:-1] in changed_fields:
                continue
            if hasattr(value, '_get_changed_fields'):
                changed = value._get_changed_fields(inspected)
                changed_fields += ["%s%s" % (list_key, k)
                changed_fields += ['%s%s' % (list_key, k)
                                   for k in changed if k]
            elif isinstance(value, (list, tuple, dict)):
                self._nestable_types_changed_fields(
                    changed_fields, list_key, value, inspected)

    def _get_changed_fields(self, inspected=None):
        """Returns a list of all fields that have explicitly been changed.
        """Return a list of all fields that have explicitly been changed.
        """
        EmbeddedDocument = _import_class("EmbeddedDocument")
        DynamicEmbeddedDocument = _import_class("DynamicEmbeddedDocument")
        ReferenceField = _import_class("ReferenceField")
        SortedListField = _import_class("SortedListField")
        EmbeddedDocument = _import_class('EmbeddedDocument')
        DynamicEmbeddedDocument = _import_class('DynamicEmbeddedDocument')
        ReferenceField = _import_class('ReferenceField')
        SortedListField = _import_class('SortedListField')

        changed_fields = []
        changed_fields += getattr(self, '_changed_fields', [])

@@ -572,7 +558,7 @@ class BaseDocument(object):
        ):
            # Find all embedded fields that have been changed
            changed = data._get_changed_fields(inspected)
            changed_fields += ["%s%s" % (key, k) for k in changed if k]
            changed_fields += ['%s%s' % (key, k) for k in changed if k]
        elif (isinstance(data, (list, tuple, dict)) and
                db_field_name not in changed_fields):
            if (hasattr(field, 'field') and
@@ -676,21 +662,25 @@ class BaseDocument(object):

    @classmethod
    def _get_collection_name(cls):
        """Returns the collection name for this class. None for abstract class
        """Return the collection name for this class. None for abstract
        class.
        """
        return cls._meta.get('collection', None)

    @classmethod
    def _from_son(cls, son, _auto_dereference=True, only_fields=None, created=False):
        """Create an instance of a Document (subclass) from a PyMongo SON.
        """Create an instance of a Document (subclass) from a PyMongo
        SON.
        """
        if not only_fields:
            only_fields = []

        # get the class name from the document, falling back to the given
        # Get the class name from the document, falling back to the given
        # class if unavailable
        class_name = son.get('_cls', cls._class_name)
        data = dict(("%s" % key, value) for key, value in son.iteritems())

        # Convert SON to a dict, making sure each key is a string
        data = {str(key): value for key, value in son.iteritems()}

        # Return correct subclass for document type
        if class_name != cls._class_name:
@@ -712,19 +702,20 @@ class BaseDocument(object):
                              else field.to_python(value))
                if field_name != field.db_field:
                    del data[field.db_field]
            except (AttributeError, ValueError), e:
            except (AttributeError, ValueError) as e:
                errors_dict[field_name] = e

        if errors_dict:
            errors = "\n".join(["%s - %s" % (k, v)
            errors = '\n'.join(['%s - %s' % (k, v)
                                for k, v in errors_dict.items()])
            msg = ("Invalid data to create a `%s` instance.\n%s"
            msg = ('Invalid data to create a `%s` instance.\n%s'
                   % (cls._class_name, errors))
            raise InvalidDocumentError(msg)

        # In STRICT documents, remove any keys that aren't in cls._fields
        if cls.STRICT:
            data = dict((k, v)
                        for k, v in data.iteritems() if k in cls._fields)
            data = {k: v for k, v in data.iteritems() if k in cls._fields}

        obj = cls(__auto_convert=False, _created=created, __only_fields=only_fields, **data)
        obj._changed_fields = changed_fields
        if not _auto_dereference:
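For orientation, a hedged sketch of `_from_son` in use; the `Person` document and values are hypothetical, and the `created` flag is simply forwarded to the constructor's `_created` argument:

from bson import SON

son = SON([('_cls', 'Person'), ('name', 'Ross')])
person = Person._from_son(son)                # a Person instance built from raw SON
person = Person._from_son(son, created=True)  # same, with _created=True on the object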
@@ -734,37 +725,43 @@ class BaseDocument(object):

    @classmethod
    def _build_index_specs(cls, meta_indexes):
        """Generate and merge the full index specs
        """

        """Generate and merge the full index specs."""
        geo_indices = cls._geo_indices()
        unique_indices = cls._unique_with_indexes()
        index_specs = [cls._build_index_spec(spec)
                       for spec in meta_indexes]
        index_specs = [cls._build_index_spec(spec) for spec in meta_indexes]

        def merge_index_specs(index_specs, indices):
            """Helper method for merging index specs."""
            if not indices:
                return index_specs

            spec_fields = [v['fields']
                           for k, v in enumerate(index_specs)]
            # Merge unique_indexes with existing specs
            for k, v in enumerate(indices):
                if v['fields'] in spec_fields:
                    index_specs[spec_fields.index(v['fields'])].update(v)
            # Create a map of index fields to index spec. We're converting
            # the fields from a list to a tuple so that it's hashable.
            spec_fields = {
                tuple(index['fields']): index for index in index_specs
            }

            # For each new index, if there's an existing index with the same
            # fields list, update the existing spec with all data from the
            # new spec.
            for new_index in indices:
                candidate = spec_fields.get(tuple(new_index['fields']))
                if candidate is None:
                    index_specs.append(new_index)
                else:
                    index_specs.append(v)
                    candidate.update(new_index)

            return index_specs

        # Merge geo indexes and unique_with indexes into the meta index specs.
        index_specs = merge_index_specs(index_specs, geo_indices)
        index_specs = merge_index_specs(index_specs, unique_indices)
        return index_specs

    @classmethod
    def _build_index_spec(cls, spec):
        """Build a PyMongo index spec from a MongoEngine index spec.
        """
        if isinstance(spec, basestring):
        """Build a PyMongo index spec from a MongoEngine index spec."""
        if isinstance(spec, six.string_types):
            spec = {'fields': [spec]}
        elif isinstance(spec, (list, tuple)):
            spec = {'fields': list(spec)}
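A small worked example of the merge strategy above, keyed on `tuple(index['fields'])` so the fields list is hashable (values hypothetical):

index_specs = [{'fields': [('title', 1)]}]
indices = [{'fields': [('title', 1)], 'unique': True},
           {'fields': [('point', '2dsphere')]}]

# The first new index shares a fields list -> the existing spec is updated in place.
# The second has no match -> it is appended.
# Result: [{'fields': [('title', 1)], 'unique': True},
#          {'fields': [('point', '2dsphere')]}]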
@@ -775,8 +772,7 @@ class BaseDocument(object):
        direction = None

        # Check to see if we need to include _cls
        allow_inheritance = cls._meta.get('allow_inheritance',
                                          ALLOW_INHERITANCE)
        allow_inheritance = cls._meta.get('allow_inheritance')
        include_cls = (
            allow_inheritance and
            not spec.get('sparse', False) and
@@ -786,7 +782,7 @@ class BaseDocument(object):

        # 733: don't include cls if index_cls is False unless there is an explicit cls with the index
        include_cls = include_cls and (spec.get('cls', False) or cls._meta.get('index_cls', True))
        if "cls" in spec:
        if 'cls' in spec:
            spec.pop('cls')
        for key in spec['fields']:
            # If inherited spec continue
@@ -801,19 +797,19 @@ class BaseDocument(object):
            # GEOHAYSTACK from )
            # GEO2D from *
            direction = pymongo.ASCENDING
            if key.startswith("-"):
            if key.startswith('-'):
                direction = pymongo.DESCENDING
            elif key.startswith("$"):
            elif key.startswith('$'):
                direction = pymongo.TEXT
            elif key.startswith("#"):
            elif key.startswith('#'):
                direction = pymongo.HASHED
            elif key.startswith("("):
            elif key.startswith('('):
                direction = pymongo.GEOSPHERE
            elif key.startswith(")"):
            elif key.startswith(')'):
                direction = pymongo.GEOHAYSTACK
            elif key.startswith("*"):
            elif key.startswith('*'):
                direction = pymongo.GEO2D
            if key.startswith(("+", "-", "*", "$", "#", "(", ")")):
            if key.startswith(('+', '-', '*', '$', '#', '(', ')')):
                key = key[1:]

            # Use real field name, do it manually because we need field
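The prefix-to-direction mapping handled above, summarized (the directions are the real pymongo constants; the field names are hypothetical):

# 'name'      -> ('name', pymongo.ASCENDING)      (no prefix)
# '-created'  -> ('created', pymongo.DESCENDING)
# '$title'    -> ('title', pymongo.TEXT)
# '#slug'     -> ('slug', pymongo.HASHED)
# '(loc'      -> ('loc', pymongo.GEOSPHERE)
# ')loc'      -> ('loc', pymongo.GEOHAYSTACK)
# '*loc'      -> ('loc', pymongo.GEO2D)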
@@ -826,7 +822,7 @@ class BaseDocument(object):
            parts = []
            for field in fields:
                try:
                    if field != "_id":
                    if field != '_id':
                        field = field.db_field
                except AttributeError:
                    pass
@@ -845,49 +841,53 @@ class BaseDocument(object):
        return spec

    @classmethod
    def _unique_with_indexes(cls, namespace=""):
        """
        Find and set unique indexes
        """
    def _unique_with_indexes(cls, namespace=''):
        """Find unique indexes in the document schema and return them."""
        unique_indexes = []
        for field_name, field in cls._fields.items():
            sparse = field.sparse

            # Generate a list of indexes needed by uniqueness constraints
            if field.unique:
                unique_fields = [field.db_field]

                # Add any unique_with fields to the back of the index spec
                if field.unique_with:
                    if isinstance(field.unique_with, basestring):
                    if isinstance(field.unique_with, six.string_types):
                        field.unique_with = [field.unique_with]

                    # Convert unique_with field names to real field names
                    unique_with = []
                    for other_name in field.unique_with:
                        parts = other_name.split('.')

                        # Lookup real name
                        parts = cls._lookup_field(parts)
                        name_parts = [part.db_field for part in parts]
                        unique_with.append('.'.join(name_parts))

                        # Unique field should be required
                        parts[-1].required = True
                        sparse = (not sparse and
                                  parts[-1].name not in cls.__dict__)

                    unique_fields += unique_with

                # Add the new index to the list
                fields = [("%s%s" % (namespace, f), pymongo.ASCENDING)
                          for f in unique_fields]
                fields = [
                    ('%s%s' % (namespace, f), pymongo.ASCENDING)
                    for f in unique_fields
                ]
                index = {'fields': fields, 'unique': True, 'sparse': sparse}
                unique_indexes.append(index)

            if field.__class__.__name__ == "ListField":
            if field.__class__.__name__ == 'ListField':
                field = field.field

            # Grab any embedded document field unique indexes
            if (field.__class__.__name__ == "EmbeddedDocumentField" and
            if (field.__class__.__name__ == 'EmbeddedDocumentField' and
                    field.document_type != cls):
                field_namespace = "%s." % field_name
                field_namespace = '%s.' % field_name
                doc_cls = field.document_type
                unique_indexes += doc_cls._unique_with_indexes(field_namespace)
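A hedged sketch of the kind of spec `_unique_with_indexes` produces, assuming a hypothetical document using `unique_with`:

from mongoengine import Document, StringField

class User(Document):
    first_name = StringField(unique_with='last_name')
    last_name = StringField()

# User._unique_with_indexes() would return something like:
# [{'fields': [('first_name', 1), ('last_name', 1)],
#   'unique': True, 'sparse': False}]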
@@ -899,8 +899,9 @@ class BaseDocument(object):
        geo_indices = []
        inspected.append(cls)

        geo_field_type_names = ["EmbeddedDocumentField", "GeoPointField",
                                "PointField", "LineStringField", "PolygonField"]
        geo_field_type_names = ('EmbeddedDocumentField', 'GeoPointField',
                                'PointField', 'LineStringField',
                                'PolygonField')

        geo_field_types = tuple([_import_class(field)
                                 for field in geo_field_type_names])
@@ -908,32 +909,68 @@ class BaseDocument(object):
        for field in cls._fields.values():
            if not isinstance(field, geo_field_types):
                continue

            if hasattr(field, 'document_type'):
                field_cls = field.document_type
                if field_cls in inspected:
                    continue

                if hasattr(field_cls, '_geo_indices'):
                    geo_indices += field_cls._geo_indices(
                        inspected, parent_field=field.db_field)
            elif field._geo_index:
                field_name = field.db_field
                if parent_field:
                    field_name = "%s.%s" % (parent_field, field_name)
                geo_indices.append({'fields':
                                    [(field_name, field._geo_index)]})
                    field_name = '%s.%s' % (parent_field, field_name)
                geo_indices.append({
                    'fields': [(field_name, field._geo_index)]
                })

        return geo_indices

    @classmethod
    def _lookup_field(cls, parts):
        """Lookup a field based on its attribute and return a list containing
        the field's parents and the field.
        """
        """Given the path to a given field, return a list containing
        the Field object associated with that field and all of its parent
        Field objects.

        ListField = _import_class("ListField")
        Args:
            parts (str, list, or tuple) - path to the field. Should be a
            string for simple fields existing on this document or a list
            of strings for a field that exists deeper in embedded documents.

        Returns:
            A list of Field instances for fields that were found or
            strings for sub-fields that weren't.

        Example:
            >>> user._lookup_field('name')
            [<mongoengine.fields.StringField at 0x1119bff50>]

            >>> user._lookup_field('roles')
            [<mongoengine.fields.EmbeddedDocumentListField at 0x1119ec250>]

            >>> user._lookup_field(['roles', 'role'])
            [<mongoengine.fields.EmbeddedDocumentListField at 0x1119ec250>,
            <mongoengine.fields.StringField at 0x1119ec050>]

            >>> user._lookup_field('doesnt_exist')
            raises LookUpError

            >>> user._lookup_field(['roles', 'doesnt_exist'])
            [<mongoengine.fields.EmbeddedDocumentListField at 0x1119ec250>,
            'doesnt_exist']

        """
        # TODO this method is WAY too complicated. Simplify it.
        # TODO don't think returning a string for embedded non-existent fields is desired

        ListField = _import_class('ListField')
        DynamicField = _import_class('DynamicField')

        if not isinstance(parts, (list, tuple)):
            parts = [parts]

        fields = []
        field = None
@@ -943,16 +980,17 @@ class BaseDocument(object):
                fields.append(field_name)
                continue

            # Look up first field from the document
            if field is None:
                # Look up first field from the document
                if field_name == 'pk':
                    # Deal with "primary key" alias
                    field_name = cls._meta['id_field']

                if field_name in cls._fields:
                    field = cls._fields[field_name]
                elif cls._dynamic:
                    field = DynamicField(db_field=field_name)
                elif cls._meta.get("allow_inheritance", False) or cls._meta.get("abstract", False):
                elif cls._meta.get('allow_inheritance') or cls._meta.get('abstract', False):
                    # 744: in case the field is defined in a subclass
                    for subcls in cls.__subclasses__():
                        try:
@@ -965,35 +1003,55 @@ class BaseDocument(object):
                    else:
                        raise LookUpError('Cannot resolve field "%s"' % field_name)
                else:
                    raise LookUpError('Cannot resolve field "%s"'
                                      % field_name)
                    raise LookUpError('Cannot resolve field "%s"' % field_name)
            else:
                ReferenceField = _import_class('ReferenceField')
                GenericReferenceField = _import_class('GenericReferenceField')

                # If previous field was a reference, throw an error (we
                # cannot look up fields that are on references).
                if isinstance(field, (ReferenceField, GenericReferenceField)):
                    raise LookUpError('Cannot perform join in mongoDB: %s' %
                                      '__'.join(parts))

                # If the parent field has a "field" attribute which has a
                # lookup_member method, call it to find the field
                # corresponding to this iteration.
                if hasattr(getattr(field, 'field', None), 'lookup_member'):
                    new_field = field.field.lookup_member(field_name)

                # If the parent field is a DynamicField or if it's part of
                # a DynamicDocument, mark current field as a DynamicField
                # with db_name equal to the field name.
                elif cls._dynamic and (isinstance(field, DynamicField) or
                        getattr(getattr(field, 'document_type', None), '_dynamic', None)):
                    new_field = DynamicField(db_field=field_name)

                # Else, try to use the parent field's lookup_member method
                # to find the subfield.
                elif hasattr(field, 'lookup_member'):
                    new_field = field.lookup_member(field_name)

                # Raise a LookUpError if all the other conditions failed.
                else:
                    # Look up subfield on the previous field or raise
                    try:
                        new_field = field.lookup_member(field_name)
                    except AttributeError:
                        raise LookUpError('Cannot resolve subfield or operator {} '
                                          'on the field {}'.format(
                                              field_name, field.name))
                    raise LookUpError(
                        'Cannot resolve subfield or operator {} '
                        'on the field {}'.format(field_name, field.name)
                    )

                # If current field still wasn't found and the parent field
                # is a ComplexBaseField, add the name current field name and
                # move on.
                if not new_field and isinstance(field, ComplexBaseField):
                    fields.append(field_name)
                    continue
                elif not new_field:
                    raise LookUpError('Cannot resolve field "%s"'
                                      % field_name)
                    raise LookUpError('Cannot resolve field "%s"' % field_name)

            field = new_field  # update field to the new field type

            fields.append(field)

        return fields

    @classmethod
@@ -4,21 +4,17 @@ import weakref

from bson import DBRef, ObjectId, SON
import pymongo
import six

from mongoengine.base.common import ALLOW_INHERITANCE
from mongoengine.base.datastructures import (
    BaseDict, BaseList, EmbeddedDocumentList
)
from mongoengine.base.common import UPDATE_OPERATORS
from mongoengine.base.datastructures import (BaseDict, BaseList,
                                             EmbeddedDocumentList)
from mongoengine.common import _import_class
from mongoengine.errors import ValidationError

__all__ = ("BaseField", "ComplexBaseField",
           "ObjectIdField", "GeoJsonBaseField")


UPDATE_OPERATORS = set(['set', 'unset', 'inc', 'dec', 'pop', 'push',
                        'push_all', 'pull', 'pull_all', 'add_to_set',
                        'set_on_insert', 'min', 'max'])
__all__ = ('BaseField', 'ComplexBaseField', 'ObjectIdField',
           'GeoJsonBaseField')


class BaseField(object):
@@ -73,7 +69,7 @@ class BaseField(object):
        self.db_field = (db_field or name) if not primary_key else '_id'

        if name:
            msg = "Fields' 'name' attribute deprecated in favour of 'db_field'"
            msg = 'Field\'s "name" attribute deprecated in favour of "db_field"'
            warnings.warn(msg, DeprecationWarning)
        self.required = required or primary_key
        self.default = default
@@ -89,7 +85,7 @@ class BaseField(object):
        # Detect and report conflicts between metadata and base properties.
        conflicts = set(dir(self)) & set(kwargs)
        if conflicts:
            raise TypeError("%s already has attribute(s): %s" % (
            raise TypeError('%s already has attribute(s): %s' % (
                self.__class__.__name__, ', '.join(conflicts)))

        # Assign metadata to the instance
@@ -147,25 +143,21 @@ class BaseField(object):
            v._instance = weakref.proxy(instance)
        instance._data[self.name] = value

    def error(self, message="", errors=None, field_name=None):
        """Raises a ValidationError.
        """
    def error(self, message='', errors=None, field_name=None):
        """Raise a ValidationError."""
        field_name = field_name if field_name else self.name
        raise ValidationError(message, errors=errors, field_name=field_name)

    def to_python(self, value):
        """Convert a MongoDB-compatible type to a Python type.
        """
        """Convert a MongoDB-compatible type to a Python type."""
        return value

    def to_mongo(self, value):
        """Convert a Python type to a MongoDB-compatible type.
        """
        """Convert a Python type to a MongoDB-compatible type."""
        return self.to_python(value)

    def _to_mongo_safe_call(self, value, use_db_field=True, fields=None):
        """A helper method to call to_mongo with proper inputs
        """
        """Helper method to call to_mongo with proper inputs."""
        f_inputs = self.to_mongo.__code__.co_varnames
        ex_vars = {}
        if 'fields' in f_inputs:
@@ -177,15 +169,13 @@ class BaseField(object):
        return self.to_mongo(value, **ex_vars)

    def prepare_query_value(self, op, value):
        """Prepare a value that is being used in a query for PyMongo.
        """
        """Prepare a value that is being used in a query for PyMongo."""
        if op in UPDATE_OPERATORS:
            self.validate(value)
        return value

    def validate(self, value, clean=True):
        """Perform validation on a value.
        """
        """Perform validation on a value."""
        pass

    def _validate_choices(self, value):
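A hedged illustration of `prepare_query_value` above: validation only runs when the operator is one of the update operators now imported from `mongoengine.base.common` (the field and values are hypothetical):

from mongoengine.fields import StringField

field = StringField()
field.prepare_query_value('set', 'Ross')   # 'set' is in UPDATE_OPERATORS -> value is validated
field.prepare_query_value('ne', 'Ross')    # not an update operator -> value passed through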
@@ -200,11 +190,13 @@ class BaseField(object):
            if isinstance(value, (Document, EmbeddedDocument)):
                if not any(isinstance(value, c) for c in choice_list):
                    self.error(
                        'Value must be instance of %s' % unicode(choice_list)
                        'Value must be an instance of %s' % (
                            six.text_type(choice_list)
                        )
                    )
            # Choices which are types other than Documents
            elif value not in choice_list:
                self.error('Value must be one of %s' % unicode(choice_list))
                self.error('Value must be one of %s' % six.text_type(choice_list))

    def _validate(self, value, **kwargs):
        # Check the Choices Constraint
@@ -247,8 +239,7 @@ class ComplexBaseField(BaseField):
    field = None

    def __get__(self, instance, owner):
        """Descriptor to automatically dereference references.
        """
        """Descriptor to automatically dereference references."""
        if instance is None:
            # Document class being used rather than a document object
            return self
@@ -260,7 +251,7 @@ class ComplexBaseField(BaseField):
                     (self.field is None or isinstance(self.field,
                                                       (GenericReferenceField, ReferenceField))))

        _dereference = _import_class("DeReference")()
        _dereference = _import_class('DeReference')()

        self._auto_dereference = instance._fields[self.name]._auto_dereference
        if instance._initialised and dereference and instance._data.get(self.name):
@@ -295,9 +286,8 @@ class ComplexBaseField(BaseField):
        return value

    def to_python(self, value):
        """Convert a MongoDB-compatible type to a Python type.
        """
        if isinstance(value, basestring):
        """Convert a MongoDB-compatible type to a Python type."""
        if isinstance(value, six.string_types):
            return value

        if hasattr(value, 'to_python'):
@@ -307,14 +297,14 @@ class ComplexBaseField(BaseField):
        if not hasattr(value, 'items'):
            try:
                is_list = True
                value = dict([(k, v) for k, v in enumerate(value)])
                value = {k: v for k, v in enumerate(value)}
            except TypeError:  # Not iterable return the value
                return value

        if self.field:
            self.field._auto_dereference = self._auto_dereference
            value_dict = dict([(key, self.field.to_python(item))
                               for key, item in value.items()])
            value_dict = {key: self.field.to_python(item)
                          for key, item in value.items()}
        else:
            Document = _import_class('Document')
            value_dict = {}
@@ -337,13 +327,12 @@ class ComplexBaseField(BaseField):
        return value_dict

    def to_mongo(self, value, use_db_field=True, fields=None):
        """Convert a Python type to a MongoDB-compatible type.
        """
        Document = _import_class("Document")
        EmbeddedDocument = _import_class("EmbeddedDocument")
        GenericReferenceField = _import_class("GenericReferenceField")
        """Convert a Python type to a MongoDB-compatible type."""
        Document = _import_class('Document')
        EmbeddedDocument = _import_class('EmbeddedDocument')
        GenericReferenceField = _import_class('GenericReferenceField')

        if isinstance(value, basestring):
        if isinstance(value, six.string_types):
            return value

        if hasattr(value, 'to_mongo'):
@@ -360,13 +349,15 @@ class ComplexBaseField(BaseField):
        if not hasattr(value, 'items'):
            try:
                is_list = True
                value = dict([(k, v) for k, v in enumerate(value)])
                value = {k: v for k, v in enumerate(value)}
            except TypeError:  # Not iterable return the value
                return value

        if self.field:
            value_dict = dict([(key, self.field._to_mongo_safe_call(item, use_db_field, fields))
                               for key, item in value.iteritems()])
            value_dict = {
                key: self.field._to_mongo_safe_call(item, use_db_field, fields)
                for key, item in value.iteritems()
            }
        else:
            value_dict = {}
            for k, v in value.iteritems():
@@ -380,9 +371,7 @@ class ComplexBaseField(BaseField):
                    # any _cls data so make it a generic reference allows
                    # us to dereference
                    meta = getattr(v, '_meta', {})
                    allow_inheritance = (
                        meta.get('allow_inheritance', ALLOW_INHERITANCE)
                        is True)
                    allow_inheritance = meta.get('allow_inheritance')
                    if not allow_inheritance and not self.field:
                        value_dict[k] = GenericReferenceField().to_mongo(v)
                    else:
@@ -404,8 +393,7 @@ class ComplexBaseField(BaseField):
        return value_dict

    def validate(self, value):
        """If field is provided ensure the value is valid.
        """
        """If field is provided ensure the value is valid."""
        errors = {}
        if self.field:
            if hasattr(value, 'iteritems') or hasattr(value, 'items'):
@@ -415,9 +403,9 @@ class ComplexBaseField(BaseField):
            for k, v in sequence:
                try:
                    self.field._validate(v)
                except ValidationError, error:
                except ValidationError as error:
                    errors[k] = error.errors or error
                except (ValueError, AssertionError), error:
                except (ValueError, AssertionError) as error:
                    errors[k] = error

        if errors:
@@ -443,8 +431,7 @@ class ComplexBaseField(BaseField):


class ObjectIdField(BaseField):
    """A field wrapper around MongoDB's ObjectIds.
    """
    """A field wrapper around MongoDB's ObjectIds."""

    def to_python(self, value):
        try:
@@ -457,10 +444,10 @@ class ObjectIdField(BaseField):
    def to_mongo(self, value):
        if not isinstance(value, ObjectId):
            try:
                return ObjectId(unicode(value))
            except Exception, e:
                return ObjectId(six.text_type(value))
            except Exception as e:
                # e.message attribute has been deprecated since Python 2.6
                self.error(unicode(e))
                self.error(six.text_type(e))
        return value

    def prepare_query_value(self, op, value):
@@ -468,7 +455,7 @@ class ObjectIdField(BaseField):

    def validate(self, value):
        try:
            ObjectId(unicode(value))
            ObjectId(six.text_type(value))
        except Exception:
            self.error('Invalid Object ID')

@@ -480,21 +467,20 @@ class GeoJsonBaseField(BaseField):
    """

    _geo_index = pymongo.GEOSPHERE
    _type = "GeoBase"
    _type = 'GeoBase'

    def __init__(self, auto_index=True, *args, **kwargs):
        """
        :param bool auto_index: Automatically create a "2dsphere" index.\
        :param bool auto_index: Automatically create a '2dsphere' index.\
            Defaults to `True`.
        """
        self._name = "%sField" % self._type
        self._name = '%sField' % self._type
        if not auto_index:
            self._geo_index = False
        super(GeoJsonBaseField, self).__init__(*args, **kwargs)

    def validate(self, value):
        """Validate the GeoJson object based on its type
        """
        """Validate the GeoJson object based on its type."""
        if isinstance(value, dict):
            if set(value.keys()) == set(['type', 'coordinates']):
                if value['type'] != self._type:
@@ -509,7 +495,7 @@ class GeoJsonBaseField(BaseField):
            self.error('%s can only accept lists of [x, y]' % self._name)
            return

        validate = getattr(self, "_validate_%s" % self._type.lower())
        validate = getattr(self, '_validate_%s' % self._type.lower())
        error = validate(value)
        if error:
            self.error(error)
@@ -522,7 +508,7 @@ class GeoJsonBaseField(BaseField):
        try:
            value[0][0][0]
        except (TypeError, IndexError):
            return "Invalid Polygon must contain at least one valid linestring"
            return 'Invalid Polygon must contain at least one valid linestring'

        errors = []
        for val in value:
@@ -533,12 +519,12 @@ class GeoJsonBaseField(BaseField):
                errors.append(error)
        if errors:
            if top_level:
                return "Invalid Polygon:\n%s" % ", ".join(errors)
                return 'Invalid Polygon:\n%s' % ', '.join(errors)
            else:
                return "%s" % ", ".join(errors)
                return '%s' % ', '.join(errors)

    def _validate_linestring(self, value, top_level=True):
        """Validates a linestring"""
        """Validate a linestring."""
        if not isinstance(value, (list, tuple)):
            return 'LineStrings must contain list of coordinate pairs'

@@ -546,7 +532,7 @@ class GeoJsonBaseField(BaseField):
        try:
            value[0][0]
        except (TypeError, IndexError):
            return "Invalid LineString must contain at least one valid point"
            return 'Invalid LineString must contain at least one valid point'

        errors = []
        for val in value:
@@ -555,19 +541,19 @@ class GeoJsonBaseField(BaseField):
            errors.append(error)
        if errors:
            if top_level:
                return "Invalid LineString:\n%s" % ", ".join(errors)
                return 'Invalid LineString:\n%s' % ', '.join(errors)
            else:
                return "%s" % ", ".join(errors)
                return '%s' % ', '.join(errors)

    def _validate_point(self, value):
        """Validate each set of coords"""
        if not isinstance(value, (list, tuple)):
            return 'Points must be a list of coordinate pairs'
        elif not len(value) == 2:
            return "Value (%s) must be a two-dimensional point" % repr(value)
            return 'Value (%s) must be a two-dimensional point' % repr(value)
        elif (not isinstance(value[0], (float, int)) or
                not isinstance(value[1], (float, int))):
            return "Both values (%s) in point must be float or int" % repr(value)
            return 'Both values (%s) in point must be float or int' % repr(value)
    def _validate_multipoint(self, value):
        if not isinstance(value, (list, tuple)):
@@ -577,7 +563,7 @@ class GeoJsonBaseField(BaseField):
        try:
            value[0][0]
        except (TypeError, IndexError):
            return "Invalid MultiPoint must contain at least one valid point"
            return 'Invalid MultiPoint must contain at least one valid point'

        errors = []
        for point in value:
@@ -586,7 +572,7 @@ class GeoJsonBaseField(BaseField):
            errors.append(error)

        if errors:
            return "%s" % ", ".join(errors)
            return '%s' % ', '.join(errors)

    def _validate_multilinestring(self, value, top_level=True):
        if not isinstance(value, (list, tuple)):
@@ -596,7 +582,7 @@ class GeoJsonBaseField(BaseField):
        try:
            value[0][0][0]
        except (TypeError, IndexError):
            return "Invalid MultiLineString must contain at least one valid linestring"
            return 'Invalid MultiLineString must contain at least one valid linestring'

        errors = []
        for linestring in value:
@@ -606,9 +592,9 @@ class GeoJsonBaseField(BaseField):

        if errors:
            if top_level:
                return "Invalid MultiLineString:\n%s" % ", ".join(errors)
                return 'Invalid MultiLineString:\n%s' % ', '.join(errors)
            else:
                return "%s" % ", ".join(errors)
                return '%s' % ', '.join(errors)

    def _validate_multipolygon(self, value):
        if not isinstance(value, (list, tuple)):
@@ -618,7 +604,7 @@ class GeoJsonBaseField(BaseField):
        try:
            value[0][0][0][0]
        except (TypeError, IndexError):
            return "Invalid MultiPolygon must contain at least one valid Polygon"
            return 'Invalid MultiPolygon must contain at least one valid Polygon'

        errors = []
        for polygon in value:
@@ -627,9 +613,9 @@ class GeoJsonBaseField(BaseField):
            errors.append(error)

        if errors:
            return "Invalid MultiPolygon:\n%s" % ", ".join(errors)
            return 'Invalid MultiPolygon:\n%s' % ', '.join(errors)

    def to_mongo(self, value):
        if isinstance(value, dict):
            return value
        return SON([("type", self._type), ("coordinates", value)])
        return SON([('type', self._type), ('coordinates', value)])
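The `to_mongo` behavior above, illustrated with a hedged example using a `PointField` (a real subclass whose `_type` is 'Point'; the coordinates are hypothetical):

from mongoengine.fields import PointField

PointField().to_mongo([40.34, 18.17])
# -> SON([('type', 'Point'), ('coordinates', [40.34, 18.17])])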
@@ -1,10 +1,11 @@
import warnings

from mongoengine.base.common import ALLOW_INHERITANCE, _document_registry
import six

from mongoengine.base.common import _document_registry
from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField
from mongoengine.common import _import_class
from mongoengine.errors import InvalidDocumentError
from mongoengine.python_support import PY3
from mongoengine.queryset import (DO_NOTHING, DoesNotExist,
                                  MultipleObjectsReturned,
                                  QuerySetManager)
@@ -45,7 +46,8 @@ class DocumentMetaclass(type):
        attrs['_meta'] = meta
        attrs['_meta']['abstract'] = False  # 789: EmbeddedDocument shouldn't inherit abstract

        if attrs['_meta'].get('allow_inheritance', ALLOW_INHERITANCE):
        # If allow_inheritance is True, add a "_cls" string field to the attrs
        if attrs['_meta'].get('allow_inheritance'):
            StringField = _import_class('StringField')
            attrs['_cls'] = StringField()

@@ -87,16 +89,17 @@ class DocumentMetaclass(type):
        # Ensure no duplicate db_fields
        duplicate_db_fields = [k for k, v in field_names.items() if v > 1]
        if duplicate_db_fields:
            msg = ("Multiple db_fields defined for: %s " %
                   ", ".join(duplicate_db_fields))
            msg = ('Multiple db_fields defined for: %s ' %
                   ', '.join(duplicate_db_fields))
            raise InvalidDocumentError(msg)

        # Set _fields and db_field maps
        attrs['_fields'] = doc_fields
        attrs['_db_field_map'] = dict([(k, getattr(v, 'db_field', k))
                                       for k, v in doc_fields.iteritems()])
        attrs['_reverse_db_field_map'] = dict(
            (v, k) for k, v in attrs['_db_field_map'].iteritems())
        attrs['_db_field_map'] = {k: getattr(v, 'db_field', k)
                                  for k, v in doc_fields.items()}
        attrs['_reverse_db_field_map'] = {
            v: k for k, v in attrs['_db_field_map'].items()
        }

        attrs['_fields_ordered'] = tuple(i[1] for i in sorted(
            (v.creation_counter, v.name)
@@ -116,10 +119,8 @@ class DocumentMetaclass(type):
            if hasattr(base, '_meta'):
                # Warn if allow_inheritance isn't set and prevent
                # inheritance of classes where inheritance is set to False
                allow_inheritance = base._meta.get('allow_inheritance',
                                                   ALLOW_INHERITANCE)
                if (allow_inheritance is not True and
                        not base._meta.get('abstract')):
                allow_inheritance = base._meta.get('allow_inheritance')
                if not allow_inheritance and not base._meta.get('abstract'):
                    raise ValueError('Document %s may not be subclassed' %
                                     base.__name__)

@@ -161,7 +162,7 @@ class DocumentMetaclass(type):
        # module continues to use im_func and im_self, so the code below
        # copies __func__ into im_func and __self__ into im_self for
        # classmethod objects in Document derived classes.
        if PY3:
        if six.PY3:
            for val in new_class.__dict__.values():
                if isinstance(val, classmethod):
                    f = val.__get__(new_class)
@@ -179,11 +180,11 @@ class DocumentMetaclass(type):
                if isinstance(f, CachedReferenceField):

                    if issubclass(new_class, EmbeddedDocument):
                        raise InvalidDocumentError(
                            "CachedReferenceFields is not allowed in EmbeddedDocuments")
                        raise InvalidDocumentError('CachedReferenceFields is not '
                                                   'allowed in EmbeddedDocuments')
                    if not f.document_type:
                        raise InvalidDocumentError(
                            "Document is not available to sync")
                            'Document is not available to sync')

                    if f.auto_sync:
                        f.start_listener()
@@ -195,8 +196,8 @@ class DocumentMetaclass(type):
                                          'reverse_delete_rule',
                                          DO_NOTHING)
                if isinstance(f, DictField) and delete_rule != DO_NOTHING:
                    msg = ("Reverse delete rules are not supported "
                           "for %s (field: %s)" %
                    msg = ('Reverse delete rules are not supported '
                           'for %s (field: %s)' %
                           (field.__class__.__name__, field.name))
                    raise InvalidDocumentError(msg)

@@ -204,16 +205,16 @@ class DocumentMetaclass(type):

                if delete_rule != DO_NOTHING:
                    if issubclass(new_class, EmbeddedDocument):
                        msg = ("Reverse delete rules are not supported for "
                               "EmbeddedDocuments (field: %s)" % field.name)
                        msg = ('Reverse delete rules are not supported for '
                               'EmbeddedDocuments (field: %s)' % field.name)
                        raise InvalidDocumentError(msg)
                    f.document_type.register_delete_rule(new_class,
                                                         field.name, delete_rule)

                if (field.name and hasattr(Document, field.name) and
                        EmbeddedDocument not in new_class.mro()):
                    msg = ("%s is a document method and not a valid "
                           "field name" % field.name)
                    msg = ('%s is a document method and not a valid '
                           'field name' % field.name)
                    raise InvalidDocumentError(msg)

        return new_class
@@ -271,6 +272,11 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
            'index_drop_dups': False,
            'index_opts': None,
            'delete_rules': None,

            # allow_inheritance can be True, False, and None. True means
            # "allow inheritance", False means "don't allow inheritance",
            # None means "do whatever your parent does, or don't allow
            # inheritance if you're a top-level class".
            'allow_inheritance': None,
        }
        attrs['_is_base_cls'] = True
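To illustrate the tri-state semantics described in the comment above (the documents are hypothetical):

from mongoengine import Document

class Animal(Document):
    meta = {'allow_inheritance': True}   # subclasses allowed; '_cls' is stored

class Dog(Animal):                       # inherits allow_inheritance from Animal
    pass

class Invoice(Document):                 # meta omitted -> allow_inheritance is None,
    pass                                 # treated as "don't allow" for a top-level class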
@@ -303,7 +309,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
        # If parent wasn't an abstract class
        if (parent_doc_cls and 'collection' in attrs.get('_meta', {}) and
                not parent_doc_cls._meta.get('abstract', True)):
            msg = "Trying to set a collection on a subclass (%s)" % name
            msg = 'Trying to set a collection on a subclass (%s)' % name
            warnings.warn(msg, SyntaxWarning)
            del attrs['_meta']['collection']

@@ -311,7 +317,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
        if attrs.get('_is_base_cls') or attrs['_meta'].get('abstract'):
            if (parent_doc_cls and
                    not parent_doc_cls._meta.get('abstract', False)):
                msg = "Abstract document cannot have non-abstract base"
                msg = 'Abstract document cannot have non-abstract base'
                raise ValueError(msg)
            return super_new(cls, name, bases, attrs)

@@ -334,12 +340,16 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):

        meta.merge(attrs.get('_meta', {}))  # Top level meta

        # Only simple classes (direct subclasses of Document)
        # may set allow_inheritance to False
        # Only simple classes (i.e. direct subclasses of Document) may set
        # allow_inheritance to False. If the base Document allows inheritance,
        # none of its subclasses can override allow_inheritance to False.
        simple_class = all([b._meta.get('abstract')
                            for b in flattened_bases if hasattr(b, '_meta')])
        if (not simple_class and meta['allow_inheritance'] is False and
                not meta['abstract']):
        if (
            not simple_class and
            meta['allow_inheritance'] is False and
            not meta['abstract']
        ):
            raise ValueError('Only direct subclasses of Document may set '
                             '"allow_inheritance" to False')