Improve overall code quality (based on pylint findings)
parent 55fc04752a
commit adfb039ba6
@@ -6,6 +6,11 @@ Development
 ***********
 (Fill this out whenever you introduce breaking changes to MongoEngine)
 
+URLField's constructor no longer takes `verify_exists`
+
+0.15.0
+******
+
 0.14.0
 ******
 This release includes a few bug fixes and a significant code cleanup. The most
@@ -19,7 +19,7 @@ def get_document(name):
         # Possible old style name
         single_end = name.split('.')[-1]
         compound_end = '.%s' % single_end
-        possible_match = [k for k in _document_registry.keys()
+        possible_match = [k for k in _document_registry
                           if k.endswith(compound_end) or k == single_end]
         if len(possible_match) == 1:
             doc = _document_registry.get(possible_match.pop(), None)
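Note (reviewer aside, not part of the commit): iterating a dict already yields its keys, which is why pylint flags the explicit .keys() call here; the two comprehensions below are equivalent (hypothetical registry contents):

registry = {'User': object, 'Blog.Post': object}

with_keys = [k for k in registry.keys() if k.endswith('.Post')]
without_keys = [k for k in registry if k.endswith('.Post')]
assert with_keys == without_keys == ['Blog.Post']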
@@ -1,4 +1,3 @@
-import itertools
 import weakref
 
 from bson import DBRef
@@ -18,10 +17,9 @@ class BaseDict(dict):
     _name = None
 
     def __init__(self, dict_items, instance, name):
-        Document = _import_class('Document')
-        EmbeddedDocument = _import_class('EmbeddedDocument')
+        BaseDocument = _import_class('BaseDocument')
 
-        if isinstance(instance, (Document, EmbeddedDocument)):
+        if isinstance(instance, BaseDocument):
             self._instance = weakref.proxy(instance)
         self._name = name
         super(BaseDict, self).__init__(dict_items)
@@ -32,11 +30,11 @@ class BaseDict(dict):
         EmbeddedDocument = _import_class('EmbeddedDocument')
         if isinstance(value, EmbeddedDocument) and value._instance is None:
             value._instance = self._instance
-        elif not isinstance(value, BaseDict) and isinstance(value, dict):
+        elif isinstance(value, dict) and not isinstance(value, BaseDict):
             value = BaseDict(value, None, '%s.%s' % (self._name, key))
             super(BaseDict, self).__setitem__(key, value)
             value._instance = self._instance
-        elif not isinstance(value, BaseList) and isinstance(value, list):
+        elif isinstance(value, list) and not isinstance(value, BaseList):
             value = BaseList(value, None, '%s.%s' % (self._name, key))
             super(BaseDict, self).__setitem__(key, value)
             value._instance = self._instance
@@ -103,10 +101,9 @@ class BaseList(list):
     _name = None
 
     def __init__(self, list_items, instance, name):
-        Document = _import_class('Document')
-        EmbeddedDocument = _import_class('EmbeddedDocument')
+        BaseDocument = _import_class('BaseDocument')
 
-        if isinstance(instance, (Document, EmbeddedDocument)):
+        if isinstance(instance, BaseDocument):
             self._instance = weakref.proxy(instance)
         self._name = name
         super(BaseList, self).__init__(list_items)
@@ -117,11 +114,11 @@ class BaseList(list):
         EmbeddedDocument = _import_class('EmbeddedDocument')
         if isinstance(value, EmbeddedDocument) and value._instance is None:
             value._instance = self._instance
-        elif not isinstance(value, BaseDict) and isinstance(value, dict):
+        elif isinstance(value, dict) and not isinstance(value, BaseDict):
             value = BaseDict(value, None, '%s.%s' % (self._name, key))
             super(BaseList, self).__setitem__(key, value)
             value._instance = self._instance
-        elif not isinstance(value, BaseList) and isinstance(value, list):
+        elif isinstance(value, list) and not isinstance(value, BaseList):
             value = BaseList(value, None, '%s.%s' % (self._name, key))
             super(BaseList, self).__setitem__(key, value)
             value._instance = self._instance
@@ -91,9 +91,6 @@ class BaseDocument(object):
             value = getattr(self, key, None)
             setattr(self, key, value)
 
-        if '_cls' not in values:
-            self._cls = self._class_name
-
         # Set passed values after initialisation
         if self._dynamic:
             dynamic_data = {}
@@ -18,14 +18,14 @@ class DocumentMetaclass(type):
     """Metaclass for all documents."""
 
     # TODO lower complexity of this method
-    def __new__(cls, name, bases, attrs):
-        flattened_bases = cls._get_bases(bases)
-        super_new = super(DocumentMetaclass, cls).__new__
+    def __new__(mcs, name, bases, attrs):
+        flattened_bases = mcs._get_bases(bases)
+        super_new = super(DocumentMetaclass, mcs).__new__
 
         # If a base class just call super
         metaclass = attrs.get('my_metaclass')
         if metaclass and issubclass(metaclass, DocumentMetaclass):
-            return super_new(cls, name, bases, attrs)
+            return super_new(mcs, name, bases, attrs)
 
         attrs['_is_document'] = attrs.get('_is_document', False)
         attrs['_cached_reference_fields'] = []
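Note (reviewer aside, not part of the commit): the cls-to-mcs rename follows the naming convention pylint enforces for metaclasses, where the first argument of __new__ names the metaclass itself, by analogy with cls in a classmethod and self in an instance method. A minimal, hypothetical sketch of the convention:

class Meta(type):
    def __new__(mcs, name, bases, attrs):
        # 'mcs' is the metaclass itself (Meta), mirroring how 'cls'
        # names the class in an ordinary classmethod.
        attrs.setdefault('created_by_meta', True)
        return super(Meta, mcs).__new__(mcs, name, bases, attrs)


class Example(metaclass=Meta):  # Python 3 syntax, for brevity
    pass


assert Example.created_by_meta is True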
@@ -138,7 +138,7 @@ class DocumentMetaclass(type):
         attrs['_types'] = attrs['_subclasses']  # TODO depreciate _types
 
         # Create the new_class
-        new_class = super_new(cls, name, bases, attrs)
+        new_class = super_new(mcs, name, bases, attrs)
 
         # Set _subclasses
         for base in document_bases:
@@ -147,7 +147,7 @@ class DocumentMetaclass(type):
             base._types = base._subclasses  # TODO depreciate _types
 
         (Document, EmbeddedDocument, DictField,
-         CachedReferenceField) = cls._import_classes()
+         CachedReferenceField) = mcs._import_classes()
 
         if issubclass(new_class, Document):
             new_class._collection = None
@@ -219,29 +219,26 @@ class DocumentMetaclass(type):
 
         return new_class
 
-    def add_to_class(self, name, value):
-        setattr(self, name, value)
-
     @classmethod
-    def _get_bases(cls, bases):
+    def _get_bases(mcs, bases):
         if isinstance(bases, BasesTuple):
             return bases
         seen = []
-        bases = cls.__get_bases(bases)
+        bases = mcs.__get_bases(bases)
         unique_bases = (b for b in bases if not (b in seen or seen.append(b)))
         return BasesTuple(unique_bases)
 
     @classmethod
-    def __get_bases(cls, bases):
+    def __get_bases(mcs, bases):
         for base in bases:
             if base is object:
                 continue
             yield base
-            for child_base in cls.__get_bases(base.__bases__):
+            for child_base in mcs.__get_bases(base.__bases__):
                 yield child_base
 
     @classmethod
-    def _import_classes(cls):
+    def _import_classes(mcs):
         Document = _import_class('Document')
         EmbeddedDocument = _import_class('EmbeddedDocument')
         DictField = _import_class('DictField')
@@ -254,9 +251,9 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
     collection in the database.
     """
 
-    def __new__(cls, name, bases, attrs):
-        flattened_bases = cls._get_bases(bases)
-        super_new = super(TopLevelDocumentMetaclass, cls).__new__
+    def __new__(mcs, name, bases, attrs):
+        flattened_bases = mcs._get_bases(bases)
+        super_new = super(TopLevelDocumentMetaclass, mcs).__new__
 
         # Set default _meta data if base class, otherwise get user defined meta
         if attrs.get('my_metaclass') == TopLevelDocumentMetaclass:
@@ -319,7 +316,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
                     not parent_doc_cls._meta.get('abstract', False)):
                 msg = 'Abstract document cannot have non-abstract base'
                 raise ValueError(msg)
-            return super_new(cls, name, bases, attrs)
+            return super_new(mcs, name, bases, attrs)
 
         # Merge base class metas.
         # Uses a special MetaDict that handles various merging rules
@@ -360,7 +357,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
         attrs['_meta'] = meta
 
         # Call super and get the new class
-        new_class = super_new(cls, name, bases, attrs)
+        new_class = super_new(mcs, name, bases, attrs)
 
         meta = new_class._meta
 
@@ -394,7 +391,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
                                            '_auto_id_field', False)
         if not new_class._meta.get('id_field'):
             # After 0.10, find not existing names, instead of overwriting
-            id_name, id_db_name = cls.get_auto_id_names(new_class)
+            id_name, id_db_name = mcs.get_auto_id_names(new_class)
             new_class._auto_id_field = True
             new_class._meta['id_field'] = id_name
             new_class._fields[id_name] = ObjectIdField(db_field=id_db_name)
@@ -419,7 +416,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
         return new_class
 
     @classmethod
-    def get_auto_id_names(cls, new_class):
+    def get_auto_id_names(mcs, new_class):
         id_name, id_db_name = ('id', '_id')
         if id_name not in new_class._fields and \
                 id_db_name not in (v.db_field for v in new_class._fields.values()):
@@ -15,7 +15,7 @@ class LazyRegexCompiler(object):
             self._compiled_regex = re.compile(self._pattern, self._flags)
         return self._compiled_regex
 
-    def __get__(self, obj, objtype):
+    def __get__(self, instance, owner):
         return self.compiled_regex
 
     def __set__(self, instance, value):
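Note (reviewer aside, not part of the commit): instance and owner are the parameter names used by the descriptor-protocol documentation for __get__, which is what the rename aligns with; instance is the object the attribute is looked up on (None for class-level access) and owner is the class. A rough standalone sketch of the same lazy-compilation idea, assuming only the standard library:

import re


class LazyRegex(object):
    """Descriptor that compiles its pattern on first access (illustrative only)."""

    def __init__(self, pattern, flags=0):
        self._pattern = pattern
        self._flags = flags
        self._compiled = None

    def __get__(self, instance, owner):
        # instance is None when accessed on the class itself; either way we
        # hand back the lazily compiled pattern.
        if self._compiled is None:
            self._compiled = re.compile(self._pattern, self._flags)
        return self._compiled


class Validator(object):
    email = LazyRegex(r'[^@]+@[^@]+')


assert Validator.email.match('user@example.com')  # compiled on first access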
@@ -170,8 +170,8 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
         """
         if self.pk is None:
             return super(BaseDocument, self).__hash__()
-        else:
-            return hash(self.pk)
+
+        return hash(self.pk)
 
     @classmethod
     def _get_db(cls):
@@ -71,6 +71,7 @@ class ValidationError(AssertionError):
     _message = None
 
     def __init__(self, message='', **kwargs):
+        super(ValidationError, self).__init__(message)
         self.errors = kwargs.get('errors', {})
         self.field_name = kwargs.get('field_name')
         self.message = message
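Note (reviewer aside, not part of the commit): the added super().__init__(message) call addresses pylint's super-init-not-called warning; an __init__ that skips its parent's __init__ also skips whatever state the parent sets up. A generic, hypothetical illustration (deliberately using a plain base class, since exception base classes also do work in __new__):

class Base(object):
    def __init__(self, message=''):
        self.message_seen_by_base = message


class WithoutSuper(Base):
    def __init__(self, message='', **kwargs):
        # Base.__init__ never runs, so its attribute is never set.
        self.errors = kwargs.get('errors', {})


class WithSuper(Base):
    def __init__(self, message='', **kwargs):
        super(WithSuper, self).__init__(message)
        self.errors = kwargs.get('errors', {})


assert not hasattr(WithoutSuper('boom'), 'message_seen_by_base')
assert WithSuper('boom').message_seen_by_base == 'boom'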
@@ -140,8 +140,7 @@ class URLField(StringField):
         r'(?:/?|[/?]\S+)$', re.IGNORECASE)
     _URL_SCHEMES = ['http', 'https', 'ftp', 'ftps']
 
-    def __init__(self, verify_exists=False, url_regex=None, schemes=None, **kwargs):
-        self.verify_exists = verify_exists
+    def __init__(self, url_regex=None, schemes=None, **kwargs):
         self.url_regex = url_regex or self._URL_REGEX
         self.schemes = schemes or self._URL_SCHEMES
         super(URLField, self).__init__(**kwargs)
@@ -274,14 +273,14 @@ class IntField(BaseField):
     def to_python(self, value):
         try:
             value = int(value)
-        except ValueError:
+        except (TypeError, ValueError):
             pass
         return value
 
     def validate(self, value):
         try:
             value = int(value)
-        except Exception:
+        except (TypeError, ValueError):
             self.error('%s could not be converted to int' % value)
 
         if self.min_value is not None and value < self.min_value:
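Note (reviewer aside, not part of the commit): int() raises ValueError for unparsable strings but TypeError for values such as None or a list, so catching only ValueError in to_python let the TypeError escape, while the old bare except Exception in validate was broader than necessary. A quick illustration of the pattern:

def coerce_int(value):
    """Coerce to int when possible, otherwise return the value unchanged."""
    try:
        return int(value)
    except (TypeError, ValueError):
        return value


assert coerce_int('7') == 7
assert coerce_int('seven') == 'seven'  # ValueError swallowed
assert coerce_int(None) is None        # TypeError swallowed as well
assert coerce_int([1]) == [1]          # also a TypeError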
@@ -307,7 +306,7 @@ class LongField(BaseField):
     def to_python(self, value):
         try:
             value = long(value)
-        except ValueError:
+        except (TypeError, ValueError):
             pass
         return value
 
@@ -317,7 +316,7 @@ class LongField(BaseField):
     def validate(self, value):
         try:
             value = long(value)
-        except Exception:
+        except (TypeError, ValueError):
             self.error('%s could not be converted to long' % value)
 
         if self.min_value is not None and value < self.min_value:
@@ -416,7 +415,7 @@ class DecimalField(BaseField):
         # Convert to string for python 2.6 before casting to Decimal
         try:
             value = decimal.Decimal('%s' % value)
-        except decimal.InvalidOperation:
+        except (TypeError, ValueError, decimal.InvalidOperation):
             return value
         return value.quantize(decimal.Decimal('.%s' % ('0' * self.precision)), rounding=self.rounding)
 
@@ -433,7 +432,7 @@ class DecimalField(BaseField):
                 value = six.text_type(value)
             try:
                 value = decimal.Decimal(value)
-            except Exception as exc:
+            except (TypeError, ValueError, decimal.InvalidOperation) as exc:
                 self.error('Could not convert value to decimal: %s' % exc)
 
         if self.min_value is not None and value < self.min_value:
@@ -852,8 +851,7 @@ class ListField(ComplexBaseField):
 
     def validate(self, value):
         """Make sure that a list of valid fields is being used."""
-        if (not isinstance(value, (list, tuple, QuerySet)) or
-                isinstance(value, six.string_types)):
+        if not isinstance(value, (list, tuple, QuerySet)):
             self.error('Only lists and tuples may be used in a list field')
         super(ListField, self).validate(value)
 
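Note (reviewer aside, not part of the commit): the dropped isinstance(value, six.string_types) clause was redundant, because a string already fails the list/tuple/QuerySet check and therefore triggers the same error; QuerySet is omitted below for brevity:

# Strings never pass the list/tuple check, so no extra string test is needed.
for candidate in ('abc', u'abc'):
    assert not isinstance(candidate, (list, tuple))

assert isinstance([1, 2], (list, tuple))
assert isinstance((1, 2), (list, tuple))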
@@ -1953,8 +1951,7 @@ class SequenceField(BaseField):
         self.collection_name = collection_name or self.COLLECTION_NAME
         self.db_alias = db_alias or DEFAULT_CONNECTION_NAME
         self.sequence_name = sequence_name
-        self.value_decorator = (callable(value_decorator) and
-                                value_decorator or self.VALUE_DECORATOR)
+        self.value_decorator = value_decorator if callable(value_decorator) else self.VALUE_DECORATOR
         super(SequenceField, self).__init__(*args, **kwargs)
 
     def generate(self):
@@ -2063,7 +2060,7 @@ class UUIDField(BaseField):
                 if not isinstance(value, six.string_types):
                     value = six.text_type(value)
                 return uuid.UUID(value)
-            except Exception:
+            except (ValueError, TypeError, AttributeError):
                 return original_value
         return value
 
@@ -2085,7 +2082,7 @@ class UUIDField(BaseField):
                 value = str(value)
             try:
                 uuid.UUID(value)
-            except Exception as exc:
+            except (ValueError, TypeError, AttributeError) as exc:
                 self.error('Could not convert to UUID: %s' % exc)
 
 
@@ -38,8 +38,6 @@ CASCADE = 2
 DENY = 3
 PULL = 4
 
-RE_TYPE = type(re.compile(''))
-
 
 class BaseQuerySet(object):
     """A set of results returned from a query. Wraps a MongoDB cursor,
@@ -356,7 +354,7 @@ class BaseQuerySet(object):
 
         try:
             inserted_result = insert_func(raw)
-            ids = return_one and [inserted_result.inserted_id] or inserted_result.inserted_ids
+            ids = [inserted_result.inserted_id] if return_one else inserted_result.inserted_ids
         except pymongo.errors.DuplicateKeyError as err:
             message = 'Could not save document (%s)'
             raise NotUniqueError(message % six.text_type(err))
@@ -377,14 +375,14 @@ class BaseQuerySet(object):
         if not load_bulk:
             signals.post_bulk_insert.send(
                 self._document, documents=docs, loaded=False, **signal_kwargs)
-            return return_one and ids[0] or ids
+            return ids[0] if return_one else ids
         documents = self.in_bulk(ids)
         results = []
         for obj_id in ids:
             results.append(documents.get(obj_id))
         signals.post_bulk_insert.send(
             self._document, documents=results, loaded=True, **signal_kwargs)
-        return return_one and results[0] or results
+        return results[0] if return_one else results
 
     def count(self, with_limit_and_skip=False):
         """Count the selected elements in the query.
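Note (reviewer aside, not part of the commit): cond and a or b is the pre-Python-2.5 ternary idiom and silently returns b whenever a is falsy; the conditional expressions introduced in this hunk, in the insert() hunk above it, and in the SequenceField value_decorator hunk avoid that trap. A small illustration with made-up values:

return_one = True
ids = [0]  # imagine a primary key that happens to be 0

# Old idiom: ids[0] is falsy, so the expression falls through to `ids`.
assert (return_one and ids[0] or ids) == [0]

# Conditional expression: returns what was actually asked for.
assert (ids[0] if return_one else ids) == 0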
@@ -974,11 +972,10 @@ class BaseQuerySet(object):
         # explicitly included, and then more complicated operators such as
         # $slice.
         def _sort_key(field_tuple):
-            key, value = field_tuple
-            if isinstance(value, (int)):
+            _, value = field_tuple
+            if isinstance(value, int):
                 return value  # 0 for exclusion, 1 for inclusion
-            else:
-                return 2  # so that complex values appear last
+            return 2  # so that complex values appear last
 
         fields = sorted(cleaned_fields, key=_sort_key)
 
@@ -186,10 +186,3 @@ class QuerySetNoCache(BaseQuerySet):
         queryset = self.clone()
         queryset.rewind()
         return queryset
-
-
-class QuerySetNoDeRef(QuerySet):
-    """Special no_dereference QuerySet"""
-
-    def __dereference(items, max_depth=1, instance=None, name=None):
-        return items
@@ -429,7 +429,6 @@ def _infer_geometry(value):
                                 'type and coordinates keys')
     elif isinstance(value, (list, set)):
         # TODO: shouldn't we test value[0][0][0][0] to see if it is MultiPolygon?
-        # TODO: should both TypeError and IndexError be alike interpreted?
 
         try:
             value[0][0][0]