Improve the health of this package (#1428)

Stefan Wójcik
2016-12-11 18:49:21 -05:00
committed by GitHub
parent 3135b456be
commit 835d3c3d18
60 changed files with 1564 additions and 1893 deletions


@@ -12,9 +12,10 @@ from bson.code import Code
import pymongo
import pymongo.errors
from pymongo.common import validate_read_preference
import six
from mongoengine import signals
from mongoengine.base.common import get_document
from mongoengine.base import get_document
from mongoengine.common import _import_class
from mongoengine.connection import get_db
from mongoengine.context_managers import switch_db
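The newly imported six module backs the Python 2/3 string handling used throughout this file (six.text_type, six.string_types). A minimal sketch of what those aliases resolve to on each interpreter (illustrative only, not part of the commit):

import six

# Python 2: six.text_type is unicode, six.string_types is (basestring,)
# Python 3: six.text_type is str,     six.string_types is (str,)
message = six.text_type(Exception('boom'))        # always a text string
assert isinstance('output_collection', six.string_types)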
@@ -73,10 +74,10 @@ class BaseQuerySet(object):
# subclasses of the class being used
if document._meta.get('allow_inheritance') is True:
if len(self._document._subclasses) == 1:
self._initial_query = {"_cls": self._document._subclasses[0]}
self._initial_query = {'_cls': self._document._subclasses[0]}
else:
self._initial_query = {
"_cls": {"$in": self._document._subclasses}}
'_cls': {'$in': self._document._subclasses}}
self._loaded_fields = QueryFieldList(always_include=['_cls'])
self._cursor_obj = None
self._limit = None
@@ -105,8 +106,8 @@ class BaseQuerySet(object):
if q_obj:
# make sure proper query object is passed
if not isinstance(q_obj, QNode):
msg = ("Not a query object: %s. "
"Did you intend to use key=value?" % q_obj)
msg = ('Not a query object: %s. '
'Did you intend to use key=value?' % q_obj)
raise InvalidQueryError(msg)
query &= q_obj
@@ -133,10 +134,10 @@ class BaseQuerySet(object):
obj_dict = self.__dict__.copy()
# don't pickle collection, instead pickle collection params
obj_dict.pop("_collection_obj")
obj_dict.pop('_collection_obj')
# don't pickle cursor
obj_dict["_cursor_obj"] = None
obj_dict['_cursor_obj'] = None
return obj_dict
@@ -147,7 +148,7 @@ class BaseQuerySet(object):
See https://github.com/MongoEngine/mongoengine/issues/442
"""
obj_dict["_collection_obj"] = obj_dict["_document"]._get_collection()
obj_dict['_collection_obj'] = obj_dict['_document']._get_collection()
# update attributes
self.__dict__.update(obj_dict)
@@ -166,7 +167,7 @@ class BaseQuerySet(object):
queryset._skip, queryset._limit = key.start, key.stop
if key.start and key.stop:
queryset._limit = key.stop - key.start
except IndexError, err:
except IndexError as err:
# PyMongo raises an error if key.start == key.stop, catch it,
# bin it, kill it.
start = key.start or 0
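The except IndexError, err form removed here is Python 2-only syntax; except ... as err parses on both Python 2.6+ and Python 3, which is why every handler in this commit is rewritten that way. A tiny standalone illustration:

try:
    [][5]
except IndexError as err:  # valid on Python 2.6+ and Python 3
    print('caught: %s' % err)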
@@ -199,19 +200,16 @@ class BaseQuerySet(object):
raise NotImplementedError
def _has_data(self):
""" Retrieves whether cursor has any data. """
"""Return True if cursor has any data."""
queryset = self.order_by()
return False if queryset.first() is None else True
def __nonzero__(self):
""" Avoid to open all records in an if stmt in Py2. """
"""Avoid to open all records in an if stmt in Py2."""
return self._has_data()
def __bool__(self):
""" Avoid to open all records in an if stmt in Py3. """
"""Avoid to open all records in an if stmt in Py3."""
return self._has_data()
# Core functions
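Aliasing both __nonzero__ (the Python 2 truth-value hook) and __bool__ (its Python 3 counterpart) to _has_data() keeps "if queryset:" cheap on either interpreter, since only the first document is fetched. A rough, self-contained sketch of the idea (toy class, not the real queryset):

class LazyResults(object):
    """Truthiness only checks whether a first item exists."""
    def __init__(self, fetch_first):
        self._fetch_first = fetch_first  # callable returning a doc or None

    def _has_data(self):
        return self._fetch_first() is not None

    __bool__ = _has_data      # Python 3 truth-value hook
    __nonzero__ = _has_data   # Python 2 uses __nonzero__

print(bool(LazyResults(lambda: None)))       # False
print(bool(LazyResults(lambda: {'x': 1})))   # True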
@@ -239,7 +237,7 @@ class BaseQuerySet(object):
queryset = self.clone()
if queryset._search_text:
raise OperationError(
"It is not possible to use search_text two times.")
'It is not possible to use search_text two times.')
query_kwargs = SON({'$search': text})
if language:
@@ -268,7 +266,7 @@ class BaseQuerySet(object):
try:
result = queryset.next()
except StopIteration:
msg = ("%s matching query does not exist."
msg = ('%s matching query does not exist.'
% queryset._document._class_name)
raise queryset._document.DoesNotExist(msg)
try:
@@ -290,8 +288,7 @@ class BaseQuerySet(object):
return self._document(**kwargs).save()
def first(self):
"""Retrieve the first object matching the query.
"""
"""Retrieve the first object matching the query."""
queryset = self.clone()
try:
result = queryset[0]
@@ -340,7 +337,7 @@ class BaseQuerySet(object):
% str(self._document))
raise OperationError(msg)
if doc.pk and not doc._created:
msg = "Some documents have ObjectIds use doc.update() instead"
msg = 'Some documents have ObjectIds use doc.update() instead'
raise OperationError(msg)
signal_kwargs = signal_kwargs or {}
@@ -350,17 +347,17 @@ class BaseQuerySet(object):
raw = [doc.to_mongo() for doc in docs]
try:
ids = self._collection.insert(raw, **write_concern)
except pymongo.errors.DuplicateKeyError, err:
except pymongo.errors.DuplicateKeyError as err:
message = 'Could not save document (%s)'
raise NotUniqueError(message % unicode(err))
except pymongo.errors.OperationFailure, err:
raise NotUniqueError(message % six.text_type(err))
except pymongo.errors.OperationFailure as err:
message = 'Could not save document (%s)'
if re.match('^E1100[01] duplicate key', unicode(err)):
if re.match('^E1100[01] duplicate key', six.text_type(err)):
# E11000 - duplicate key error index
# E11001 - duplicate key on update
message = u'Tried to save duplicate unique keys (%s)'
raise NotUniqueError(message % unicode(err))
raise OperationError(message % unicode(err))
raise NotUniqueError(message % six.text_type(err))
raise OperationError(message % six.text_type(err))
if not load_bulk:
signals.post_bulk_insert.send(
@@ -386,7 +383,8 @@ class BaseQuerySet(object):
return 0
return self._cursor.count(with_limit_and_skip=with_limit_and_skip)
def delete(self, write_concern=None, _from_doc_delete=False, cascade_refs=None):
def delete(self, write_concern=None, _from_doc_delete=False,
cascade_refs=None):
"""Delete the documents matched by the query.
:param write_concern: Extra keyword arguments are passed down which
@@ -409,8 +407,9 @@ class BaseQuerySet(object):
# Handle deletes where skips or limits have been applied or
# there is an untriggered delete signal
has_delete_signal = signals.signals_available and (
signals.pre_delete.has_receivers_for(self._document) or
signals.post_delete.has_receivers_for(self._document))
signals.pre_delete.has_receivers_for(doc) or
signals.post_delete.has_receivers_for(doc)
)
call_document_delete = (queryset._skip or queryset._limit or
has_delete_signal) and not _from_doc_delete
@@ -423,37 +422,44 @@ class BaseQuerySet(object):
return cnt
delete_rules = doc._meta.get('delete_rules') or {}
delete_rules = list(delete_rules.items())
# Check for DENY rules before actually deleting/nullifying any other
# references
for rule_entry in delete_rules:
for rule_entry, rule in delete_rules:
document_cls, field_name = rule_entry
if document_cls._meta.get('abstract'):
continue
rule = doc._meta['delete_rules'][rule_entry]
if rule == DENY and document_cls.objects(
**{field_name + '__in': self}).count() > 0:
msg = ("Could not delete document (%s.%s refers to it)"
% (document_cls.__name__, field_name))
raise OperationError(msg)
for rule_entry in delete_rules:
if rule == DENY:
refs = document_cls.objects(**{field_name + '__in': self})
if refs.limit(1).count() > 0:
raise OperationError(
'Could not delete document (%s.%s refers to it)'
% (document_cls.__name__, field_name)
)
# Check all the other rules
for rule_entry, rule in delete_rules:
document_cls, field_name = rule_entry
if document_cls._meta.get('abstract'):
continue
rule = doc._meta['delete_rules'][rule_entry]
if rule == CASCADE:
cascade_refs = set() if cascade_refs is None else cascade_refs
# Handle recursive reference
if doc._collection == document_cls._collection:
for ref in queryset:
cascade_refs.add(ref.id)
ref_q = document_cls.objects(**{field_name + '__in': self, 'pk__nin': cascade_refs})
ref_q_count = ref_q.count()
if ref_q_count > 0:
ref_q.delete(write_concern=write_concern, cascade_refs=cascade_refs)
refs = document_cls.objects(**{field_name + '__in': self,
'pk__nin': cascade_refs})
if refs.count() > 0:
refs.delete(write_concern=write_concern,
cascade_refs=cascade_refs)
elif rule == NULLIFY:
document_cls.objects(**{field_name + '__in': self}).update(
write_concern=write_concern, **{'unset__%s' % field_name: 1})
write_concern=write_concern,
**{'unset__%s' % field_name: 1})
elif rule == PULL:
document_cls.objects(**{field_name + '__in': self}).update(
write_concern=write_concern,
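The reworked loop walks the delete rules twice: DENY rules are checked up front so nothing is deleted while another document still refers to the target, and only then are CASCADE, NULLIFY and PULL applied. A minimal sketch of how such rules are declared (hypothetical models, not from this repo):

from mongoengine import Document, ReferenceField, StringField, CASCADE, DENY

class Author(Document):
    name = StringField()

class Post(Document):
    # deleting an Author cascades to their posts...
    author = ReferenceField(Author, reverse_delete_rule=CASCADE)

class Magazine(Document):
    # ...but is refused while a Magazine still references them (DENY)
    editor = ReferenceField(Author, reverse_delete_rule=DENY)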
@@ -461,7 +467,7 @@ class BaseQuerySet(object):
result = queryset._collection.remove(queryset._query, **write_concern)
if result:
return result.get("n")
return result.get('n')
def update(self, upsert=False, multi=True, write_concern=None,
full_result=False, **update):
@@ -482,7 +488,7 @@ class BaseQuerySet(object):
.. versionadded:: 0.2
"""
if not update and not upsert:
raise OperationError("No update parameters, would remove data")
raise OperationError('No update parameters, would remove data')
if write_concern is None:
write_concern = {}
@@ -495,9 +501,9 @@ class BaseQuerySet(object):
# then ensure we add _cls to the update operation
if upsert and '_cls' in query:
if '$set' in update:
update["$set"]["_cls"] = queryset._document._class_name
update['$set']['_cls'] = queryset._document._class_name
else:
update["$set"] = {"_cls": queryset._document._class_name}
update['$set'] = {'_cls': queryset._document._class_name}
try:
result = queryset._collection.update(query, update, multi=multi,
upsert=upsert, **write_concern)
@@ -505,13 +511,13 @@ class BaseQuerySet(object):
return result
elif result:
return result['n']
except pymongo.errors.DuplicateKeyError, err:
raise NotUniqueError(u'Update failed (%s)' % unicode(err))
except pymongo.errors.OperationFailure, err:
if unicode(err) == u'multi not coded yet':
except pymongo.errors.DuplicateKeyError as err:
raise NotUniqueError(u'Update failed (%s)' % six.text_type(err))
except pymongo.errors.OperationFailure as err:
if six.text_type(err) == u'multi not coded yet':
message = u'update() method requires MongoDB 1.1.3+'
raise OperationError(message)
raise OperationError(u'Update failed (%s)' % unicode(err))
raise OperationError(u'Update failed (%s)' % six.text_type(err))
def upsert_one(self, write_concern=None, **update):
"""Overwrite or add the first document matched by the query.
@@ -582,11 +588,11 @@ class BaseQuerySet(object):
"""
if remove and new:
raise OperationError("Conflicting parameters: remove and new")
raise OperationError('Conflicting parameters: remove and new')
if not update and not upsert and not remove:
raise OperationError(
"No update parameters, must either update or remove")
'No update parameters, must either update or remove')
queryset = self.clone()
query = queryset._query
@@ -597,7 +603,7 @@ class BaseQuerySet(object):
try:
if IS_PYMONGO_3:
if full_response:
msg = "With PyMongo 3+, it is not possible anymore to get the full response."
msg = 'With PyMongo 3+, it is not possible anymore to get the full response.'
warnings.warn(msg, DeprecationWarning)
if remove:
result = queryset._collection.find_one_and_delete(
@@ -615,14 +621,14 @@ class BaseQuerySet(object):
result = queryset._collection.find_and_modify(
query, update, upsert=upsert, sort=sort, remove=remove, new=new,
full_response=full_response, **self._cursor_args)
except pymongo.errors.DuplicateKeyError, err:
raise NotUniqueError(u"Update failed (%s)" % err)
except pymongo.errors.OperationFailure, err:
raise OperationError(u"Update failed (%s)" % err)
except pymongo.errors.DuplicateKeyError as err:
raise NotUniqueError(u'Update failed (%s)' % err)
except pymongo.errors.OperationFailure as err:
raise OperationError(u'Update failed (%s)' % err)
if full_response:
if result["value"] is not None:
result["value"] = self._document._from_son(result["value"], only_fields=self.only_fields)
if result['value'] is not None:
result['value'] = self._document._from_son(result['value'], only_fields=self.only_fields)
else:
if result is not None:
result = self._document._from_son(result, only_fields=self.only_fields)
@@ -640,7 +646,7 @@ class BaseQuerySet(object):
"""
queryset = self.clone()
if not queryset._query_obj.empty:
msg = "Cannot use a filter whilst using `with_id`"
msg = 'Cannot use a filter whilst using `with_id`'
raise InvalidQueryError(msg)
return queryset.filter(pk=object_id).first()
@@ -684,7 +690,7 @@ class BaseQuerySet(object):
Only return instances of this document and not any inherited documents
"""
if self._document._meta.get('allow_inheritance') is True:
self._initial_query = {"_cls": self._document._class_name}
self._initial_query = {'_cls': self._document._class_name}
return self
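The _cls key set here is how MongoEngine distinguishes documents in an inheritance hierarchy: with allow_inheritance enabled, every stored document records its class path and the initial query filters on it. A small sketch of the effect (hypothetical models):

from mongoengine import Document, StringField

class Page(Document):
    title = StringField()
    meta = {'allow_inheritance': True}

class TextPage(Page):
    body = StringField()

# Page.objects filters on {'_cls': {'$in': ['Page', 'Page.TextPage']}},
# while TextPage.objects filters on {'_cls': 'Page.TextPage'}.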
@@ -810,49 +816,56 @@ class BaseQuerySet(object):
.. versionchanged:: 0.6 - Improved db_field reference handling
"""
queryset = self.clone()
try:
field = self._fields_to_dbfields([field]).pop()
finally:
distinct = self._dereference(queryset._cursor.distinct(field), 1,
name=field, instance=self._document)
except LookUpError:
pass
doc_field = self._document._fields.get(field.split('.', 1)[0])
instance = False
# We may need to cast to the correct type eg. ListField(EmbeddedDocumentField)
EmbeddedDocumentField = _import_class('EmbeddedDocumentField')
ListField = _import_class('ListField')
GenericEmbeddedDocumentField = _import_class('GenericEmbeddedDocumentField')
if isinstance(doc_field, ListField):
doc_field = getattr(doc_field, "field", doc_field)
if isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)):
instance = getattr(doc_field, "document_type", False)
# handle distinct on subdocuments
if '.' in field:
for field_part in field.split('.')[1:]:
# if looping on embedded document, get the document type instance
if instance and isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)):
doc_field = instance
# now get the subdocument
doc_field = getattr(doc_field, field_part, doc_field)
# We may need to cast to the correct type eg. ListField(EmbeddedDocumentField)
if isinstance(doc_field, ListField):
doc_field = getattr(doc_field, "field", doc_field)
if isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)):
instance = getattr(doc_field, "document_type", False)
if instance and isinstance(doc_field, (EmbeddedDocumentField,
GenericEmbeddedDocumentField)):
distinct = [instance(**doc) for doc in distinct]
return distinct
distinct = self._dereference(queryset._cursor.distinct(field), 1,
name=field, instance=self._document)
doc_field = self._document._fields.get(field.split('.', 1)[0])
instance = None
# We may need to cast to the correct type eg. ListField(EmbeddedDocumentField)
EmbeddedDocumentField = _import_class('EmbeddedDocumentField')
ListField = _import_class('ListField')
GenericEmbeddedDocumentField = _import_class('GenericEmbeddedDocumentField')
if isinstance(doc_field, ListField):
doc_field = getattr(doc_field, 'field', doc_field)
if isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)):
instance = getattr(doc_field, 'document_type', None)
# handle distinct on subdocuments
if '.' in field:
for field_part in field.split('.')[1:]:
# if looping on embedded document, get the document type instance
if instance and isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)):
doc_field = instance
# now get the subdocument
doc_field = getattr(doc_field, field_part, doc_field)
# We may need to cast to the correct type eg. ListField(EmbeddedDocumentField)
if isinstance(doc_field, ListField):
doc_field = getattr(doc_field, 'field', doc_field)
if isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)):
instance = getattr(doc_field, 'document_type', None)
if instance and isinstance(doc_field, (EmbeddedDocumentField,
GenericEmbeddedDocumentField)):
distinct = [instance(**doc) for doc in distinct]
return distinct
def only(self, *fields):
"""Load only a subset of this document's fields. ::
post = BlogPost.objects(...).only("title", "author.name")
post = BlogPost.objects(...).only('title', 'author.name')
.. note :: `only()` is chainable and will perform a union ::
So with the following it will fetch both: `title` and `author.name`::
post = BlogPost.objects.only("title").only("author.name")
post = BlogPost.objects.only('title').only('author.name')
:func:`~mongoengine.queryset.QuerySet.all_fields` will reset any
field filters.
@@ -862,19 +875,19 @@ class BaseQuerySet(object):
.. versionadded:: 0.3
.. versionchanged:: 0.5 - Added subfield support
"""
fields = dict([(f, QueryFieldList.ONLY) for f in fields])
fields = {f: QueryFieldList.ONLY for f in fields}
self.only_fields = fields.keys()
return self.fields(True, **fields)
def exclude(self, *fields):
"""Opposite to .only(), exclude some document's fields. ::
post = BlogPost.objects(...).exclude("comments")
post = BlogPost.objects(...).exclude('comments')
.. note :: `exclude()` is chainable and will perform a union ::
So with the following it will exclude both: `title` and `author.name`::
post = BlogPost.objects.exclude("title").exclude("author.name")
post = BlogPost.objects.exclude('title').exclude('author.name')
:func:`~mongoengine.queryset.QuerySet.all_fields` will reset any
field filters.
@@ -883,7 +896,7 @@ class BaseQuerySet(object):
.. versionadded:: 0.5
"""
fields = dict([(f, QueryFieldList.EXCLUDE) for f in fields])
fields = {f: QueryFieldList.EXCLUDE for f in fields}
return self.fields(**fields)
def fields(self, _only_called=False, **kwargs):
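Both only() and exclude() now build their field maps with dict comprehensions rather than dict([...]); the two spellings are equivalent, but the comprehension (available since Python 2.7) skips the intermediate list of tuples. A quick illustration, with 1 standing in for QueryFieldList.ONLY:

fields = ('title', 'author.name')

old_style = dict([(f, 1) for f in fields])  # builds a temporary list first
new_style = {f: 1 for f in fields}          # direct dict comprehension

assert old_style == new_style == {'title': 1, 'author.name': 1}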
@@ -904,7 +917,7 @@ class BaseQuerySet(object):
"""
# Check for an operator and transform to mongo-style if there is
operators = ["slice"]
operators = ['slice']
cleaned_fields = []
for key, value in kwargs.items():
parts = key.split('__')
@@ -928,7 +941,7 @@ class BaseQuerySet(object):
"""Include all fields. Reset all previously calls of .only() or
.exclude(). ::
post = BlogPost.objects.exclude("comments").all_fields()
post = BlogPost.objects.exclude('comments').all_fields()
.. versionadded:: 0.5
"""
@@ -955,7 +968,7 @@ class BaseQuerySet(object):
See https://docs.mongodb.com/manual/reference/method/cursor.comment/#cursor.comment
for details.
"""
return self._chainable_method("comment", text)
return self._chainable_method('comment', text)
def explain(self, format=False):
"""Return an explain plan record for the
@@ -964,8 +977,15 @@ class BaseQuerySet(object):
:param format: format the plan before returning it
"""
plan = self._cursor.explain()
# TODO remove this option completely - it's useless. If somebody
# wants to pretty-print the output, they easily can.
if format:
msg = ('"format" param of BaseQuerySet.explain has been '
'deprecated and will be removed in future versions.')
warnings.warn(msg, DeprecationWarning)
plan = pprint.pformat(plan)
return plan
# DEPRECATED. Has no more impact on PyMongo 3+
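The format flag of explain() now emits a DeprecationWarning before pretty-printing, the same shim pattern used for snapshot and slave_okay below. The pattern in isolation (hypothetical helper name):

import pprint
import warnings

def explain_plan(plan, format=False):
    # keep the old behaviour, but tell callers it is going away
    if format:
        warnings.warn('"format" is deprecated and will be removed.',
                      DeprecationWarning)
        plan = pprint.pformat(plan)
    return plan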
@@ -978,7 +998,7 @@ class BaseQuerySet(object):
.. deprecated:: Ignored with PyMongo 3+
"""
if IS_PYMONGO_3:
msg = "snapshot is deprecated as it has no impact when using PyMongo 3+."
msg = 'snapshot is deprecated as it has no impact when using PyMongo 3+.'
warnings.warn(msg, DeprecationWarning)
queryset = self.clone()
queryset._snapshot = enabled
@@ -1004,7 +1024,7 @@ class BaseQuerySet(object):
.. deprecated:: Ignored with PyMongo 3+
"""
if IS_PYMONGO_3:
msg = "slave_okay is deprecated as it has no impact when using PyMongo 3+."
msg = 'slave_okay is deprecated as it has no impact when using PyMongo 3+.'
warnings.warn(msg, DeprecationWarning)
queryset = self.clone()
queryset._slave_okay = enabled
@@ -1066,7 +1086,7 @@ class BaseQuerySet(object):
:param ms: the number of milliseconds before killing the query on the server
"""
return self._chainable_method("max_time_ms", ms)
return self._chainable_method('max_time_ms', ms)
# JSON Helpers
@@ -1149,19 +1169,19 @@ class BaseQuerySet(object):
MapReduceDocument = _import_class('MapReduceDocument')
if not hasattr(self._collection, "map_reduce"):
raise NotImplementedError("Requires MongoDB >= 1.7.1")
if not hasattr(self._collection, 'map_reduce'):
raise NotImplementedError('Requires MongoDB >= 1.7.1')
map_f_scope = {}
if isinstance(map_f, Code):
map_f_scope = map_f.scope
map_f = unicode(map_f)
map_f = six.text_type(map_f)
map_f = Code(queryset._sub_js_fields(map_f), map_f_scope)
reduce_f_scope = {}
if isinstance(reduce_f, Code):
reduce_f_scope = reduce_f.scope
reduce_f = unicode(reduce_f)
reduce_f = six.text_type(reduce_f)
reduce_f_code = queryset._sub_js_fields(reduce_f)
reduce_f = Code(reduce_f_code, reduce_f_scope)
@@ -1171,7 +1191,7 @@ class BaseQuerySet(object):
finalize_f_scope = {}
if isinstance(finalize_f, Code):
finalize_f_scope = finalize_f.scope
finalize_f = unicode(finalize_f)
finalize_f = six.text_type(finalize_f)
finalize_f_code = queryset._sub_js_fields(finalize_f)
finalize_f = Code(finalize_f_code, finalize_f_scope)
mr_args['finalize'] = finalize_f
@@ -1187,7 +1207,7 @@ class BaseQuerySet(object):
else:
map_reduce_function = 'map_reduce'
if isinstance(output, basestring):
if isinstance(output, six.string_types):
mr_args['out'] = output
elif isinstance(output, dict):
@@ -1200,7 +1220,7 @@ class BaseQuerySet(object):
break
else:
raise OperationError("actionData not specified for output")
raise OperationError('actionData not specified for output')
db_alias = output.get('db_alias')
remaing_args = ['db', 'sharded', 'nonAtomic']
@@ -1430,7 +1450,7 @@ class BaseQuerySet(object):
# snapshot is not handled at all by PyMongo 3+
# TODO: evaluate similar possibilities using modifiers
if self._snapshot:
msg = "The snapshot option is not anymore available with PyMongo 3+"
msg = 'The snapshot option is not anymore available with PyMongo 3+'
warnings.warn(msg, DeprecationWarning)
cursor_args = {
'no_cursor_timeout': not self._timeout
@@ -1442,7 +1462,7 @@ class BaseQuerySet(object):
if fields_name not in cursor_args:
cursor_args[fields_name] = {}
cursor_args[fields_name]['_text_score'] = {'$meta': "textScore"}
cursor_args[fields_name]['_text_score'] = {'$meta': 'textScore'}
return cursor_args
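The _text_score projection relies on MongoDB's {'$meta': 'textScore'} operator, which is what allows search_text() results to be ordered by relevance. Typical usage, assuming a text index is declared on the field (illustrative model):

from mongoengine import Document, StringField

class Article(Document):
    content = StringField()
    meta = {'indexes': [{'fields': ['$content']}]}  # text index on `content`

ranked = Article.objects.search_text('mongodb').order_by('$text_score')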
@@ -1497,8 +1517,8 @@ class BaseQuerySet(object):
if self._mongo_query is None:
self._mongo_query = self._query_obj.to_query(self._document)
if self._class_check and self._initial_query:
if "_cls" in self._mongo_query:
self._mongo_query = {"$and": [self._initial_query, self._mongo_query]}
if '_cls' in self._mongo_query:
self._mongo_query = {'$and': [self._initial_query, self._mongo_query]}
else:
self._mongo_query.update(self._initial_query)
return self._mongo_query
@@ -1510,8 +1530,7 @@ class BaseQuerySet(object):
return self.__dereference
def no_dereference(self):
"""Turn off any dereferencing for the results of this queryset.
"""
"""Turn off any dereferencing for the results of this queryset."""
queryset = self.clone()
queryset._auto_dereference = False
return queryset
@@ -1540,7 +1559,7 @@ class BaseQuerySet(object):
emit(null, 1);
}
}
""" % dict(field=field)
""" % {'field': field}
reduce_func = """
function(key, values) {
var total = 0;
@@ -1562,8 +1581,8 @@ class BaseQuerySet(object):
if normalize:
count = sum(frequencies.values())
frequencies = dict([(k, float(v) / count)
for k, v in frequencies.items()])
frequencies = {k: float(v) / count
for k, v in frequencies.items()}
return frequencies
@@ -1615,10 +1634,10 @@ class BaseQuerySet(object):
}
"""
total, data, types = self.exec_js(freq_func, field)
values = dict([(types.get(k), int(v)) for k, v in data.iteritems()])
values = {types.get(k): int(v) for k, v in data.iteritems()}
if normalize:
values = dict([(k, float(v) / total) for k, v in values.items()])
values = {k: float(v) / total for k, v in values.items()}
frequencies = {}
for k, v in values.iteritems():
@@ -1640,14 +1659,14 @@ class BaseQuerySet(object):
for x in document._subclasses][1:]
for field in fields:
try:
field = ".".join(f.db_field for f in
field = '.'.join(f.db_field for f in
document._lookup_field(field.split('.')))
ret.append(field)
except LookUpError, err:
except LookUpError as err:
found = False
for subdoc in subclasses:
try:
subfield = ".".join(f.db_field for f in
subfield = '.'.join(f.db_field for f in
subdoc._lookup_field(field.split('.')))
ret.append(subfield)
found = True
@@ -1660,15 +1679,14 @@ class BaseQuerySet(object):
return ret
def _get_order_by(self, keys):
"""Creates a list of order by fields
"""
"""Creates a list of order by fields"""
key_list = []
for key in keys:
if not key:
continue
if key == '$text_score':
key_list.append(('_text_score', {'$meta': "textScore"}))
key_list.append(('_text_score', {'$meta': 'textScore'}))
continue
direction = pymongo.ASCENDING
@@ -1740,7 +1758,7 @@ class BaseQuerySet(object):
# If we need to coerce types, we need to determine the
# type of this field and use the corresponding
# .to_python(...)
from mongoengine.fields import EmbeddedDocumentField
EmbeddedDocumentField = _import_class('EmbeddedDocumentField')
obj = self._document
for chunk in path.split('.'):
@@ -1774,7 +1792,7 @@ class BaseQuerySet(object):
field_name = match.group(1).split('.')
fields = self._document._lookup_field(field_name)
# Substitute the correct name for the field into the javascript
return ".".join([f.db_field for f in fields])
return '.'.join([f.db_field for f in fields])
code = re.sub(u'\[\s*~([A-z_][A-z_0-9.]+?)\s*\]', field_sub, code)
code = re.sub(u'\{\{\s*~([A-z_][A-z_0-9.]+?)\s*\}\}', field_path_sub,
@@ -1785,21 +1803,21 @@ class BaseQuerySet(object):
queryset = self.clone()
method = getattr(queryset._cursor, method_name)
method(val)
setattr(queryset, "_" + method_name, val)
setattr(queryset, '_' + method_name, val)
return queryset
# Deprecated
def ensure_index(self, **kwargs):
"""Deprecated use :func:`Document.ensure_index`"""
msg = ("Doc.objects()._ensure_index() is deprecated. "
"Use Doc.ensure_index() instead.")
msg = ('Doc.objects()._ensure_index() is deprecated. '
'Use Doc.ensure_index() instead.')
warnings.warn(msg, DeprecationWarning)
self._document.__class__.ensure_index(**kwargs)
return self
def _ensure_indexes(self):
"""Deprecated use :func:`~Document.ensure_indexes`"""
msg = ("Doc.objects()._ensure_indexes() is deprecated. "
"Use Doc.ensure_indexes() instead.")
msg = ('Doc.objects()._ensure_indexes() is deprecated. '
'Use Doc.ensure_indexes() instead.')
warnings.warn(msg, DeprecationWarning)
self._document.__class__.ensure_indexes()