Improve the health of this package (#1428)

Stefan Wójcik
2016-12-11 18:49:21 -05:00
committed by GitHub
parent 3135b456be
commit 835d3c3d18
60 changed files with 1564 additions and 1893 deletions

mongoengine/__init__.py

@@ -1,25 +1,35 @@
import connection
from connection import *
import document
from document import *
import errors
from errors import *
import fields
from fields import *
import queryset
from queryset import *
import signals
from signals import *
# Import submodules so that we can expose their __all__
from mongoengine import connection
from mongoengine import document
from mongoengine import errors
from mongoengine import fields
from mongoengine import queryset
from mongoengine import signals
# Import everything from each submodule so that it can be accessed via
# mongoengine, e.g. instead of `from mongoengine.connection import connect`,
# users can simply use `from mongoengine import connect`, or even
# `from mongoengine import *` and then `connect('testdb')`.
from mongoengine.connection import *
from mongoengine.document import *
from mongoengine.errors import *
from mongoengine.fields import *
from mongoengine.queryset import *
from mongoengine.signals import *
__all__ = (list(document.__all__) + list(fields.__all__) +
list(connection.__all__) + list(queryset.__all__) +
list(signals.__all__) + list(errors.__all__))
__all__ = (list(document.__all__) + fields.__all__ + connection.__all__ +
list(queryset.__all__) + signals.__all__ + list(errors.__all__))
VERSION = (0, 10, 9)
def get_version():
if isinstance(VERSION[-1], basestring):
return '.'.join(map(str, VERSION[:-1])) + VERSION[-1]
"""Return the VERSION as a string, e.g. for VERSION == (0, 10, 7),
return '0.10.7'.
"""
return '.'.join(map(str, VERSION))
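
To make the flattened import surface concrete, here is a minimal usage sketch (hypothetical model and database name; assumes a MongoDB server on localhost):

from mongoengine import connect, Document, StringField

connect('testdb')  # instead of `from mongoengine.connection import connect`

class User(Document):
    name = StringField(required=True)

User(name='Stefan').save()

# get_version() simply joins the VERSION tuple:
# '.'.join(map(str, (0, 10, 9))) == '0.10.9'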

mongoengine/base/__init__.py

@@ -1,8 +1,28 @@
# Base module is split into several files for convenience. Files inside of
# this module should import from a specific submodule (e.g.
# `from mongoengine.base.document import BaseDocument`), but all of the
# other modules should import directly from the top-level module (e.g.
# `from mongoengine.base import BaseDocument`). This approach is cleaner and
# also helps with cyclical import errors.
from mongoengine.base.common import *
from mongoengine.base.datastructures import *
from mongoengine.base.document import *
from mongoengine.base.fields import *
from mongoengine.base.metaclasses import *
# Help with backwards compatibility
from mongoengine.errors import *
__all__ = (
# common
'UPDATE_OPERATORS', '_document_registry', 'get_document',
# datastructures
'BaseDict', 'BaseList', 'EmbeddedDocumentList',
# document
'BaseDocument',
# fields
'BaseField', 'ComplexBaseField', 'ObjectIdField', 'GeoJsonBaseField',
# metaclasses
'DocumentMetaclass', 'TopLevelDocumentMetaclass'
)
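
To make the layering concrete, the two import styles the comment above prescribes, side by side:

# Inside the mongoengine.base package itself, import from the specific
# submodule to keep cyclical imports at bay:
from mongoengine.base.document import BaseDocument

# Everywhere else in the package, import from the top-level base module:
from mongoengine.base import BaseDocument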

mongoengine/base/common.py

@@ -1,13 +1,18 @@
from mongoengine.errors import NotRegistered
__all__ = ('ALLOW_INHERITANCE', 'get_document', '_document_registry')
__all__ = ('UPDATE_OPERATORS', 'get_document', '_document_registry')
UPDATE_OPERATORS = set(['set', 'unset', 'inc', 'dec', 'pop', 'push',
'push_all', 'pull', 'pull_all', 'add_to_set',
'set_on_insert', 'min', 'max'])
ALLOW_INHERITANCE = False
_document_registry = {}
def get_document(name):
"""Get a document class by name."""
doc = _document_registry.get(name, None)
if not doc:
# Possible old style name
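
For context, a sketch of how the registry is used: the Document metaclass registers each class by name, and get_document resolves the name back to the class later (e.g. when rebuilding documents from SON):

from mongoengine import Document, StringField
from mongoengine.base.common import get_document

class Article(Document):
    title = StringField()

# TopLevelDocumentMetaclass added Article to _document_registry, so:
assert get_document('Article') is Article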

mongoengine/base/datastructures.py

@@ -1,14 +1,16 @@
import itertools
import weakref
import six
from mongoengine.common import _import_class
from mongoengine.errors import DoesNotExist, MultipleObjectsReturned
__all__ = ("BaseDict", "BaseList", "EmbeddedDocumentList")
__all__ = ('BaseDict', 'BaseList', 'EmbeddedDocumentList')
class BaseDict(dict):
"""A special dict so we can watch any changes"""
"""A special dict so we can watch any changes."""
_dereferenced = False
_instance = None
@@ -93,8 +95,7 @@ class BaseDict(dict):
class BaseList(list):
"""A special list so we can watch any changes
"""
"""A special list so we can watch any changes."""
_dereferenced = False
_instance = None
@@ -209,17 +210,22 @@ class BaseList(list):
class EmbeddedDocumentList(BaseList):
@classmethod
def __match_all(cls, i, kwargs):
items = kwargs.items()
return all([
getattr(i, k) == v or unicode(getattr(i, k)) == v for k, v in items
])
def __match_all(cls, embedded_doc, kwargs):
"""Return True if a given embedded doc matches all the filter
kwargs. If it doesn't, return False.
"""
for key, expected_value in kwargs.items():
doc_val = getattr(embedded_doc, key)
if doc_val != expected_value and six.text_type(doc_val) != expected_value:
return False
return True
@classmethod
def __only_matches(cls, obj, kwargs):
def __only_matches(cls, embedded_docs, kwargs):
"""Return embedded docs that match the filter kwargs."""
if not kwargs:
return obj
return filter(lambda i: cls.__match_all(i, kwargs), obj)
return embedded_docs
return [doc for doc in embedded_docs if cls.__match_all(doc, kwargs)]
def __init__(self, list_items, instance, name):
super(EmbeddedDocumentList, self).__init__(list_items, instance, name)
@@ -285,18 +291,18 @@ class EmbeddedDocumentList(BaseList):
values = self.__only_matches(self, kwargs)
if len(values) == 0:
raise DoesNotExist(
"%s matching query does not exist." % self._name
'%s matching query does not exist.' % self._name
)
elif len(values) > 1:
raise MultipleObjectsReturned(
"%d items returned, instead of 1" % len(values)
'%d items returned, instead of 1' % len(values)
)
return values[0]
def first(self):
"""
Returns the first embedded document in the list, or ``None`` if empty.
"""Return the first embedded document in the list, or ``None``
if empty.
"""
if len(self) > 0:
return self[0]
@@ -438,7 +444,7 @@ class StrictDict(object):
__slots__ = allowed_keys_tuple
def __repr__(self):
return "{%s}" % ', '.join('"{0!s}": {1!r}'.format(k, v) for k, v in self.items())
return '{%s}' % ', '.join('"{0!s}": {1!r}'.format(k, v) for k, v in self.items())
cls._classes[allowed_keys] = SpecificStrictDict
return cls._classes[allowed_keys]
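
The rewritten matcher falls back to comparing a field's text representation, so string-typed filter values can match non-string attributes. A standalone sketch of that logic with hypothetical stand-in objects:

import six

def match_all(embedded_doc, kwargs):
    # Mirrors __match_all: every kwarg must equal the attribute value,
    # either directly or via its text representation.
    for key, expected_value in kwargs.items():
        doc_val = getattr(embedded_doc, key)
        if doc_val != expected_value and six.text_type(doc_val) != expected_value:
            return False
    return True

class Role(object):
    def __init__(self, name, level):
        self.name, self.level = name, level

roles = [Role('admin', 1), Role('user', 2)]
# The string '2' matches the integer level 2 via six.text_type:
assert [r.name for r in roles if match_all(r, {'level': '2'})] == ['user']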

mongoengine/base/document.py

@@ -1,6 +1,5 @@
import copy
import numbers
import operator
from collections import Hashable
from functools import partial
@@ -8,30 +7,27 @@ from bson import ObjectId, json_util
from bson.dbref import DBRef
from bson.son import SON
import pymongo
import six
from mongoengine import signals
from mongoengine.base.common import ALLOW_INHERITANCE, get_document
from mongoengine.base.datastructures import (
BaseDict,
BaseList,
EmbeddedDocumentList,
SemiStrictDict,
StrictDict
)
from mongoengine.base.common import get_document
from mongoengine.base.datastructures import (BaseDict, BaseList,
EmbeddedDocumentList,
SemiStrictDict, StrictDict)
from mongoengine.base.fields import ComplexBaseField
from mongoengine.common import _import_class
from mongoengine.errors import (FieldDoesNotExist, InvalidDocumentError,
LookUpError, ValidationError)
from mongoengine.python_support import PY3, txt_type
LookUpError, OperationError, ValidationError)
__all__ = ('BaseDocument', 'NON_FIELD_ERRORS')
__all__ = ('BaseDocument',)
NON_FIELD_ERRORS = '__all__'
class BaseDocument(object):
__slots__ = ('_changed_fields', '_initialised', '_created', '_data',
'_dynamic_fields', '_auto_id_field', '_db_field_map', '__weakref__')
'_dynamic_fields', '_auto_id_field', '_db_field_map',
'__weakref__')
_dynamic = False
_dynamic_lock = True
@@ -57,15 +53,15 @@ class BaseDocument(object):
name = next(field)
if name in values:
raise TypeError(
"Multiple values for keyword argument '" + name + "'")
'Multiple values for keyword argument "%s"' % name)
values[name] = value
__auto_convert = values.pop("__auto_convert", True)
__auto_convert = values.pop('__auto_convert', True)
# 399: set default values only to fields loaded from DB
__only_fields = set(values.pop("__only_fields", values))
__only_fields = set(values.pop('__only_fields', values))
_created = values.pop("_created", True)
_created = values.pop('_created', True)
signals.pre_init.send(self.__class__, document=self, values=values)
@@ -76,7 +72,7 @@ class BaseDocument(object):
self._fields.keys() + ['id', 'pk', '_cls', '_text_score'])
if _undefined_fields:
msg = (
"The fields '{0}' do not exist on the document '{1}'"
'The fields "{0}" do not exist on the document "{1}"'
).format(_undefined_fields, self._class_name)
raise FieldDoesNotExist(msg)
@@ -95,7 +91,7 @@ class BaseDocument(object):
value = getattr(self, key, None)
setattr(self, key, value)
if "_cls" not in values:
if '_cls' not in values:
self._cls = self._class_name
# Set passed values after initialisation
@@ -150,7 +146,7 @@ class BaseDocument(object):
if self._dynamic and not self._dynamic_lock:
if not hasattr(self, name) and not name.startswith('_'):
DynamicField = _import_class("DynamicField")
DynamicField = _import_class('DynamicField')
field = DynamicField(db_field=name)
field.name = name
self._dynamic_fields[name] = field
@@ -169,11 +165,13 @@ class BaseDocument(object):
except AttributeError:
self__created = True
if (self._is_document and not self__created and
name in self._meta.get('shard_key', tuple()) and
self._data.get(name) != value):
OperationError = _import_class('OperationError')
msg = "Shard Keys are immutable. Tried to update %s" % name
if (
self._is_document and
not self__created and
name in self._meta.get('shard_key', tuple()) and
self._data.get(name) != value
):
msg = 'Shard Keys are immutable. Tried to update %s' % name
raise OperationError(msg)
try:
@@ -197,8 +195,8 @@ class BaseDocument(object):
return data
def __setstate__(self, data):
if isinstance(data["_data"], SON):
data["_data"] = self.__class__._from_son(data["_data"])._data
if isinstance(data['_data'], SON):
data['_data'] = self.__class__._from_son(data['_data'])._data
for k in ('_changed_fields', '_initialised', '_created', '_data',
'_dynamic_fields'):
if k in data:
@@ -212,7 +210,7 @@ class BaseDocument(object):
dynamic_fields = data.get('_dynamic_fields') or SON()
for k in dynamic_fields.keys():
setattr(self, k, data["_data"].get(k))
setattr(self, k, data['_data'].get(k))
def __iter__(self):
return iter(self._fields_ordered)
@@ -254,12 +252,13 @@ class BaseDocument(object):
return repr_type('<%s: %s>' % (self.__class__.__name__, u))
def __str__(self):
# TODO this could be simpler?
if hasattr(self, '__unicode__'):
if PY3:
if six.PY3:
return self.__unicode__()
else:
return unicode(self).encode('utf-8')
return txt_type('%s object' % self.__class__.__name__)
return six.text_type(self).encode('utf-8')
return six.text_type('%s object' % self.__class__.__name__)
def __eq__(self, other):
if isinstance(other, self.__class__) and hasattr(other, 'id') and other.id is not None:
@@ -308,7 +307,7 @@ class BaseDocument(object):
fields = []
data = SON()
data["_id"] = None
data['_id'] = None
data['_cls'] = self._class_name
# only root fields ['test1.a', 'test2'] => ['test1', 'test2']
@@ -351,18 +350,8 @@ class BaseDocument(object):
else:
data[field.name] = value
# If "_id" has not been set, then try and set it
Document = _import_class("Document")
if isinstance(self, Document):
if data["_id"] is None:
data["_id"] = self._data.get("id", None)
if data['_id'] is None:
data.pop('_id')
# Only add _cls if allow_inheritance is True
if (not hasattr(self, '_meta') or
not self._meta.get('allow_inheritance', ALLOW_INHERITANCE)):
if not self._meta.get('allow_inheritance'):
data.pop('_cls')
return data
@@ -376,16 +365,16 @@ class BaseDocument(object):
if clean:
try:
self.clean()
except ValidationError, error:
except ValidationError as error:
errors[NON_FIELD_ERRORS] = error
# Get a list of tuples of field names and their current values
fields = [(self._fields.get(name, self._dynamic_fields.get(name)),
self._data.get(name)) for name in self._fields_ordered]
EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
EmbeddedDocumentField = _import_class('EmbeddedDocumentField')
GenericEmbeddedDocumentField = _import_class(
"GenericEmbeddedDocumentField")
'GenericEmbeddedDocumentField')
for field, value in fields:
if value is not None:
@@ -395,21 +384,21 @@ class BaseDocument(object):
field._validate(value, clean=clean)
else:
field._validate(value)
except ValidationError, error:
except ValidationError as error:
errors[field.name] = error.errors or error
except (ValueError, AttributeError, AssertionError), error:
except (ValueError, AttributeError, AssertionError) as error:
errors[field.name] = error
elif field.required and not getattr(field, '_auto_gen', False):
errors[field.name] = ValidationError('Field is required',
field_name=field.name)
if errors:
pk = "None"
pk = 'None'
if hasattr(self, 'pk'):
pk = self.pk
elif self._instance and hasattr(self._instance, 'pk'):
pk = self._instance.pk
message = "ValidationError (%s:%s) " % (self._class_name, pk)
message = 'ValidationError (%s:%s) ' % (self._class_name, pk)
raise ValidationError(message, errors=errors)
def to_json(self, *args, **kwargs):
@@ -426,33 +415,26 @@ class BaseDocument(object):
return cls._from_son(json_util.loads(json_data), created=created)
def __expand_dynamic_values(self, name, value):
"""expand any dynamic values to their correct types / values"""
"""Expand any dynamic values to their correct types / values."""
if not isinstance(value, (dict, list, tuple)):
return value
EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField')
is_list = False
if not hasattr(value, 'items'):
is_list = True
value = dict([(k, v) for k, v in enumerate(value)])
if not is_list and '_cls' in value:
# If the value is a dict with '_cls' in it, turn it into a document
is_dict = isinstance(value, dict)
if is_dict and '_cls' in value:
cls = get_document(value['_cls'])
return cls(**value)
data = {}
for k, v in value.items():
key = name if is_list else k
data[k] = self.__expand_dynamic_values(key, v)
if is_list: # Convert back to a list
data_items = sorted(data.items(), key=operator.itemgetter(0))
value = [v for k, v in data_items]
if is_dict:
value = {
k: self.__expand_dynamic_values(k, v)
for k, v in value.items()
}
else:
value = data
value = [self.__expand_dynamic_values(name, v) for v in value]
# Convert lists / values so we can watch for any changes on them
EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField')
if (isinstance(value, (list, tuple)) and
not isinstance(value, BaseList)):
if issubclass(type(self), EmbeddedDocumentListField):
@@ -465,8 +447,7 @@ class BaseDocument(object):
return value
def _mark_as_changed(self, key):
"""Marks a key as explicitly changed by the user
"""
"""Mark a key as explicitly changed by the user."""
if not key:
return
@@ -496,10 +477,11 @@ class BaseDocument(object):
remove(field)
def _clear_changed_fields(self):
"""Using get_changed_fields iterate and remove any fields that are
marked as changed"""
"""Using _get_changed_fields iterate and remove any fields that
are marked as changed.
"""
for changed in self._get_changed_fields():
parts = changed.split(".")
parts = changed.split('.')
data = self
for part in parts:
if isinstance(data, list):
@@ -511,10 +493,13 @@ class BaseDocument(object):
data = data.get(part, None)
else:
data = getattr(data, part, None)
if hasattr(data, "_changed_fields"):
if hasattr(data, "_is_document") and data._is_document:
if hasattr(data, '_changed_fields'):
if getattr(data, '_is_document', False):
continue
data._changed_fields = []
self._changed_fields = []
def _nestable_types_changed_fields(self, changed_fields, key, data, inspected):
@@ -526,26 +511,27 @@ class BaseDocument(object):
iterator = data.iteritems()
for index, value in iterator:
list_key = "%s%s." % (key, index)
list_key = '%s%s.' % (key, index)
# don't check anything lower if this key is already marked
# as changed.
if list_key[:-1] in changed_fields:
continue
if hasattr(value, '_get_changed_fields'):
changed = value._get_changed_fields(inspected)
changed_fields += ["%s%s" % (list_key, k)
changed_fields += ['%s%s' % (list_key, k)
for k in changed if k]
elif isinstance(value, (list, tuple, dict)):
self._nestable_types_changed_fields(
changed_fields, list_key, value, inspected)
def _get_changed_fields(self, inspected=None):
"""Returns a list of all fields that have explicitly been changed.
"""Return a list of all fields that have explicitly been changed.
"""
EmbeddedDocument = _import_class("EmbeddedDocument")
DynamicEmbeddedDocument = _import_class("DynamicEmbeddedDocument")
ReferenceField = _import_class("ReferenceField")
SortedListField = _import_class("SortedListField")
EmbeddedDocument = _import_class('EmbeddedDocument')
DynamicEmbeddedDocument = _import_class('DynamicEmbeddedDocument')
ReferenceField = _import_class('ReferenceField')
SortedListField = _import_class('SortedListField')
changed_fields = []
changed_fields += getattr(self, '_changed_fields', [])
@@ -572,7 +558,7 @@ class BaseDocument(object):
):
# Find all embedded fields that have been changed
changed = data._get_changed_fields(inspected)
changed_fields += ["%s%s" % (key, k) for k in changed if k]
changed_fields += ['%s%s' % (key, k) for k in changed if k]
elif (isinstance(data, (list, tuple, dict)) and
db_field_name not in changed_fields):
if (hasattr(field, 'field') and
@@ -676,21 +662,25 @@ class BaseDocument(object):
@classmethod
def _get_collection_name(cls):
"""Returns the collection name for this class. None for abstract class
"""Return the collection name for this class. None for abstract
class.
"""
return cls._meta.get('collection', None)
@classmethod
def _from_son(cls, son, _auto_dereference=True, only_fields=None, created=False):
"""Create an instance of a Document (subclass) from a PyMongo SON.
"""Create an instance of a Document (subclass) from a PyMongo
SON.
"""
if not only_fields:
only_fields = []
# get the class name from the document, falling back to the given
# Get the class name from the document, falling back to the given
# class if unavailable
class_name = son.get('_cls', cls._class_name)
data = dict(("%s" % key, value) for key, value in son.iteritems())
# Convert SON to a dict, making sure each key is a string
data = {str(key): value for key, value in son.iteritems()}
# Return correct subclass for document type
if class_name != cls._class_name:
@@ -712,19 +702,20 @@ class BaseDocument(object):
else field.to_python(value))
if field_name != field.db_field:
del data[field.db_field]
except (AttributeError, ValueError), e:
except (AttributeError, ValueError) as e:
errors_dict[field_name] = e
if errors_dict:
errors = "\n".join(["%s - %s" % (k, v)
errors = '\n'.join(['%s - %s' % (k, v)
for k, v in errors_dict.items()])
msg = ("Invalid data to create a `%s` instance.\n%s"
msg = ('Invalid data to create a `%s` instance.\n%s'
% (cls._class_name, errors))
raise InvalidDocumentError(msg)
# In STRICT documents, remove any keys that aren't in cls._fields
if cls.STRICT:
data = dict((k, v)
for k, v in data.iteritems() if k in cls._fields)
data = {k: v for k, v in data.iteritems() if k in cls._fields}
obj = cls(__auto_convert=False, _created=created, __only_fields=only_fields, **data)
obj._changed_fields = changed_fields
if not _auto_dereference:
@@ -734,37 +725,43 @@ class BaseDocument(object):
@classmethod
def _build_index_specs(cls, meta_indexes):
"""Generate and merge the full index specs
"""
"""Generate and merge the full index specs."""
geo_indices = cls._geo_indices()
unique_indices = cls._unique_with_indexes()
index_specs = [cls._build_index_spec(spec)
for spec in meta_indexes]
index_specs = [cls._build_index_spec(spec) for spec in meta_indexes]
def merge_index_specs(index_specs, indices):
"""Helper method for merging index specs."""
if not indices:
return index_specs
spec_fields = [v['fields']
for k, v in enumerate(index_specs)]
# Merge unique_indexes with existing specs
for k, v in enumerate(indices):
if v['fields'] in spec_fields:
index_specs[spec_fields.index(v['fields'])].update(v)
# Create a map of index fields to index spec. We're converting
# the fields from a list to a tuple so that it's hashable.
spec_fields = {
tuple(index['fields']): index for index in index_specs
}
# For each new index, if there's an existing index with the same
# fields list, update the existing spec with all data from the
# new spec.
for new_index in indices:
candidate = spec_fields.get(tuple(new_index['fields']))
if candidate is None:
index_specs.append(new_index)
else:
index_specs.append(v)
candidate.update(new_index)
return index_specs
# Merge geo indexes and unique_with indexes into the meta index specs.
index_specs = merge_index_specs(index_specs, geo_indices)
index_specs = merge_index_specs(index_specs, unique_indices)
return index_specs
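
A self-contained sketch of the merge strategy described above, using hypothetical index specs (fields lists become tuples so they can serve as dict keys):

def merge_index_specs(index_specs, indices):
    spec_fields = {tuple(spec['fields']): spec for spec in index_specs}
    for new_index in indices:
        candidate = spec_fields.get(tuple(new_index['fields']))
        if candidate is None:
            index_specs.append(new_index)
        else:
            candidate.update(new_index)
    return index_specs

specs = [{'fields': [('title', 1)]}]
merged = merge_index_specs(specs, [
    {'fields': [('title', 1)], 'unique': True},  # merged into existing spec
    {'fields': [('point', '2dsphere')]},         # appended as a new spec
])
assert merged == [{'fields': [('title', 1)], 'unique': True},
                  {'fields': [('point', '2dsphere')]}]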
@classmethod
def _build_index_spec(cls, spec):
"""Build a PyMongo index spec from a MongoEngine index spec.
"""
if isinstance(spec, basestring):
"""Build a PyMongo index spec from a MongoEngine index spec."""
if isinstance(spec, six.string_types):
spec = {'fields': [spec]}
elif isinstance(spec, (list, tuple)):
spec = {'fields': list(spec)}
@@ -775,8 +772,7 @@ class BaseDocument(object):
direction = None
# Check to see if we need to include _cls
allow_inheritance = cls._meta.get('allow_inheritance',
ALLOW_INHERITANCE)
allow_inheritance = cls._meta.get('allow_inheritance')
include_cls = (
allow_inheritance and
not spec.get('sparse', False) and
@@ -786,7 +782,7 @@ class BaseDocument(object):
# 733: don't include cls if index_cls is False unless there is an explicit cls with the index
include_cls = include_cls and (spec.get('cls', False) or cls._meta.get('index_cls', True))
if "cls" in spec:
if 'cls' in spec:
spec.pop('cls')
for key in spec['fields']:
# If inherited spec continue
@@ -801,19 +797,19 @@ class BaseDocument(object):
# GEOHAYSTACK from )
# GEO2D from *
direction = pymongo.ASCENDING
if key.startswith("-"):
if key.startswith('-'):
direction = pymongo.DESCENDING
elif key.startswith("$"):
elif key.startswith('$'):
direction = pymongo.TEXT
elif key.startswith("#"):
elif key.startswith('#'):
direction = pymongo.HASHED
elif key.startswith("("):
elif key.startswith('('):
direction = pymongo.GEOSPHERE
elif key.startswith(")"):
elif key.startswith(')'):
direction = pymongo.GEOHAYSTACK
elif key.startswith("*"):
elif key.startswith('*'):
direction = pymongo.GEO2D
if key.startswith(("+", "-", "*", "$", "#", "(", ")")):
if key.startswith(('+', '-', '*', '$', '#', '(', ')')):
key = key[1:]
# Use real field name, do it manually because we need field
@@ -826,7 +822,7 @@ class BaseDocument(object):
parts = []
for field in fields:
try:
if field != "_id":
if field != '_id':
field = field.db_field
except AttributeError:
pass
@@ -845,49 +841,53 @@ class BaseDocument(object):
return spec
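
The prefix parsing above, condensed into a sketch (same prefix set; note that pymongo.GEOHAYSTACK exists in PyMongo 2.x/3.x but was removed in PyMongo 4):

import pymongo

PREFIX_DIRECTIONS = {
    '-': pymongo.DESCENDING,
    '$': pymongo.TEXT,
    '#': pymongo.HASHED,
    '(': pymongo.GEOSPHERE,
    ')': pymongo.GEOHAYSTACK,
    '*': pymongo.GEO2D,
}

def parse_index_key(key):
    """Return (field_name, direction) for a single MongoEngine index key."""
    direction = PREFIX_DIRECTIONS.get(key[0], pymongo.ASCENDING)
    if key[0] in '+-*$#()':
        key = key[1:]
    return key, direction

assert parse_index_key('-created') == ('created', pymongo.DESCENDING)
assert parse_index_key('title') == ('title', pymongo.ASCENDING)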
@classmethod
def _unique_with_indexes(cls, namespace=""):
"""
Find and set unique indexes
"""
def _unique_with_indexes(cls, namespace=''):
"""Find unique indexes in the document schema and return them."""
unique_indexes = []
for field_name, field in cls._fields.items():
sparse = field.sparse
# Generate a list of indexes needed by uniqueness constraints
if field.unique:
unique_fields = [field.db_field]
# Add any unique_with fields to the back of the index spec
if field.unique_with:
if isinstance(field.unique_with, basestring):
if isinstance(field.unique_with, six.string_types):
field.unique_with = [field.unique_with]
# Convert unique_with field names to real field names
unique_with = []
for other_name in field.unique_with:
parts = other_name.split('.')
# Lookup real name
parts = cls._lookup_field(parts)
name_parts = [part.db_field for part in parts]
unique_with.append('.'.join(name_parts))
# Unique field should be required
parts[-1].required = True
sparse = (not sparse and
parts[-1].name not in cls.__dict__)
unique_fields += unique_with
# Add the new index to the list
fields = [("%s%s" % (namespace, f), pymongo.ASCENDING)
for f in unique_fields]
fields = [
('%s%s' % (namespace, f), pymongo.ASCENDING)
for f in unique_fields
]
index = {'fields': fields, 'unique': True, 'sparse': sparse}
unique_indexes.append(index)
if field.__class__.__name__ == "ListField":
if field.__class__.__name__ == 'ListField':
field = field.field
# Grab any embedded document field unique indexes
if (field.__class__.__name__ == "EmbeddedDocumentField" and
if (field.__class__.__name__ == 'EmbeddedDocumentField' and
field.document_type != cls):
field_namespace = "%s." % field_name
field_namespace = '%s.' % field_name
doc_cls = field.document_type
unique_indexes += doc_cls._unique_with_indexes(field_namespace)
@@ -899,8 +899,9 @@ class BaseDocument(object):
geo_indices = []
inspected.append(cls)
geo_field_type_names = ["EmbeddedDocumentField", "GeoPointField",
"PointField", "LineStringField", "PolygonField"]
geo_field_type_names = ('EmbeddedDocumentField', 'GeoPointField',
'PointField', 'LineStringField',
'PolygonField')
geo_field_types = tuple([_import_class(field)
for field in geo_field_type_names])
@@ -908,32 +909,68 @@ class BaseDocument(object):
for field in cls._fields.values():
if not isinstance(field, geo_field_types):
continue
if hasattr(field, 'document_type'):
field_cls = field.document_type
if field_cls in inspected:
continue
if hasattr(field_cls, '_geo_indices'):
geo_indices += field_cls._geo_indices(
inspected, parent_field=field.db_field)
elif field._geo_index:
field_name = field.db_field
if parent_field:
field_name = "%s.%s" % (parent_field, field_name)
geo_indices.append({'fields':
[(field_name, field._geo_index)]})
field_name = '%s.%s' % (parent_field, field_name)
geo_indices.append({
'fields': [(field_name, field._geo_index)]
})
return geo_indices
@classmethod
def _lookup_field(cls, parts):
"""Lookup a field based on its attribute and return a list containing
the field's parents and the field.
"""
"""Given the path to a given field, return a list containing
the Field object associated with that field and all of its parent
Field objects.
ListField = _import_class("ListField")
Args:
parts (str, list, or tuple) - path to the field. Should be a
string for simple fields existing on this document or a list
of strings for a field that exists deeper in embedded documents.
Returns:
A list of Field instances for fields that were found or
strings for sub-fields that weren't.
Example:
>>> user._lookup_field('name')
[<mongoengine.fields.StringField at 0x1119bff50>]
>>> user._lookup_field('roles')
[<mongoengine.fields.EmbeddedDocumentListField at 0x1119ec250>]
>>> user._lookup_field(['roles', 'role'])
[<mongoengine.fields.EmbeddedDocumentListField at 0x1119ec250>,
<mongoengine.fields.StringField at 0x1119ec050>]
>>> user._lookup_field('doesnt_exist')
raises LookUpError
>>> user._lookup_field(['roles', 'doesnt_exist'])
[<mongoengine.fields.EmbeddedDocumentListField at 0x1119ec250>,
'doesnt_exist']
"""
# TODO this method is WAY too complicated. Simplify it.
# TODO don't think returning a string for embedded non-existent fields is desired
ListField = _import_class('ListField')
DynamicField = _import_class('DynamicField')
if not isinstance(parts, (list, tuple)):
parts = [parts]
fields = []
field = None
@@ -943,16 +980,17 @@ class BaseDocument(object):
fields.append(field_name)
continue
# Look up first field from the document
if field is None:
# Look up first field from the document
if field_name == 'pk':
# Deal with "primary key" alias
field_name = cls._meta['id_field']
if field_name in cls._fields:
field = cls._fields[field_name]
elif cls._dynamic:
field = DynamicField(db_field=field_name)
elif cls._meta.get("allow_inheritance", False) or cls._meta.get("abstract", False):
elif cls._meta.get('allow_inheritance') or cls._meta.get('abstract', False):
# 744: in case the field is defined in a subclass
for subcls in cls.__subclasses__():
try:
@@ -965,35 +1003,55 @@ class BaseDocument(object):
else:
raise LookUpError('Cannot resolve field "%s"' % field_name)
else:
raise LookUpError('Cannot resolve field "%s"'
% field_name)
raise LookUpError('Cannot resolve field "%s"' % field_name)
else:
ReferenceField = _import_class('ReferenceField')
GenericReferenceField = _import_class('GenericReferenceField')
# If previous field was a reference, throw an error (we
# cannot look up fields that are on references).
if isinstance(field, (ReferenceField, GenericReferenceField)):
raise LookUpError('Cannot perform join in mongoDB: %s' %
'__'.join(parts))
# If the parent field has a "field" attribute which has a
# lookup_member method, call it to find the field
# corresponding to this iteration.
if hasattr(getattr(field, 'field', None), 'lookup_member'):
new_field = field.field.lookup_member(field_name)
# If the parent field is a DynamicField or if it's part of
# a DynamicDocument, mark current field as a DynamicField
# with db_name equal to the field name.
elif cls._dynamic and (isinstance(field, DynamicField) or
getattr(getattr(field, 'document_type', None), '_dynamic', None)):
new_field = DynamicField(db_field=field_name)
# Else, try to use the parent field's lookup_member method
# to find the subfield.
elif hasattr(field, 'lookup_member'):
new_field = field.lookup_member(field_name)
# Raise a LookUpError if all the other conditions failed.
else:
# Look up subfield on the previous field or raise
try:
new_field = field.lookup_member(field_name)
except AttributeError:
raise LookUpError('Cannot resolve subfield or operator {} '
'on the field {}'.format(
field_name, field.name))
raise LookUpError(
'Cannot resolve subfield or operator {} '
'on the field {}'.format(field_name, field.name)
)
# If current field still wasn't found and the parent field
# is a ComplexBaseField, add the current field name and
# move on.
if not new_field and isinstance(field, ComplexBaseField):
fields.append(field_name)
continue
elif not new_field:
raise LookUpError('Cannot resolve field "%s"'
% field_name)
raise LookUpError('Cannot resolve field "%s"' % field_name)
field = new_field # update field to the new field type
fields.append(field)
return fields
@classmethod

mongoengine/base/fields.py

@@ -4,21 +4,17 @@ import weakref
from bson import DBRef, ObjectId, SON
import pymongo
import six
from mongoengine.base.common import ALLOW_INHERITANCE
from mongoengine.base.datastructures import (
BaseDict, BaseList, EmbeddedDocumentList
)
from mongoengine.base.common import UPDATE_OPERATORS
from mongoengine.base.datastructures import (BaseDict, BaseList,
EmbeddedDocumentList)
from mongoengine.common import _import_class
from mongoengine.errors import ValidationError
__all__ = ("BaseField", "ComplexBaseField",
"ObjectIdField", "GeoJsonBaseField")
UPDATE_OPERATORS = set(['set', 'unset', 'inc', 'dec', 'pop', 'push',
'push_all', 'pull', 'pull_all', 'add_to_set',
'set_on_insert', 'min', 'max'])
__all__ = ('BaseField', 'ComplexBaseField', 'ObjectIdField',
'GeoJsonBaseField')
class BaseField(object):
@@ -73,7 +69,7 @@ class BaseField(object):
self.db_field = (db_field or name) if not primary_key else '_id'
if name:
msg = "Fields' 'name' attribute deprecated in favour of 'db_field'"
msg = 'Field\'s "name" attribute deprecated in favour of "db_field"'
warnings.warn(msg, DeprecationWarning)
self.required = required or primary_key
self.default = default
@@ -89,7 +85,7 @@ class BaseField(object):
# Detect and report conflicts between metadata and base properties.
conflicts = set(dir(self)) & set(kwargs)
if conflicts:
raise TypeError("%s already has attribute(s): %s" % (
raise TypeError('%s already has attribute(s): %s' % (
self.__class__.__name__, ', '.join(conflicts)))
# Assign metadata to the instance
@@ -147,25 +143,21 @@ class BaseField(object):
v._instance = weakref.proxy(instance)
instance._data[self.name] = value
def error(self, message="", errors=None, field_name=None):
"""Raises a ValidationError.
"""
def error(self, message='', errors=None, field_name=None):
"""Raise a ValidationError."""
field_name = field_name if field_name else self.name
raise ValidationError(message, errors=errors, field_name=field_name)
def to_python(self, value):
"""Convert a MongoDB-compatible type to a Python type.
"""
"""Convert a MongoDB-compatible type to a Python type."""
return value
def to_mongo(self, value):
"""Convert a Python type to a MongoDB-compatible type.
"""
"""Convert a Python type to a MongoDB-compatible type."""
return self.to_python(value)
def _to_mongo_safe_call(self, value, use_db_field=True, fields=None):
"""A helper method to call to_mongo with proper inputs
"""
"""Helper method to call to_mongo with proper inputs."""
f_inputs = self.to_mongo.__code__.co_varnames
ex_vars = {}
if 'fields' in f_inputs:
@@ -177,15 +169,13 @@ class BaseField(object):
return self.to_mongo(value, **ex_vars)
def prepare_query_value(self, op, value):
"""Prepare a value that is being used in a query for PyMongo.
"""
"""Prepare a value that is being used in a query for PyMongo."""
if op in UPDATE_OPERATORS:
self.validate(value)
return value
def validate(self, value, clean=True):
"""Perform validation on a value.
"""
"""Perform validation on a value."""
pass
def _validate_choices(self, value):
@@ -200,11 +190,13 @@ class BaseField(object):
if isinstance(value, (Document, EmbeddedDocument)):
if not any(isinstance(value, c) for c in choice_list):
self.error(
'Value must be instance of %s' % unicode(choice_list)
'Value must be an instance of %s' % (
six.text_type(choice_list)
)
)
# Choices which are types other than Documents
elif value not in choice_list:
self.error('Value must be one of %s' % unicode(choice_list))
self.error('Value must be one of %s' % six.text_type(choice_list))
def _validate(self, value, **kwargs):
# Check the Choices Constraint
@@ -247,8 +239,7 @@ class ComplexBaseField(BaseField):
field = None
def __get__(self, instance, owner):
"""Descriptor to automatically dereference references.
"""
"""Descriptor to automatically dereference references."""
if instance is None:
# Document class being used rather than a document object
return self
@@ -260,7 +251,7 @@ class ComplexBaseField(BaseField):
(self.field is None or isinstance(self.field,
(GenericReferenceField, ReferenceField))))
_dereference = _import_class("DeReference")()
_dereference = _import_class('DeReference')()
self._auto_dereference = instance._fields[self.name]._auto_dereference
if instance._initialised and dereference and instance._data.get(self.name):
@@ -295,9 +286,8 @@ class ComplexBaseField(BaseField):
return value
def to_python(self, value):
"""Convert a MongoDB-compatible type to a Python type.
"""
if isinstance(value, basestring):
"""Convert a MongoDB-compatible type to a Python type."""
if isinstance(value, six.string_types):
return value
if hasattr(value, 'to_python'):
@@ -307,14 +297,14 @@ class ComplexBaseField(BaseField):
if not hasattr(value, 'items'):
try:
is_list = True
value = dict([(k, v) for k, v in enumerate(value)])
value = {k: v for k, v in enumerate(value)}
except TypeError: # Not iterable return the value
return value
if self.field:
self.field._auto_dereference = self._auto_dereference
value_dict = dict([(key, self.field.to_python(item))
for key, item in value.items()])
value_dict = {key: self.field.to_python(item)
for key, item in value.items()}
else:
Document = _import_class('Document')
value_dict = {}
@@ -337,13 +327,12 @@ class ComplexBaseField(BaseField):
return value_dict
def to_mongo(self, value, use_db_field=True, fields=None):
"""Convert a Python type to a MongoDB-compatible type.
"""
Document = _import_class("Document")
EmbeddedDocument = _import_class("EmbeddedDocument")
GenericReferenceField = _import_class("GenericReferenceField")
"""Convert a Python type to a MongoDB-compatible type."""
Document = _import_class('Document')
EmbeddedDocument = _import_class('EmbeddedDocument')
GenericReferenceField = _import_class('GenericReferenceField')
if isinstance(value, basestring):
if isinstance(value, six.string_types):
return value
if hasattr(value, 'to_mongo'):
@@ -360,13 +349,15 @@ class ComplexBaseField(BaseField):
if not hasattr(value, 'items'):
try:
is_list = True
value = dict([(k, v) for k, v in enumerate(value)])
value = {k: v for k, v in enumerate(value)}
except TypeError: # Not iterable return the value
return value
if self.field:
value_dict = dict([(key, self.field._to_mongo_safe_call(item, use_db_field, fields))
for key, item in value.iteritems()])
value_dict = {
key: self.field._to_mongo_safe_call(item, use_db_field, fields)
for key, item in value.iteritems()
}
else:
value_dict = {}
for k, v in value.iteritems():
@@ -380,9 +371,7 @@ class ComplexBaseField(BaseField):
# any _cls data so make it a generic reference allows
# us to dereference
meta = getattr(v, '_meta', {})
allow_inheritance = (
meta.get('allow_inheritance', ALLOW_INHERITANCE)
is True)
allow_inheritance = meta.get('allow_inheritance')
if not allow_inheritance and not self.field:
value_dict[k] = GenericReferenceField().to_mongo(v)
else:
@@ -404,8 +393,7 @@ class ComplexBaseField(BaseField):
return value_dict
def validate(self, value):
"""If field is provided ensure the value is valid.
"""
"""If field is provided ensure the value is valid."""
errors = {}
if self.field:
if hasattr(value, 'iteritems') or hasattr(value, 'items'):
@@ -415,9 +403,9 @@ class ComplexBaseField(BaseField):
for k, v in sequence:
try:
self.field._validate(v)
except ValidationError, error:
except ValidationError as error:
errors[k] = error.errors or error
except (ValueError, AssertionError), error:
except (ValueError, AssertionError) as error:
errors[k] = error
if errors:
@@ -443,8 +431,7 @@ class ComplexBaseField(BaseField):
class ObjectIdField(BaseField):
"""A field wrapper around MongoDB's ObjectIds.
"""
"""A field wrapper around MongoDB's ObjectIds."""
def to_python(self, value):
try:
@@ -457,10 +444,10 @@ class ObjectIdField(BaseField):
def to_mongo(self, value):
if not isinstance(value, ObjectId):
try:
return ObjectId(unicode(value))
except Exception, e:
return ObjectId(six.text_type(value))
except Exception as e:
# e.message attribute has been deprecated since Python 2.6
self.error(unicode(e))
self.error(six.text_type(e))
return value
def prepare_query_value(self, op, value):
@@ -468,7 +455,7 @@ class ObjectIdField(BaseField):
def validate(self, value):
try:
ObjectId(unicode(value))
ObjectId(six.text_type(value))
except Exception:
self.error('Invalid Object ID')
@@ -480,21 +467,20 @@ class GeoJsonBaseField(BaseField):
"""
_geo_index = pymongo.GEOSPHERE
_type = "GeoBase"
_type = 'GeoBase'
def __init__(self, auto_index=True, *args, **kwargs):
"""
:param bool auto_index: Automatically create a "2dsphere" index.\
:param bool auto_index: Automatically create a '2dsphere' index.\
Defaults to `True`.
"""
self._name = "%sField" % self._type
self._name = '%sField' % self._type
if not auto_index:
self._geo_index = False
super(GeoJsonBaseField, self).__init__(*args, **kwargs)
def validate(self, value):
"""Validate the GeoJson object based on its type
"""
"""Validate the GeoJson object based on its type."""
if isinstance(value, dict):
if set(value.keys()) == set(['type', 'coordinates']):
if value['type'] != self._type:
@@ -509,7 +495,7 @@ class GeoJsonBaseField(BaseField):
self.error('%s can only accept lists of [x, y]' % self._name)
return
validate = getattr(self, "_validate_%s" % self._type.lower())
validate = getattr(self, '_validate_%s' % self._type.lower())
error = validate(value)
if error:
self.error(error)
@@ -522,7 +508,7 @@ class GeoJsonBaseField(BaseField):
try:
value[0][0][0]
except (TypeError, IndexError):
return "Invalid Polygon must contain at least one valid linestring"
return 'Invalid Polygon must contain at least one valid linestring'
errors = []
for val in value:
@@ -533,12 +519,12 @@ class GeoJsonBaseField(BaseField):
errors.append(error)
if errors:
if top_level:
return "Invalid Polygon:\n%s" % ", ".join(errors)
return 'Invalid Polygon:\n%s' % ', '.join(errors)
else:
return "%s" % ", ".join(errors)
return '%s' % ', '.join(errors)
def _validate_linestring(self, value, top_level=True):
"""Validates a linestring"""
"""Validate a linestring."""
if not isinstance(value, (list, tuple)):
return 'LineStrings must contain list of coordinate pairs'
@@ -546,7 +532,7 @@ class GeoJsonBaseField(BaseField):
try:
value[0][0]
except (TypeError, IndexError):
return "Invalid LineString must contain at least one valid point"
return 'Invalid LineString must contain at least one valid point'
errors = []
for val in value:
@@ -555,19 +541,19 @@ class GeoJsonBaseField(BaseField):
errors.append(error)
if errors:
if top_level:
return "Invalid LineString:\n%s" % ", ".join(errors)
return 'Invalid LineString:\n%s' % ', '.join(errors)
else:
return "%s" % ", ".join(errors)
return '%s' % ', '.join(errors)
def _validate_point(self, value):
"""Validate each set of coords"""
if not isinstance(value, (list, tuple)):
return 'Points must be a list of coordinate pairs'
elif not len(value) == 2:
return "Value (%s) must be a two-dimensional point" % repr(value)
return 'Value (%s) must be a two-dimensional point' % repr(value)
elif (not isinstance(value[0], (float, int)) or
not isinstance(value[1], (float, int))):
return "Both values (%s) in point must be float or int" % repr(value)
return 'Both values (%s) in point must be float or int' % repr(value)
def _validate_multipoint(self, value):
if not isinstance(value, (list, tuple)):
@@ -577,7 +563,7 @@ class GeoJsonBaseField(BaseField):
try:
value[0][0]
except (TypeError, IndexError):
return "Invalid MultiPoint must contain at least one valid point"
return 'Invalid MultiPoint must contain at least one valid point'
errors = []
for point in value:
@@ -586,7 +572,7 @@ class GeoJsonBaseField(BaseField):
errors.append(error)
if errors:
return "%s" % ", ".join(errors)
return '%s' % ', '.join(errors)
def _validate_multilinestring(self, value, top_level=True):
if not isinstance(value, (list, tuple)):
@@ -596,7 +582,7 @@ class GeoJsonBaseField(BaseField):
try:
value[0][0][0]
except (TypeError, IndexError):
return "Invalid MultiLineString must contain at least one valid linestring"
return 'Invalid MultiLineString must contain at least one valid linestring'
errors = []
for linestring in value:
@@ -606,9 +592,9 @@ class GeoJsonBaseField(BaseField):
if errors:
if top_level:
return "Invalid MultiLineString:\n%s" % ", ".join(errors)
return 'Invalid MultiLineString:\n%s' % ', '.join(errors)
else:
return "%s" % ", ".join(errors)
return '%s' % ', '.join(errors)
def _validate_multipolygon(self, value):
if not isinstance(value, (list, tuple)):
@@ -618,7 +604,7 @@ class GeoJsonBaseField(BaseField):
try:
value[0][0][0][0]
except (TypeError, IndexError):
return "Invalid MultiPolygon must contain at least one valid Polygon"
return 'Invalid MultiPolygon must contain at least one valid Polygon'
errors = []
for polygon in value:
@@ -627,9 +613,9 @@ class GeoJsonBaseField(BaseField):
errors.append(error)
if errors:
return "Invalid MultiPolygon:\n%s" % ", ".join(errors)
return 'Invalid MultiPolygon:\n%s' % ', '.join(errors)
def to_mongo(self, value):
if isinstance(value, dict):
return value
return SON([("type", self._type), ("coordinates", value)])
return SON([('type', self._type), ('coordinates', value)])
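
For reference, what the conversion above produces for a point (a sketch; PointField is the concrete GeoJsonBaseField subclass with _type == 'Point', and to_mongo serializes purely in memory, so no connection is needed):

from mongoengine import Document
from mongoengine.fields import PointField

class Place(Document):
    location = PointField()

place = Place(location=[40.34, -105.68])
# PointField.to_mongo wraps the raw coordinates in a GeoJSON SON:
#     SON([('type', 'Point'), ('coordinates', [40.34, -105.68])])
print(place.to_mongo()['location'])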

mongoengine/base/metaclasses.py

@@ -1,10 +1,11 @@
import warnings
from mongoengine.base.common import ALLOW_INHERITANCE, _document_registry
import six
from mongoengine.base.common import _document_registry
from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField
from mongoengine.common import _import_class
from mongoengine.errors import InvalidDocumentError
from mongoengine.python_support import PY3
from mongoengine.queryset import (DO_NOTHING, DoesNotExist,
MultipleObjectsReturned,
QuerySetManager)
@@ -45,7 +46,8 @@ class DocumentMetaclass(type):
attrs['_meta'] = meta
attrs['_meta']['abstract'] = False # 789: EmbeddedDocument shouldn't inherit abstract
if attrs['_meta'].get('allow_inheritance', ALLOW_INHERITANCE):
# If allow_inheritance is True, add a "_cls" string field to the attrs
if attrs['_meta'].get('allow_inheritance'):
StringField = _import_class('StringField')
attrs['_cls'] = StringField()
@@ -87,16 +89,17 @@ class DocumentMetaclass(type):
# Ensure no duplicate db_fields
duplicate_db_fields = [k for k, v in field_names.items() if v > 1]
if duplicate_db_fields:
msg = ("Multiple db_fields defined for: %s " %
", ".join(duplicate_db_fields))
msg = ('Multiple db_fields defined for: %s ' %
', '.join(duplicate_db_fields))
raise InvalidDocumentError(msg)
# Set _fields and db_field maps
attrs['_fields'] = doc_fields
attrs['_db_field_map'] = dict([(k, getattr(v, 'db_field', k))
for k, v in doc_fields.iteritems()])
attrs['_reverse_db_field_map'] = dict(
(v, k) for k, v in attrs['_db_field_map'].iteritems())
attrs['_db_field_map'] = {k: getattr(v, 'db_field', k)
for k, v in doc_fields.items()}
attrs['_reverse_db_field_map'] = {
v: k for k, v in attrs['_db_field_map'].items()
}
attrs['_fields_ordered'] = tuple(i[1] for i in sorted(
(v.creation_counter, v.name)
@@ -116,10 +119,8 @@ class DocumentMetaclass(type):
if hasattr(base, '_meta'):
# Warn if allow_inheritance isn't set and prevent
# inheritance of classes where inheritance is set to False
allow_inheritance = base._meta.get('allow_inheritance',
ALLOW_INHERITANCE)
if (allow_inheritance is not True and
not base._meta.get('abstract')):
allow_inheritance = base._meta.get('allow_inheritance')
if not allow_inheritance and not base._meta.get('abstract'):
raise ValueError('Document %s may not be subclassed' %
base.__name__)
@@ -161,7 +162,7 @@ class DocumentMetaclass(type):
# module continues to use im_func and im_self, so the code below
# copies __func__ into im_func and __self__ into im_self for
# classmethod objects in Document derived classes.
if PY3:
if six.PY3:
for val in new_class.__dict__.values():
if isinstance(val, classmethod):
f = val.__get__(new_class)
@@ -179,11 +180,11 @@ class DocumentMetaclass(type):
if isinstance(f, CachedReferenceField):
if issubclass(new_class, EmbeddedDocument):
raise InvalidDocumentError(
"CachedReferenceFields is not allowed in EmbeddedDocuments")
raise InvalidDocumentError('CachedReferenceFields is not '
'allowed in EmbeddedDocuments')
if not f.document_type:
raise InvalidDocumentError(
"Document is not available to sync")
'Document is not available to sync')
if f.auto_sync:
f.start_listener()
@@ -195,8 +196,8 @@ class DocumentMetaclass(type):
'reverse_delete_rule',
DO_NOTHING)
if isinstance(f, DictField) and delete_rule != DO_NOTHING:
msg = ("Reverse delete rules are not supported "
"for %s (field: %s)" %
msg = ('Reverse delete rules are not supported '
'for %s (field: %s)' %
(field.__class__.__name__, field.name))
raise InvalidDocumentError(msg)
@@ -204,16 +205,16 @@ class DocumentMetaclass(type):
if delete_rule != DO_NOTHING:
if issubclass(new_class, EmbeddedDocument):
msg = ("Reverse delete rules are not supported for "
"EmbeddedDocuments (field: %s)" % field.name)
msg = ('Reverse delete rules are not supported for '
'EmbeddedDocuments (field: %s)' % field.name)
raise InvalidDocumentError(msg)
f.document_type.register_delete_rule(new_class,
field.name, delete_rule)
if (field.name and hasattr(Document, field.name) and
EmbeddedDocument not in new_class.mro()):
msg = ("%s is a document method and not a valid "
"field name" % field.name)
msg = ('%s is a document method and not a valid '
'field name' % field.name)
raise InvalidDocumentError(msg)
return new_class
@@ -271,6 +272,11 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
'index_drop_dups': False,
'index_opts': None,
'delete_rules': None,
# allow_inheritance can be True, False, and None. True means
# "allow inheritance", False means "don't allow inheritance",
# None means "do whatever your parent does, or don't allow
# inheritance if you're a top-level class".
'allow_inheritance': None,
}
attrs['_is_base_cls'] = True
@@ -303,7 +309,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
# If parent wasn't an abstract class
if (parent_doc_cls and 'collection' in attrs.get('_meta', {}) and
not parent_doc_cls._meta.get('abstract', True)):
msg = "Trying to set a collection on a subclass (%s)" % name
msg = 'Trying to set a collection on a subclass (%s)' % name
warnings.warn(msg, SyntaxWarning)
del attrs['_meta']['collection']
@@ -311,7 +317,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
if attrs.get('_is_base_cls') or attrs['_meta'].get('abstract'):
if (parent_doc_cls and
not parent_doc_cls._meta.get('abstract', False)):
msg = "Abstract document cannot have non-abstract base"
msg = 'Abstract document cannot have non-abstract base'
raise ValueError(msg)
return super_new(cls, name, bases, attrs)
@@ -334,12 +340,16 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
meta.merge(attrs.get('_meta', {})) # Top level meta
# Only simple classes (direct subclasses of Document)
# may set allow_inheritance to False
# Only simple classes (i.e. direct subclasses of Document) may set
# allow_inheritance to False. If the base Document allows inheritance,
# none of its subclasses can override allow_inheritance to False.
simple_class = all([b._meta.get('abstract')
for b in flattened_bases if hasattr(b, '_meta')])
if (not simple_class and meta['allow_inheritance'] is False and
not meta['abstract']):
if (
not simple_class and
meta['allow_inheritance'] is False and
not meta['abstract']
):
raise ValueError('Only direct subclasses of Document may set '
'"allow_inheritance" to False')

mongoengine/connection.py

@@ -1,7 +1,9 @@
from pymongo import MongoClient, ReadPreference, uri_parser
from mongoengine.python_support import (IS_PYMONGO_3, str_types)
import six
__all__ = ['ConnectionError', 'connect', 'register_connection',
from mongoengine.python_support import IS_PYMONGO_3
__all__ = ['MongoEngineConnectionError', 'connect', 'register_connection',
'DEFAULT_CONNECTION_NAME']
@@ -14,7 +16,10 @@ else:
READ_PREFERENCE = False
class ConnectionError(Exception):
class MongoEngineConnectionError(Exception):
"""Error raised when the database connection can't be established or
when a connection with a requested alias can't be retrieved.
"""
pass
@@ -50,8 +55,6 @@ def register_connection(alias, name=None, host=None, port=None,
.. versionchanged:: 0.10.6 - added mongomock support
"""
global _connection_settings
conn_settings = {
'name': name or 'test',
'host': host or 'localhost',
@@ -66,7 +69,7 @@ def register_connection(alias, name=None, host=None, port=None,
# Handle uri style connections
conn_host = conn_settings['host']
# host can be a list or a string, so if string, force to a list
if isinstance(conn_host, str_types):
if isinstance(conn_host, six.string_types):
conn_host = [conn_host]
resolved_hosts = []
@@ -111,9 +114,7 @@ def register_connection(alias, name=None, host=None, port=None,
def disconnect(alias=DEFAULT_CONNECTION_NAME):
global _connections
global _dbs
"""Close the connection with a given alias."""
if alias in _connections:
get_connection(alias=alias).close()
del _connections[alias]
@@ -122,71 +123,100 @@ def disconnect(alias=DEFAULT_CONNECTION_NAME):
def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
global _connections
"""Return a connection with a given alias."""
# Connect to the database if not already connected
if reconnect:
disconnect(alias)
if alias not in _connections:
if alias not in _connection_settings:
msg = 'Connection with alias "%s" has not been defined' % alias
if alias == DEFAULT_CONNECTION_NAME:
msg = 'You have not defined a default connection'
raise ConnectionError(msg)
conn_settings = _connection_settings[alias].copy()
# If the requested alias already exists in the _connections list, return
# it immediately.
if alias in _connections:
return _connections[alias]
conn_settings.pop('name', None)
conn_settings.pop('username', None)
conn_settings.pop('password', None)
conn_settings.pop('authentication_source', None)
conn_settings.pop('authentication_mechanism', None)
is_mock = conn_settings.pop('is_mock', None)
if is_mock:
# Use MongoClient from mongomock
try:
import mongomock
except ImportError:
raise RuntimeError('You need mongomock installed '
'to mock MongoEngine.')
connection_class = mongomock.MongoClient
# Validate that the requested alias exists in the _connection_settings.
# Raise MongoEngineConnectionError if it doesn't.
if alias not in _connection_settings:
if alias == DEFAULT_CONNECTION_NAME:
msg = 'You have not defined a default connection'
else:
# Use MongoClient from pymongo
connection_class = MongoClient
msg = 'Connection with alias "%s" has not been defined' % alias
raise MongoEngineConnectionError(msg)
def _clean_settings(settings_dict):
irrelevant_fields = set([
'name', 'username', 'password', 'authentication_source',
'authentication_mechanism'
])
return {
k: v for k, v in settings_dict.items()
if k not in irrelevant_fields
}
# Retrieve a copy of the connection settings associated with the requested
# alias and remove the database name and authentication info (we don't
# care about them at this point).
conn_settings = _clean_settings(_connection_settings[alias].copy())
# Determine if we should use PyMongo's or mongomock's MongoClient.
is_mock = conn_settings.pop('is_mock', False)
if is_mock:
try:
import mongomock
except ImportError:
raise RuntimeError('You need mongomock installed to mock '
'MongoEngine.')
connection_class = mongomock.MongoClient
else:
connection_class = MongoClient
# Handle replica set connections
if 'replicaSet' in conn_settings:
# Discard port since it can't be used on MongoReplicaSetClient
conn_settings.pop('port', None)
# Discard replicaSet if not base string
if not isinstance(conn_settings['replicaSet'], basestring):
conn_settings.pop('replicaSet', None)
# Discard replicaSet if it's not a string
if not isinstance(conn_settings['replicaSet'], six.string_types):
del conn_settings['replicaSet']
# For replica set connections with PyMongo 2.x, use
# MongoReplicaSetClient.
# TODO remove this once we stop supporting PyMongo 2.x.
if not IS_PYMONGO_3:
connection_class = MongoReplicaSetClient
conn_settings['hosts_or_uri'] = conn_settings.pop('host', None)
try:
connection = None
# check for shared connections
connection_settings_iterator = (
(db_alias, settings.copy()) for db_alias, settings in _connection_settings.iteritems())
for db_alias, connection_settings in connection_settings_iterator:
connection_settings.pop('name', None)
connection_settings.pop('username', None)
connection_settings.pop('password', None)
connection_settings.pop('authentication_source', None)
connection_settings.pop('authentication_mechanism', None)
if conn_settings == connection_settings and _connections.get(db_alias, None):
connection = _connections[db_alias]
break
# Iterate over all of the connection settings and if a connection with
# the same parameters is already established, use it instead of creating
# a new one.
existing_connection = None
connection_settings_iterator = (
(db_alias, settings.copy())
for db_alias, settings in _connection_settings.items()
)
for db_alias, connection_settings in connection_settings_iterator:
connection_settings = _clean_settings(connection_settings)
if conn_settings == connection_settings and _connections.get(db_alias):
existing_connection = _connections[db_alias]
break
# If an existing connection was found, assign it to the new alias
if existing_connection:
_connections[alias] = existing_connection
else:
# Otherwise, create the new connection for this alias. Raise
# MongoEngineConnectionError if it can't be established.
try:
_connections[alias] = connection_class(**conn_settings)
except Exception as e:
raise MongoEngineConnectionError(
'Cannot connect to database %s :\n%s' % (alias, e))
_connections[alias] = connection if connection else connection_class(**conn_settings)
except Exception, e:
raise ConnectionError("Cannot connect to database %s :\n%s" % (alias, e))
return _connections[alias]
def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
global _dbs
if reconnect:
disconnect(alias)
@@ -217,7 +247,6 @@ def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs):
.. versionchanged:: 0.6 - added multiple database support.
"""
global _connections
if alias not in _connections:
register_connection(alias, db, **kwargs)
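
A usage sketch of the alias mechanics refactored above (hypothetical database names; assumes a MongoDB server on localhost). Since settings are compared with the database name and auth info stripped, two aliases that point at the same server end up sharing one MongoClient:

from mongoengine import connect
from mongoengine.connection import get_connection

connect('maindb')                      # registers the 'default' alias
connect('reportsdb', alias='reports')  # registers a second alias

# Same host and port, so the underlying client is shared:
assert get_connection('default') is get_connection('reports')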

mongoengine/context_managers.py

@@ -2,12 +2,12 @@ from mongoengine.common import _import_class
from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
__all__ = ("switch_db", "switch_collection", "no_dereference",
"no_sub_classes", "query_counter")
__all__ = ('switch_db', 'switch_collection', 'no_dereference',
'no_sub_classes', 'query_counter')
class switch_db(object):
""" switch_db alias context manager.
"""switch_db alias context manager.
Example ::
@@ -18,15 +18,14 @@ class switch_db(object):
class Group(Document):
name = StringField()
Group(name="test").save() # Saves in the default db
Group(name='test').save() # Saves in the default db
with switch_db(Group, 'testdb-1') as Group:
Group(name="hello testdb!").save() # Saves in testdb-1
Group(name='hello testdb!').save() # Saves in testdb-1
"""
def __init__(self, cls, db_alias):
""" Construct the switch_db context manager
"""Construct the switch_db context manager
:param cls: the class to change the registered db
:param db_alias: the name of the specific database to use
@@ -34,37 +33,36 @@ class switch_db(object):
self.cls = cls
self.collection = cls._get_collection()
self.db_alias = db_alias
self.ori_db_alias = cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME)
self.ori_db_alias = cls._meta.get('db_alias', DEFAULT_CONNECTION_NAME)
def __enter__(self):
""" change the db_alias and clear the cached collection """
self.cls._meta["db_alias"] = self.db_alias
"""Change the db_alias and clear the cached collection."""
self.cls._meta['db_alias'] = self.db_alias
self.cls._collection = None
return self.cls
def __exit__(self, t, value, traceback):
""" Reset the db_alias and collection """
self.cls._meta["db_alias"] = self.ori_db_alias
"""Reset the db_alias and collection."""
self.cls._meta['db_alias'] = self.ori_db_alias
self.cls._collection = self.collection
class switch_collection(object):
""" switch_collection alias context manager.
"""switch_collection alias context manager.
Example ::
class Group(Document):
name = StringField()
Group(name="test").save() # Saves in the default db
Group(name='test').save() # Saves in the default db
with switch_collection(Group, 'group1') as Group:
Group(name="hello testdb!").save() # Saves in group1 collection
Group(name='hello testdb!').save() # Saves in group1 collection
"""
def __init__(self, cls, collection_name):
""" Construct the switch_collection context manager
"""Construct the switch_collection context manager.
:param cls: the class to change the registered db
:param collection_name: the name of the collection to use
@@ -75,7 +73,7 @@ class switch_collection(object):
self.collection_name = collection_name
def __enter__(self):
""" change the _get_collection_name and clear the cached collection """
"""Change the _get_collection_name and clear the cached collection."""
@classmethod
def _get_collection_name(cls):
@@ -86,24 +84,23 @@ class switch_collection(object):
return self.cls
def __exit__(self, t, value, traceback):
""" Reset the collection """
"""Reset the collection."""
self.cls._collection = self.ori_collection
self.cls._get_collection_name = self.ori_get_collection_name
class no_dereference(object):
""" no_dereference context manager.
"""no_dereference context manager.
Turns off all dereferencing in Documents for the duration of the context
manager::
with no_dereference(Group) as Group:
Group.objects.find()
"""
def __init__(self, cls):
""" Construct the no_dereference context manager.
"""Construct the no_dereference context manager.
:param cls: the class to turn dereferencing off on
"""
@@ -119,103 +116,102 @@ class no_dereference(object):
ComplexBaseField))]
def __enter__(self):
""" change the objects default and _auto_dereference values"""
"""Change the objects default and _auto_dereference values."""
for field in self.deref_fields:
self.cls._fields[field]._auto_dereference = False
return self.cls
def __exit__(self, t, value, traceback):
""" Reset the default and _auto_dereference values"""
"""Reset the default and _auto_dereference values."""
for field in self.deref_fields:
self.cls._fields[field]._auto_dereference = True
return self.cls
class no_sub_classes(object):
""" no_sub_classes context manager.
"""no_sub_classes context manager.
Only returns instances of this class and no sub (inherited) classes::
with no_sub_classes(Group) as Group:
Group.objects.find()
"""
def __init__(self, cls):
""" Construct the no_sub_classes context manager.
"""Construct the no_sub_classes context manager.
:param cls: the class to turn querying sub classes on
"""
self.cls = cls
def __enter__(self):
""" change the objects default and _auto_dereference values"""
"""Change the objects default and _auto_dereference values."""
self.cls._all_subclasses = self.cls._subclasses
self.cls._subclasses = (self.cls,)
return self.cls
def __exit__(self, t, value, traceback):
""" Reset the default and _auto_dereference values"""
"""Reset the default and _auto_dereference values."""
self.cls._subclasses = self.cls._all_subclasses
delattr(self.cls, '_all_subclasses')
return self.cls
class query_counter(object):
""" Query_counter context manager to get the number of queries. """
"""Query_counter context manager to get the number of queries."""
def __init__(self):
""" Construct the query_counter. """
"""Construct the query_counter."""
self.counter = 0
self.db = get_db()
def __enter__(self):
""" On every with block we need to drop the profile collection. """
"""On every with block we need to drop the profile collection."""
self.db.set_profiling_level(0)
self.db.system.profile.drop()
self.db.set_profiling_level(2)
return self
def __exit__(self, t, value, traceback):
""" Reset the profiling level. """
"""Reset the profiling level."""
self.db.set_profiling_level(0)
def __eq__(self, value):
""" == Compare querycounter. """
"""== Compare querycounter."""
counter = self._get_count()
return value == counter
def __ne__(self, value):
""" != Compare querycounter. """
"""!= Compare querycounter."""
return not self.__eq__(value)
def __lt__(self, value):
""" < Compare querycounter. """
"""< Compare querycounter."""
return self._get_count() < value
def __le__(self, value):
""" <= Compare querycounter. """
"""<= Compare querycounter."""
return self._get_count() <= value
def __gt__(self, value):
""" > Compare querycounter. """
"""> Compare querycounter."""
return self._get_count() > value
def __ge__(self, value):
""" >= Compare querycounter. """
""">= Compare querycounter."""
return self._get_count() >= value
def __int__(self):
""" int representation. """
"""int representation."""
return self._get_count()
def __repr__(self):
""" repr query_counter as the number of queries. """
"""repr query_counter as the number of queries."""
return u"%s" % self._get_count()
def _get_count(self):
""" Get the number of queries. """
ignore_query = {"ns": {"$ne": "%s.system.indexes" % self.db.name}}
"""Get the number of queries."""
ignore_query = {'ns': {'$ne': '%s.system.indexes' % self.db.name}}
count = self.db.system.profile.find(ignore_query).count() - self.counter
self.counter += 1
return count
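A minimal usage sketch for query_counter (Group is a hypothetical document class; assumes a registered default connection):

from mongoengine.context_managers import query_counter

with query_counter() as q:
    assert q == 0
    Group.objects.first()  # issues one query
    assert q == 1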

View File

@@ -1,14 +1,12 @@
from bson import DBRef, SON
import six
from .base import (
BaseDict, BaseList, EmbeddedDocumentList,
TopLevelDocumentMetaclass, get_document
)
from .connection import get_db
from .document import Document, EmbeddedDocument
from .fields import DictField, ListField, MapField, ReferenceField
from .python_support import txt_type
from .queryset import QuerySet
from mongoengine.base import (BaseDict, BaseList, EmbeddedDocumentList,
TopLevelDocumentMetaclass, get_document)
from mongoengine.connection import get_db
from mongoengine.document import Document, EmbeddedDocument
from mongoengine.fields import DictField, ListField, MapField, ReferenceField
from mongoengine.queryset import QuerySet
class DeReference(object):
@@ -25,7 +23,7 @@ class DeReference(object):
:class:`~mongoengine.base.ComplexBaseField`
:param get: A boolean determining if being called by __get__
"""
if items is None or isinstance(items, basestring):
if items is None or isinstance(items, six.string_types):
return items
# cheapest way to convert a queryset to a list
@@ -68,11 +66,11 @@ class DeReference(object):
items = _get_items(items)
else:
items = dict([
(k, field.to_python(v))
if not isinstance(v, (DBRef, Document)) else (k, v)
for k, v in items.iteritems()]
)
items = {
k: (v if isinstance(v, (DBRef, Document))
else field.to_python(v))
for k, v in items.iteritems()
}
self.reference_map = self._find_references(items)
self.object_map = self._fetch_objects(doc_type=doc_type)
@@ -90,14 +88,14 @@ class DeReference(object):
return reference_map
# Determine the iterator to use
if not hasattr(items, 'items'):
iterator = enumerate(items)
if isinstance(items, dict):
iterator = items.values()
else:
iterator = items.iteritems()
iterator = items
# Recursively find dbreferences
depth += 1
for k, item in iterator:
for item in iterator:
if isinstance(item, (Document, EmbeddedDocument)):
for field_name, field in item._fields.iteritems():
v = item._data.get(field_name, None)
@@ -151,7 +149,7 @@ class DeReference(object):
references = get_db()[collection].find({'_id': {'$in': refs}})
for ref in references:
if '_cls' in ref:
doc = get_document(ref["_cls"])._from_son(ref)
doc = get_document(ref['_cls'])._from_son(ref)
elif doc_type is None:
doc = get_document(
''.join(x.capitalize()
@@ -218,7 +216,7 @@ class DeReference(object):
if k in self.object_map and not is_list:
data[k] = self.object_map[k]
elif isinstance(v, (Document, EmbeddedDocument)):
for field_name, field in v._fields.iteritems():
for field_name in v._fields:
v = data[k]._data.get(field_name, None)
if isinstance(v, DBRef):
data[k]._data[field_name] = self.object_map.get(
@@ -227,7 +225,7 @@ class DeReference(object):
data[k]._data[field_name] = self.object_map.get(
(v['_ref'].collection, v['_ref'].id), v)
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
item_name = txt_type("{0}.{1}.{2}").format(name, k, field_name)
item_name = six.text_type('{0}.{1}.{2}').format(name, k, field_name)
data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=item_name)
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
item_name = '%s.%s' % (name, k) if name else name

View File

@@ -4,18 +4,12 @@ import warnings
from bson.dbref import DBRef
import pymongo
from pymongo.read_preferences import ReadPreference
import six
from mongoengine import signals
from mongoengine.base import (
ALLOW_INHERITANCE,
BaseDict,
BaseDocument,
BaseList,
DocumentMetaclass,
EmbeddedDocumentList,
TopLevelDocumentMetaclass,
get_document
)
from mongoengine.base import (BaseDict, BaseDocument, BaseList,
DocumentMetaclass, EmbeddedDocumentList,
TopLevelDocumentMetaclass, get_document)
from mongoengine.common import _import_class
from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
from mongoengine.context_managers import switch_collection, switch_db
@@ -31,12 +25,10 @@ __all__ = ('Document', 'EmbeddedDocument', 'DynamicDocument',
def includes_cls(fields):
""" Helper function used for ensuring and comparing indexes
"""
"""Helper function used for ensuring and comparing indexes."""
first_field = None
if len(fields):
if isinstance(fields[0], basestring):
if isinstance(fields[0], six.string_types):
first_field = fields[0]
elif isinstance(fields[0], (list, tuple)) and len(fields[0]):
first_field = fields[0][0]
@@ -57,9 +49,8 @@ class EmbeddedDocument(BaseDocument):
to create a specialised version of the embedded document that will be
stored in the same collection. To facilitate this behaviour a `_cls`
field is added to documents (hidden through the MongoEngine interface).
To disable this behaviour and remove the dependence on the presence of
`_cls` set :attr:`allow_inheritance` to ``False`` in the :attr:`meta`
dictionary.
To enable this behaviour set :attr:`allow_inheritance` to ``True`` in the
:attr:`meta` dictionary.
"""
__slots__ = ('_instance', )
@@ -82,6 +73,15 @@ class EmbeddedDocument(BaseDocument):
def __ne__(self, other):
return not self.__eq__(other)
def to_mongo(self, *args, **kwargs):
data = super(EmbeddedDocument, self).to_mongo(*args, **kwargs)
# remove _id from the SON if it's in it and it's None
if '_id' in data and data['_id'] is None:
del data['_id']
return data
def save(self, *args, **kwargs):
self._instance.save(*args, **kwargs)
@@ -106,9 +106,8 @@ class Document(BaseDocument):
create a specialised version of the document that will be stored in the
same collection. To facilitate this behaviour a `_cls`
field is added to documents (hidden through the MongoEngine interface).
To disable this behaviour and remove the dependence on the presence of
`_cls` set :attr:`allow_inheritance` to ``False`` in the :attr:`meta`
dictionary.
To enable this behaviour, set :attr:`allow_inheritance` to ``True`` in the
:attr:`meta` dictionary.
A :class:`~mongoengine.Document` may use a **Capped Collection** by
specifying :attr:`max_documents` and :attr:`max_size` in the :attr:`meta`
@@ -149,26 +148,22 @@ class Document(BaseDocument):
__slots__ = ('__objects',)
def pk():
"""Primary key alias
"""
@property
def pk(self):
"""Get the primary key."""
if 'id_field' not in self._meta:
return None
return getattr(self, self._meta['id_field'])
def fget(self):
if 'id_field' not in self._meta:
return None
return getattr(self, self._meta['id_field'])
def fset(self, value):
return setattr(self, self._meta['id_field'], value)
return property(fget, fset)
pk = pk()
@pk.setter
def pk(self, value):
"""Set the primary key."""
return setattr(self, self._meta['id_field'], value)
@classmethod
def _get_db(cls):
"""Some Model using other db_alias"""
return get_db(cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME))
return get_db(cls._meta.get('db_alias', DEFAULT_CONNECTION_NAME))
@classmethod
def _get_collection(cls):
@@ -211,7 +206,20 @@ class Document(BaseDocument):
cls.ensure_indexes()
return cls._collection
def modify(self, query={}, **update):
def to_mongo(self, *args, **kwargs):
data = super(Document, self).to_mongo(*args, **kwargs)
# If '_id' is None, try to set it from self._data. If that
# doesn't exist either, remove '_id' from the SON completely.
if data['_id'] is None:
if self._data.get('id') is None:
del data['_id']
else:
data['_id'] = self._data['id']
return data
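As a sketch of the effect (Person is a hypothetical document class): serializing an unsaved document no longer emits a null _id.

from mongoengine import Document, StringField

class Person(Document):
    name = StringField()

son = Person(name='Ada').to_mongo()
assert '_id' not in son  # unsaved, so the null _id was dropped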
def modify(self, query=None, **update):
"""Perform an atomic update of the document in the database and reload
the document object using the updated version.
@@ -225,17 +233,19 @@ class Document(BaseDocument):
database matches the query
:param update: Django-style update keyword arguments
"""
if query is None:
query = {}
if self.pk is None:
raise InvalidDocumentError("The document does not have a primary key.")
raise InvalidDocumentError('The document does not have a primary key.')
id_field = self._meta["id_field"]
id_field = self._meta['id_field']
query = query.copy() if isinstance(query, dict) else query.to_query(self)
if id_field not in query:
query[id_field] = self.pk
elif query[id_field] != self.pk:
raise InvalidQueryError("Invalid document modify query: it must modify only this document.")
raise InvalidQueryError('Invalid document modify query: it must modify only this document.')
updated = self._qs(**query).modify(new=True, **update)
if updated is None:
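For context, a usage sketch of modify() after this change (Person and its fields are hypothetical). The query defaults to this document's primary key and may add extra conditions, but must not target another document:

person = Person.objects.first()
person.modify(set__name='Grace')                  # query=None -> plain PK lookup
person.modify({'name': 'Grace'}, inc__version=1)  # extra conditions are allowed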
@@ -310,7 +320,7 @@ class Document(BaseDocument):
self.validate(clean=clean)
if write_concern is None:
write_concern = {"w": 1}
write_concern = {'w': 1}
doc = self.to_mongo()
@@ -347,7 +357,7 @@ class Document(BaseDocument):
else:
select_dict = {}
select_dict['_id'] = object_id
shard_key = self.__class__._meta.get('shard_key', tuple())
shard_key = self._meta.get('shard_key', tuple())
for k in shard_key:
path = self._lookup_field(k.split('.'))
actual_key = [p.db_field for p in path]
@@ -358,7 +368,7 @@ class Document(BaseDocument):
def is_new_object(last_error):
if last_error is not None:
updated = last_error.get("updatedExisting")
updated = last_error.get('updatedExisting')
if updated is not None:
return not updated
return created
@@ -366,14 +376,14 @@ class Document(BaseDocument):
update_query = {}
if updates:
update_query["$set"] = updates
update_query['$set'] = updates
if removals:
update_query["$unset"] = removals
update_query['$unset'] = removals
if updates or removals:
upsert = save_condition is None
last_error = collection.update(select_dict, update_query,
upsert=upsert, **write_concern)
if not upsert and last_error["n"] == 0:
if not upsert and last_error['n'] == 0:
raise SaveConditionError('Race condition preventing'
' document update detected')
created = is_new_object(last_error)
@@ -384,26 +394,27 @@ class Document(BaseDocument):
if cascade:
kwargs = {
"force_insert": force_insert,
"validate": validate,
"write_concern": write_concern,
"cascade": cascade
'force_insert': force_insert,
'validate': validate,
'write_concern': write_concern,
'cascade': cascade
}
if cascade_kwargs: # Allow granular control over cascades
kwargs.update(cascade_kwargs)
kwargs['_refs'] = _refs
self.cascade_save(**kwargs)
except pymongo.errors.DuplicateKeyError, err:
except pymongo.errors.DuplicateKeyError as err:
message = u'Tried to save duplicate unique keys (%s)'
raise NotUniqueError(message % unicode(err))
except pymongo.errors.OperationFailure, err:
raise NotUniqueError(message % six.text_type(err))
except pymongo.errors.OperationFailure as err:
message = 'Could not save document (%s)'
if re.match('^E1100[01] duplicate key', unicode(err)):
if re.match('^E1100[01] duplicate key', six.text_type(err)):
# E11000 - duplicate key error index
# E11001 - duplicate key on update
message = u'Tried to save duplicate unique keys (%s)'
raise NotUniqueError(message % unicode(err))
raise OperationError(message % unicode(err))
raise NotUniqueError(message % six.text_type(err))
raise OperationError(message % six.text_type(err))
id_field = self._meta['id_field']
if created or id_field not in self._meta.get('shard_key', []):
self[id_field] = self._fields[id_field].to_python(object_id)
@@ -414,10 +425,11 @@ class Document(BaseDocument):
self._created = False
return self
def cascade_save(self, *args, **kwargs):
"""Recursively saves any references /
generic references on the document"""
_refs = kwargs.get('_refs', []) or []
def cascade_save(self, **kwargs):
"""Recursively save any references and generic references on the
document.
"""
_refs = kwargs.get('_refs') or []
ReferenceField = _import_class('ReferenceField')
GenericReferenceField = _import_class('GenericReferenceField')
@@ -443,16 +455,17 @@ class Document(BaseDocument):
@property
def _qs(self):
"""
Returns the queryset to use for updating / reloading / deletions
"""
"""Return the queryset to use for updating / reloading / deletions."""
if not hasattr(self, '__objects'):
self.__objects = QuerySet(self, self._get_collection())
return self.__objects
@property
def _object_key(self):
"""Dict to identify object in collection
"""Get the query dict that can be used to fetch this object from
the database. Most of the time it's a simple PK lookup, but in
case of a sharded collection with a compound shard key, it can
contain a more complex query.
"""
select_dict = {'pk': self.pk}
shard_key = self.__class__._meta.get('shard_key', tuple())
@@ -475,8 +488,8 @@ class Document(BaseDocument):
if self.pk is None:
if kwargs.get('upsert', False):
query = self.to_mongo()
if "_cls" in query:
del query["_cls"]
if '_cls' in query:
del query['_cls']
return self._qs.filter(**query).update_one(**kwargs)
else:
raise OperationError(
@@ -513,7 +526,7 @@ class Document(BaseDocument):
try:
self._qs.filter(
**self._object_key).delete(write_concern=write_concern, _from_doc_delete=True)
except pymongo.errors.OperationFailure, err:
except pymongo.errors.OperationFailure as err:
message = u'Could not delete document (%s)' % err.message
raise OperationError(message)
signals.post_delete.send(self.__class__, document=self, **signal_kwargs)
@@ -601,11 +614,12 @@ class Document(BaseDocument):
if fields and isinstance(fields[0], int):
max_depth = fields[0]
fields = fields[1:]
elif "max_depth" in kwargs:
max_depth = kwargs["max_depth"]
elif 'max_depth' in kwargs:
max_depth = kwargs['max_depth']
if self.pk is None:
raise self.DoesNotExist("Document does not exist")
raise self.DoesNotExist('Document does not exist')
obj = self._qs.read_preference(ReadPreference.PRIMARY).filter(
**self._object_key).only(*fields).limit(
1).select_related(max_depth=max_depth)
@@ -613,7 +627,7 @@ class Document(BaseDocument):
if obj:
obj = obj[0]
else:
raise self.DoesNotExist("Document does not exist")
raise self.DoesNotExist('Document does not exist')
for field in obj._data:
if not fields or field in fields:
@@ -656,7 +670,7 @@ class Document(BaseDocument):
"""Returns an instance of :class:`~bson.dbref.DBRef` useful in
`__raw__` queries."""
if self.pk is None:
msg = "Only saved documents can have a valid dbref"
msg = 'Only saved documents can have a valid dbref'
raise OperationError(msg)
return DBRef(self.__class__._get_collection_name(), self.pk)
@@ -711,7 +725,7 @@ class Document(BaseDocument):
fields = index_spec.pop('fields')
drop_dups = kwargs.get('drop_dups', False)
if IS_PYMONGO_3 and drop_dups:
msg = "drop_dups is deprecated and is removed when using PyMongo 3+."
msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.'
warnings.warn(msg, DeprecationWarning)
elif not IS_PYMONGO_3:
index_spec['drop_dups'] = drop_dups
@@ -737,7 +751,7 @@ class Document(BaseDocument):
will be removed if PyMongo3+ is used
"""
if IS_PYMONGO_3 and drop_dups:
msg = "drop_dups is deprecated and is removed when using PyMongo 3+."
msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.'
warnings.warn(msg, DeprecationWarning)
elif not IS_PYMONGO_3:
kwargs.update({'drop_dups': drop_dups})
@@ -757,7 +771,7 @@ class Document(BaseDocument):
index_opts = cls._meta.get('index_opts') or {}
index_cls = cls._meta.get('index_cls', True)
if IS_PYMONGO_3 and drop_dups:
msg = "drop_dups is deprecated and is removed when using PyMongo 3+."
msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.'
warnings.warn(msg, DeprecationWarning)
collection = cls._get_collection()
@@ -795,8 +809,7 @@ class Document(BaseDocument):
# If _cls is being used (for polymorphism), it needs an index,
# only if another index doesn't begin with _cls
if (index_cls and not cls_indexed and
cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) is True):
if index_cls and not cls_indexed and cls._meta.get('allow_inheritance'):
# we shouldn't pass 'cls' to the collection.ensureIndex options
# because of https://jira.mongodb.org/browse/SERVER-769
@@ -866,16 +879,15 @@ class Document(BaseDocument):
# finish up by appending { '_id': 1 } and { '_cls': 1 }, if needed
if [(u'_id', 1)] not in indexes:
indexes.append([(u'_id', 1)])
if (cls._meta.get('index_cls', True) and
cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) is True):
if cls._meta.get('index_cls', True) and cls._meta.get('allow_inheritance'):
indexes.append([(u'_cls', 1)])
return indexes
@classmethod
def compare_indexes(cls):
""" Compares the indexes defined in MongoEngine with the ones existing
in the database. Returns any missing/extra indexes.
""" Compares the indexes defined in MongoEngine with the ones
existing in the database. Returns any missing/extra indexes.
"""
required = cls.list_indexes()
@@ -919,8 +931,9 @@ class DynamicDocument(Document):
_dynamic = True
def __delattr__(self, *args, **kwargs):
"""Deletes the attribute by setting to None and allowing _delta to unset
it"""
"""Delete the attribute by setting to None and allowing _delta
to unset it.
"""
field_name = args[0]
if field_name in self._dynamic_fields:
setattr(self, field_name, None)
@@ -942,8 +955,9 @@ class DynamicEmbeddedDocument(EmbeddedDocument):
_dynamic = True
def __delattr__(self, *args, **kwargs):
"""Deletes the attribute by setting to None and allowing _delta to unset
it"""
"""Delete the attribute by setting to None and allowing _delta
to unset it.
"""
field_name = args[0]
if field_name in self._fields:
default = self._fields[field_name].default
@@ -985,10 +999,10 @@ class MapReduceDocument(object):
try:
self.key = id_field_type(self.key)
except Exception:
raise Exception("Could not cast key as %s" %
raise Exception('Could not cast key as %s' %
id_field_type.__name__)
if not hasattr(self, "_key_object"):
if not hasattr(self, '_key_object'):
self._key_object = self._document.objects.with_id(self.key)
return self._key_object
return self._key_object

View File

@@ -1,7 +1,6 @@
from collections import defaultdict
from mongoengine.python_support import txt_type
import six
__all__ = ('NotRegistered', 'InvalidDocumentError', 'LookUpError',
'DoesNotExist', 'MultipleObjectsReturned', 'InvalidQueryError',
@@ -71,13 +70,13 @@ class ValidationError(AssertionError):
field_name = None
_message = None
def __init__(self, message="", **kwargs):
def __init__(self, message='', **kwargs):
self.errors = kwargs.get('errors', {})
self.field_name = kwargs.get('field_name')
self.message = message
def __str__(self):
return txt_type(self.message)
return six.text_type(self.message)
def __repr__(self):
return '%s(%s,)' % (self.__class__.__name__, self.message)
@@ -111,17 +110,20 @@ class ValidationError(AssertionError):
errors_dict = {}
if not source:
return errors_dict
if isinstance(source, dict):
for field_name, error in source.iteritems():
errors_dict[field_name] = build_dict(error)
elif isinstance(source, ValidationError) and source.errors:
return build_dict(source.errors)
else:
return unicode(source)
return six.text_type(source)
return errors_dict
if not self.errors:
return {}
return build_dict(self.errors)
def _format_errors(self):
@@ -134,10 +136,10 @@ class ValidationError(AssertionError):
value = ' '.join(
[generate_key(v, k) for k, v in value.iteritems()])
results = "%s.%s" % (prefix, value) if prefix else value
results = '%s.%s' % (prefix, value) if prefix else value
return results
error_dict = defaultdict(list)
for k, v in self.to_dict().iteritems():
error_dict[generate_key(v)].append(k)
return ' '.join(["%s: %s" % (k, v) for k, v in error_dict.iteritems()])
return ' '.join(['%s: %s' % (k, v) for k, v in error_dict.iteritems()])

View File

@@ -3,7 +3,6 @@ import decimal
import itertools
import re
import time
import urllib2
import uuid
import warnings
from operator import itemgetter
@@ -25,13 +24,13 @@ try:
except ImportError:
Int64 = long
from .base import (BaseDocument, BaseField, ComplexBaseField, GeoJsonBaseField,
ObjectIdField, get_document)
from .connection import DEFAULT_CONNECTION_NAME, get_db
from .document import Document, EmbeddedDocument
from .errors import DoesNotExist, ValidationError
from .python_support import PY3, StringIO, bin_type, str_types, txt_type
from .queryset import DO_NOTHING, QuerySet
from mongoengine.base import (BaseDocument, BaseField, ComplexBaseField,
GeoJsonBaseField, ObjectIdField, get_document)
from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
from mongoengine.document import Document, EmbeddedDocument
from mongoengine.errors import DoesNotExist, ValidationError
from mongoengine.python_support import StringIO
from mongoengine.queryset import DO_NOTHING, QuerySet
try:
from PIL import Image, ImageOps
@@ -39,7 +38,7 @@ except ImportError:
Image = None
ImageOps = None
__all__ = [
__all__ = (
'StringField', 'URLField', 'EmailField', 'IntField', 'LongField',
'FloatField', 'DecimalField', 'BooleanField', 'DateTimeField',
'ComplexDateTimeField', 'EmbeddedDocumentField', 'ObjectIdField',
@@ -50,14 +49,14 @@ __all__ = [
'FileField', 'ImageGridFsProxy', 'ImproperlyConfigured', 'ImageField',
'GeoPointField', 'PointField', 'LineStringField', 'PolygonField',
'SequenceField', 'UUIDField', 'MultiPointField', 'MultiLineStringField',
'MultiPolygonField', 'GeoJsonBaseField']
'MultiPolygonField', 'GeoJsonBaseField'
)
RECURSIVE_REFERENCE_CONSTANT = 'self'
class StringField(BaseField):
"""A unicode string field.
"""
"""A unicode string field."""
def __init__(self, regex=None, max_length=None, min_length=None, **kwargs):
self.regex = re.compile(regex) if regex else None
@@ -66,7 +65,7 @@ class StringField(BaseField):
super(StringField, self).__init__(**kwargs)
def to_python(self, value):
if isinstance(value, unicode):
if isinstance(value, six.text_type):
return value
try:
value = value.decode('utf-8')
@@ -75,7 +74,7 @@ class StringField(BaseField):
return value
def validate(self, value):
if not isinstance(value, basestring):
if not isinstance(value, six.string_types):
self.error('StringField only accepts string values')
if self.max_length is not None and len(value) > self.max_length:
@@ -91,7 +90,7 @@ class StringField(BaseField):
return None
def prepare_query_value(self, op, value):
if not isinstance(op, basestring):
if not isinstance(op, six.string_types):
return value
if op.lstrip('i') in ('startswith', 'endswith', 'contains', 'exact'):
@@ -148,17 +147,6 @@ class URLField(StringField):
self.error('Invalid URL: {}'.format(value))
return
if self.verify_exists:
warnings.warn(
"The URLField verify_exists argument has intractable security "
"and performance issues. Accordingly, it has been deprecated.",
DeprecationWarning)
try:
request = urllib2.Request(value)
urllib2.urlopen(request)
except Exception, e:
self.error('This URL appears to be a broken link: %s' % e)
class EmailField(StringField):
"""A field that validates input as an email address.
@@ -182,8 +170,7 @@ class EmailField(StringField):
class IntField(BaseField):
"""An 32-bit integer field.
"""
"""32-bit integer field."""
def __init__(self, min_value=None, max_value=None, **kwargs):
self.min_value, self.max_value = min_value, max_value
@@ -216,8 +203,7 @@ class IntField(BaseField):
class LongField(BaseField):
"""An 64-bit integer field.
"""
"""64-bit integer field."""
def __init__(self, min_value=None, max_value=None, **kwargs):
self.min_value, self.max_value = min_value, max_value
@@ -253,8 +239,7 @@ class LongField(BaseField):
class FloatField(BaseField):
"""An floating point number field.
"""
"""Floating point number field."""
def __init__(self, min_value=None, max_value=None, **kwargs):
self.min_value, self.max_value = min_value, max_value
@@ -291,7 +276,7 @@ class FloatField(BaseField):
class DecimalField(BaseField):
"""A fixed-point decimal number field.
"""Fixed-point decimal number field.
.. versionchanged:: 0.8
.. versionadded:: 0.3
@@ -332,25 +317,25 @@ class DecimalField(BaseField):
# Convert to string for python 2.6 before casting to Decimal
try:
value = decimal.Decimal("%s" % value)
value = decimal.Decimal('%s' % value)
except decimal.InvalidOperation:
return value
return value.quantize(decimal.Decimal(".%s" % ("0" * self.precision)), rounding=self.rounding)
return value.quantize(decimal.Decimal('.%s' % ('0' * self.precision)), rounding=self.rounding)
def to_mongo(self, value):
if value is None:
return value
if self.force_string:
return unicode(self.to_python(value))
return six.text_type(self.to_python(value))
return float(self.to_python(value))
def validate(self, value):
if not isinstance(value, decimal.Decimal):
if not isinstance(value, basestring):
value = unicode(value)
if not isinstance(value, six.string_types):
value = six.text_type(value)
try:
value = decimal.Decimal(value)
except Exception, exc:
except Exception as exc:
self.error('Could not convert value to decimal: %s' % exc)
if self.min_value is not None and value < self.min_value:
@@ -364,7 +349,7 @@ class DecimalField(BaseField):
class BooleanField(BaseField):
"""A boolean field type.
"""Boolean field type.
.. versionadded:: 0.1.2
"""
@@ -382,7 +367,7 @@ class BooleanField(BaseField):
class DateTimeField(BaseField):
"""A datetime field.
"""Datetime field.
Uses the python-dateutil library if available, falling back to time.strptime
otherwise to parse the dates. Note: python-dateutil's parser is fully featured and when
@@ -410,7 +395,7 @@ class DateTimeField(BaseField):
if callable(value):
return value()
if not isinstance(value, basestring):
if not isinstance(value, six.string_types):
return None
# Attempt to parse a datetime:
@@ -537,16 +522,19 @@ class EmbeddedDocumentField(BaseField):
"""
def __init__(self, document_type, **kwargs):
if not isinstance(document_type, basestring):
if not issubclass(document_type, EmbeddedDocument):
self.error('Invalid embedded document class provided to an '
'EmbeddedDocumentField')
if (
not isinstance(document_type, six.string_types) and
not issubclass(document_type, EmbeddedDocument)
):
self.error('Invalid embedded document class provided to an '
'EmbeddedDocumentField')
self.document_type_obj = document_type
super(EmbeddedDocumentField, self).__init__(**kwargs)
@property
def document_type(self):
if isinstance(self.document_type_obj, basestring):
if isinstance(self.document_type_obj, six.string_types):
if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT:
self.document_type_obj = self.owner_document
else:
@@ -631,7 +619,7 @@ class DynamicField(BaseField):
"""Convert a Python type to a MongoDB compatible type.
"""
if isinstance(value, basestring):
if isinstance(value, six.string_types):
return value
if hasattr(value, 'to_mongo'):
@@ -639,7 +627,7 @@ class DynamicField(BaseField):
val = value.to_mongo(use_db_field, fields)
# If it's a document that's not inherited, add _cls
if isinstance(value, Document):
val = {"_ref": value.to_dbref(), "_cls": cls.__name__}
val = {'_ref': value.to_dbref(), '_cls': cls.__name__}
if isinstance(value, EmbeddedDocument):
val['_cls'] = cls.__name__
return val
@@ -650,7 +638,7 @@ class DynamicField(BaseField):
is_list = False
if not hasattr(value, 'items'):
is_list = True
value = dict([(k, v) for k, v in enumerate(value)])
value = {k: v for k, v in enumerate(value)}
data = {}
for k, v in value.iteritems():
@@ -674,12 +662,12 @@ class DynamicField(BaseField):
return member_name
def prepare_query_value(self, op, value):
if isinstance(value, basestring):
if isinstance(value, six.string_types):
return StringField().prepare_query_value(op, value)
return super(DynamicField, self).prepare_query_value(op, self.to_mongo(value))
def validate(self, value, clean=True):
if hasattr(value, "validate"):
if hasattr(value, 'validate'):
value.validate(clean=clean)
@@ -699,21 +687,27 @@ class ListField(ComplexBaseField):
super(ListField, self).__init__(**kwargs)
def validate(self, value):
"""Make sure that a list of valid fields is being used.
"""
"""Make sure that a list of valid fields is being used."""
if (not isinstance(value, (list, tuple, QuerySet)) or
isinstance(value, basestring)):
isinstance(value, six.string_types)):
self.error('Only lists and tuples may be used in a list field')
super(ListField, self).validate(value)
def prepare_query_value(self, op, value):
if self.field:
if op in ('set', 'unset', None) and (
not isinstance(value, basestring) and
not isinstance(value, BaseDocument) and
hasattr(value, '__iter__')):
# If the value is iterable and it's not a string nor a
# BaseDocument, call prepare_query_value for each of its items.
if (
op in ('set', 'unset', None) and
hasattr(value, '__iter__') and
not isinstance(value, six.string_types) and
not isinstance(value, BaseDocument)
):
return [self.field.prepare_query_value(op, v) for v in value]
return self.field.prepare_query_value(op, value)
return super(ListField, self).prepare_query_value(op, value)
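A sketch of the branching above, assuming a ListField(IntField()); each item goes through the item field's prepare_query_value:

from mongoengine.fields import IntField, ListField

field = ListField(IntField())
field.prepare_query_value('set', ['1', '2'])  # iterable -> [1, 2], item by item
field.prepare_query_value(None, '3')          # scalar string -> 3, via the item field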
@@ -726,7 +720,6 @@ class EmbeddedDocumentListField(ListField):
:class:`~mongoengine.EmbeddedDocument`.
.. versionadded:: 0.9
"""
def __init__(self, document_type, **kwargs):
@@ -775,17 +768,17 @@ class SortedListField(ListField):
def key_not_string(d):
""" Helper function to recursively determine if any key in a dictionary is
not a string.
"""Helper function to recursively determine if any key in a
dictionary is not a string.
"""
for k, v in d.items():
if not isinstance(k, basestring) or (isinstance(v, dict) and key_not_string(v)):
if not isinstance(k, six.string_types) or (isinstance(v, dict) and key_not_string(v)):
return True
def key_has_dot_or_dollar(d):
""" Helper function to recursively determine if any key in a dictionary
contains a dot or a dollar sign.
"""Helper function to recursively determine if any key in a
dictionary contains a dot or a dollar sign.
"""
for k, v in d.items():
if ('.' in k or '$' in k) or (isinstance(v, dict) and key_has_dot_or_dollar(v)):
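For illustration, how the two helpers above behave (they return True on a match and fall through otherwise):

key_not_string({'a': {'b': 1}})             # falsy - all keys are strings
key_not_string({'a': {1: 'x'}})             # True - nested non-string key
key_has_dot_or_dollar({'a': {'$in': [1]}})  # True - nested '$' key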
@@ -813,14 +806,13 @@ class DictField(ComplexBaseField):
super(DictField, self).__init__(*args, **kwargs)
def validate(self, value):
"""Make sure that a list of valid fields is being used.
"""
"""Make sure that a list of valid fields is being used."""
if not isinstance(value, dict):
self.error('Only dictionaries may be used in a DictField')
if key_not_string(value):
msg = ("Invalid dictionary key - documents must "
"have only string keys")
msg = ('Invalid dictionary key - documents must '
'have only string keys')
self.error(msg)
if key_has_dot_or_dollar(value):
self.error('Invalid dictionary key name - keys may not contain "."'
@@ -835,14 +827,15 @@ class DictField(ComplexBaseField):
'istartswith', 'endswith', 'iendswith',
'exact', 'iexact']
if op in match_operators and isinstance(value, basestring):
if op in match_operators and isinstance(value, six.string_types):
return StringField().prepare_query_value(op, value)
if hasattr(self.field, 'field'):
if op in ('set', 'unset') and isinstance(value, dict):
return dict(
(k, self.field.prepare_query_value(op, v))
for k, v in value.items())
return {
k: self.field.prepare_query_value(op, v)
for k, v in value.items()
}
return self.field.prepare_query_value(op, value)
return super(DictField, self).prepare_query_value(op, value)
@@ -911,10 +904,12 @@ class ReferenceField(BaseField):
A reference to an abstract document type is always stored as a
:class:`~pymongo.dbref.DBRef`, regardless of the value of `dbref`.
"""
if not isinstance(document_type, basestring):
if not issubclass(document_type, (Document, basestring)):
self.error('Argument to ReferenceField constructor must be a '
'document class or a string')
if (
not isinstance(document_type, six.string_types) and
not issubclass(document_type, Document)
):
self.error('Argument to ReferenceField constructor must be a '
'document class or a string')
self.dbref = dbref
self.document_type_obj = document_type
@@ -923,7 +918,7 @@ class ReferenceField(BaseField):
@property
def document_type(self):
if isinstance(self.document_type_obj, basestring):
if isinstance(self.document_type_obj, six.string_types):
if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT:
self.document_type_obj = self.owner_document
else:
@@ -931,8 +926,7 @@ class ReferenceField(BaseField):
return self.document_type_obj
def __get__(self, instance, owner):
"""Descriptor to allow lazy dereferencing.
"""
"""Descriptor to allow lazy dereferencing."""
if instance is None:
# Document class being used rather than a document object
return self
@@ -989,8 +983,7 @@ class ReferenceField(BaseField):
return id_
def to_python(self, value):
"""Convert a MongoDB-compatible type to a Python type.
"""
"""Convert a MongoDB-compatible type to a Python type."""
if (not self.dbref and
not isinstance(value, (DBRef, Document, EmbeddedDocument))):
collection = self.document_type._get_collection_name()
@@ -1006,7 +999,7 @@ class ReferenceField(BaseField):
def validate(self, value):
if not isinstance(value, (self.document_type, DBRef)):
self.error("A ReferenceField only accepts DBRef or documents")
self.error('A ReferenceField only accepts DBRef or documents')
if isinstance(value, Document) and value.id is None:
self.error('You can only reference documents once they have been '
@@ -1030,14 +1023,19 @@ class CachedReferenceField(BaseField):
.. versionadded:: 0.9
"""
def __init__(self, document_type, fields=[], auto_sync=True, **kwargs):
def __init__(self, document_type, fields=None, auto_sync=True, **kwargs):
"""Initialises the Cached Reference Field.
:param fields: A list of fields to be cached in document
:param auto_sync: if True, documents are auto-updated.
"""
if not isinstance(document_type, basestring) and \
not issubclass(document_type, (Document, basestring)):
if fields is None:
fields = []
if (
not isinstance(document_type, six.string_types) and
not issubclass(document_type, Document)
):
self.error('Argument to CachedReferenceField constructor must be a'
' document class or a string')
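A usage sketch (Owner and Pet are hypothetical); `fields` should now be passed explicitly rather than relying on a shared mutable default list:

from mongoengine import CachedReferenceField, Document, StringField

class Owner(Document):
    name = StringField()

class Pet(Document):
    owner = CachedReferenceField(Owner, fields=['name'])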
@@ -1053,18 +1051,20 @@ class CachedReferenceField(BaseField):
sender=self.document_type)
def on_document_pre_save(self, sender, document, created, **kwargs):
if not created:
update_kwargs = dict(
('set__%s__%s' % (self.name, k), v)
for k, v in document._delta()[0].items()
if k in self.fields)
if created:
return None
if update_kwargs:
filter_kwargs = {}
filter_kwargs[self.name] = document
update_kwargs = {
'set__%s__%s' % (self.name, key): val
for key, val in document._delta()[0].items()
if key in self.fields
}
if update_kwargs:
filter_kwargs = {}
filter_kwargs[self.name] = document
self.owner_document.objects(
**filter_kwargs).update(**update_kwargs)
self.owner_document.objects(
**filter_kwargs).update(**update_kwargs)
def to_python(self, value):
if isinstance(value, dict):
@@ -1077,7 +1077,7 @@ class CachedReferenceField(BaseField):
@property
def document_type(self):
if isinstance(self.document_type_obj, basestring):
if isinstance(self.document_type_obj, six.string_types):
if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT:
self.document_type_obj = self.owner_document
else:
@@ -1117,7 +1117,7 @@ class CachedReferenceField(BaseField):
# TODO: should raise here or will fail next statement
value = SON((
("_id", id_field.to_mongo(id_)),
('_id', id_field.to_mongo(id_)),
))
if fields:
@@ -1143,7 +1143,7 @@ class CachedReferenceField(BaseField):
def validate(self, value):
if not isinstance(value, self.document_type):
self.error("A CachedReferenceField only accepts documents")
self.error('A CachedReferenceField only accepts documents')
if isinstance(value, Document) and value.id is None:
self.error('You can only reference documents once they have been '
@@ -1191,13 +1191,13 @@ class GenericReferenceField(BaseField):
# Keep the choices as a list of allowed Document class names
if choices:
for choice in choices:
if isinstance(choice, basestring):
if isinstance(choice, six.string_types):
self.choices.append(choice)
elif isinstance(choice, type) and issubclass(choice, Document):
self.choices.append(choice._class_name)
else:
self.error('Invalid choices provided: must be a list of'
'Document subclasses and/or basestrings')
'Document subclasses and/or strings')
def _validate_choices(self, value):
if isinstance(value, dict):
@@ -1280,8 +1280,7 @@ class GenericReferenceField(BaseField):
class BinaryField(BaseField):
"""A binary data field.
"""
"""A binary data field."""
def __init__(self, max_bytes=None, **kwargs):
self.max_bytes = max_bytes
@@ -1289,18 +1288,18 @@ class BinaryField(BaseField):
def __set__(self, instance, value):
"""Handle bytearrays in python 3.1"""
if PY3 and isinstance(value, bytearray):
value = bin_type(value)
if six.PY3 and isinstance(value, bytearray):
value = six.binary_type(value)
return super(BinaryField, self).__set__(instance, value)
def to_mongo(self, value):
return Binary(value)
def validate(self, value):
if not isinstance(value, (bin_type, txt_type, Binary)):
self.error("BinaryField only accepts instances of "
"(%s, %s, Binary)" % (
bin_type.__name__, txt_type.__name__))
if not isinstance(value, (six.binary_type, six.text_type, Binary)):
self.error('BinaryField only accepts instances of '
'(%s, %s, Binary)' % (
six.binary_type.__name__, six.text_type.__name__))
if self.max_bytes is not None and len(value) > self.max_bytes:
self.error('Binary value is too long')
@@ -1384,11 +1383,13 @@ class GridFSProxy(object):
get_db(self.db_alias), self.collection_name)
return self._fs
def get(self, id=None):
if id:
self.grid_id = id
def get(self, grid_id=None):
if grid_id:
self.grid_id = grid_id
if self.grid_id is None:
return None
try:
if self.gridout is None:
self.gridout = self.fs.get(self.grid_id)
@@ -1432,7 +1433,7 @@ class GridFSProxy(object):
try:
return gridout.read(size)
except Exception:
return ""
return ''
def delete(self):
# Delete file from GridFS, FileField still remains
@@ -1464,9 +1465,8 @@ class FileField(BaseField):
"""
proxy_class = GridFSProxy
def __init__(self,
db_alias=DEFAULT_CONNECTION_NAME,
collection_name="fs", **kwargs):
def __init__(self, db_alias=DEFAULT_CONNECTION_NAME, collection_name='fs',
**kwargs):
super(FileField, self).__init__(**kwargs)
self.collection_name = collection_name
self.db_alias = db_alias
@@ -1488,8 +1488,10 @@ class FileField(BaseField):
def __set__(self, instance, value):
key = self.name
if ((hasattr(value, 'read') and not
isinstance(value, GridFSProxy)) or isinstance(value, str_types)):
if (
(hasattr(value, 'read') and not isinstance(value, GridFSProxy)) or
isinstance(value, (six.binary_type, six.string_types))
):
# using "FileField() = file/string" notation
grid_file = instance._data.get(self.name)
# If a file already exists, delete it
@@ -1558,7 +1560,7 @@ class ImageGridFsProxy(GridFSProxy):
try:
img = Image.open(file_obj)
img_format = img.format
except Exception, e:
except Exception as e:
raise ValidationError('Invalid image: %s' % e)
# Progressive JPEG
@@ -1667,10 +1669,10 @@ class ImageGridFsProxy(GridFSProxy):
return self.fs.get(out.thumbnail_id)
def write(self, *args, **kwargs):
raise RuntimeError("Please use \"put\" method instead")
raise RuntimeError('Please use "put" method instead')
def writelines(self, *args, **kwargs):
raise RuntimeError("Please use \"put\" method instead")
raise RuntimeError('Please use "put" method instead')
class ImproperlyConfigured(Exception):
@@ -1695,14 +1697,17 @@ class ImageField(FileField):
def __init__(self, size=None, thumbnail_size=None,
collection_name='images', **kwargs):
if not Image:
raise ImproperlyConfigured("PIL library was not found")
raise ImproperlyConfigured('PIL library was not found')
params_size = ('width', 'height', 'force')
extra_args = dict(size=size, thumbnail_size=thumbnail_size)
extra_args = {
'size': size,
'thumbnail_size': thumbnail_size
}
for att_name, att in extra_args.items():
value = None
if isinstance(att, (tuple, list)):
if PY3:
if six.PY3:
value = dict(itertools.zip_longest(params_size, att,
fillvalue=None))
else:
@@ -1763,10 +1768,10 @@ class SequenceField(BaseField):
Generate and increment the counter.
"""
sequence_name = self.get_sequence_name()
sequence_id = "%s.%s" % (sequence_name, self.name)
sequence_id = '%s.%s' % (sequence_name, self.name)
collection = get_db(alias=self.db_alias)[self.collection_name]
counter = collection.find_and_modify(query={"_id": sequence_id},
update={"$inc": {"next": 1}},
counter = collection.find_and_modify(query={'_id': sequence_id},
update={'$inc': {'next': 1}},
new=True,
upsert=True)
return self.value_decorator(counter['next'])
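A sketch of the side-collection documents this creates (Person is hypothetical); each counter lives under an '<sequence_name>.<field_name>' _id in the counters collection ('mongoengine.counters' by default):

from mongoengine import Document, SequenceField

class Person(Document):
    counter = SequenceField()

# The 'mongoengine.counters' collection then holds, e.g.:
# {'_id': 'person.counter', 'next': 3}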
@@ -1789,9 +1794,9 @@ class SequenceField(BaseField):
as it is only fixed on set.
"""
sequence_name = self.get_sequence_name()
sequence_id = "%s.%s" % (sequence_name, self.name)
sequence_id = '%s.%s' % (sequence_name, self.name)
collection = get_db(alias=self.db_alias)[self.collection_name]
data = collection.find_one({"_id": sequence_id})
data = collection.find_one({'_id': sequence_id})
if data:
return self.value_decorator(data['next'] + 1)
@@ -1861,8 +1866,8 @@ class UUIDField(BaseField):
if not self._binary:
original_value = value
try:
if not isinstance(value, basestring):
value = unicode(value)
if not isinstance(value, six.string_types):
value = six.text_type(value)
return uuid.UUID(value)
except Exception:
return original_value
@@ -1870,8 +1875,8 @@ class UUIDField(BaseField):
def to_mongo(self, value):
if not self._binary:
return unicode(value)
elif isinstance(value, basestring):
return six.text_type(value)
elif isinstance(value, six.string_types):
return uuid.UUID(value)
return value
@@ -1882,11 +1887,11 @@ class UUIDField(BaseField):
def validate(self, value):
if not isinstance(value, uuid.UUID):
if not isinstance(value, basestring):
if not isinstance(value, six.string_types):
value = str(value)
try:
uuid.UUID(value)
except Exception, exc:
except Exception as exc:
self.error('Could not convert to UUID: %s' % exc)
@@ -1904,19 +1909,18 @@ class GeoPointField(BaseField):
_geo_index = pymongo.GEO2D
def validate(self, value):
"""Make sure that a geo-value is of type (x, y)
"""
"""Make sure that a geo-value is of type (x, y)"""
if not isinstance(value, (list, tuple)):
self.error('GeoPointField can only accept tuples or lists '
'of (x, y)')
if not len(value) == 2:
self.error("Value (%s) must be a two-dimensional point" %
self.error('Value (%s) must be a two-dimensional point' %
repr(value))
elif (not isinstance(value[0], (float, int)) or
not isinstance(value[1], (float, int))):
self.error(
"Both values (%s) in point must be float or int" % repr(value))
'Both values (%s) in point must be float or int' % repr(value))
class PointField(GeoJsonBaseField):
@@ -1926,8 +1930,8 @@ class PointField(GeoJsonBaseField):
.. code-block:: js
{ "type" : "Point" ,
"coordinates" : [x, y]}
{'type' : 'Point' ,
'coordinates' : [x, y]}
You can either pass a dict with the full information or a list
to set the value.
@@ -1936,7 +1940,7 @@ class PointField(GeoJsonBaseField):
.. versionadded:: 0.8
"""
_type = "Point"
_type = 'Point'
class LineStringField(GeoJsonBaseField):
@@ -1946,8 +1950,8 @@ class LineStringField(GeoJsonBaseField):
.. code-block:: js
{ "type" : "LineString" ,
"coordinates" : [[x1, y1], [x1, y1] ... [xn, yn]]}
{'type' : 'LineString' ,
'coordinates' : [[x1, y1], [x1, y1] ... [xn, yn]]}
You can either pass a dict with the full information or a list of points.
@@ -1955,7 +1959,7 @@ class LineStringField(GeoJsonBaseField):
.. versionadded:: 0.8
"""
_type = "LineString"
_type = 'LineString'
class PolygonField(GeoJsonBaseField):
@@ -1965,9 +1969,9 @@ class PolygonField(GeoJsonBaseField):
.. code-block:: js
{ "type" : "Polygon" ,
"coordinates" : [[[x1, y1], [x1, y1] ... [xn, yn]],
[[x1, y1], [x1, y1] ... [xn, yn]]}
{'type' : 'Polygon' ,
'coordinates' : [[[x1, y1], [x1, y1] ... [xn, yn]],
[[x1, y1], [x1, y1] ... [xn, yn]]}
You can either pass a dict with the full information or a list
of LineStrings. The first LineString being the outside and the rest being
@@ -1977,7 +1981,7 @@ class PolygonField(GeoJsonBaseField):
.. versionadded:: 0.8
"""
_type = "Polygon"
_type = 'Polygon'
class MultiPointField(GeoJsonBaseField):
@@ -1987,8 +1991,8 @@ class MultiPointField(GeoJsonBaseField):
.. code-block:: js
{ "type" : "MultiPoint" ,
"coordinates" : [[x1, y1], [x2, y2]]}
{'type' : 'MultiPoint' ,
'coordinates' : [[x1, y1], [x2, y2]]}
You can either pass a dict with the full information or a list
to set the value.
@@ -1997,7 +2001,7 @@ class MultiPointField(GeoJsonBaseField):
.. versionadded:: 0.9
"""
_type = "MultiPoint"
_type = 'MultiPoint'
class MultiLineStringField(GeoJsonBaseField):
@@ -2007,9 +2011,9 @@ class MultiLineStringField(GeoJsonBaseField):
.. code-block:: js
{ "type" : "MultiLineString" ,
"coordinates" : [[[x1, y1], [x1, y1] ... [xn, yn]],
[[x1, y1], [x1, y1] ... [xn, yn]]]}
{'type' : 'MultiLineString' ,
'coordinates' : [[[x1, y1], [x1, y1] ... [xn, yn]],
[[x1, y1], [x1, y1] ... [xn, yn]]]}
You can either pass a dict with the full information or a list of points.
@@ -2017,7 +2021,7 @@ class MultiLineStringField(GeoJsonBaseField):
.. versionadded:: 0.9
"""
_type = "MultiLineString"
_type = 'MultiLineString'
class MultiPolygonField(GeoJsonBaseField):
@@ -2027,14 +2031,14 @@ class MultiPolygonField(GeoJsonBaseField):
.. code-block:: js
{ "type" : "MultiPolygon" ,
"coordinates" : [[
[[x1, y1], [x1, y1] ... [xn, yn]],
[[x1, y1], [x1, y1] ... [xn, yn]]
], [
[[x1, y1], [x1, y1] ... [xn, yn]],
[[x1, y1], [x1, y1] ... [xn, yn]]
]
{'type' : 'MultiPolygon' ,
'coordinates' : [[
[[x1, y1], [x1, y1] ... [xn, yn]],
[[x1, y1], [x1, y1] ... [xn, yn]]
], [
[[x1, y1], [x1, y1] ... [xn, yn]],
[[x1, y1], [x1, y1] ... [xn, yn]]
]
}
You can either pass a dict with the full information or a list
@@ -2044,4 +2048,4 @@ class MultiPolygonField(GeoJsonBaseField):
.. versionadded:: 0.9
"""
_type = "MultiPolygon"
_type = 'MultiPolygon'

View File

@@ -1,50 +1,25 @@
"""Helper functions and types to aid with Python 2.6 - 3 support."""
import sys
import warnings
"""
Helper functions, constants, and types to aid with Python v2.7 - v3.x and
PyMongo v2.7 - v3.x support.
"""
import pymongo
import six
# Show a deprecation warning for people using Python v2.6
# TODO remove in mongoengine v0.11.0
if sys.version_info[0] == 2 and sys.version_info[1] == 6:
warnings.warn(
'Python v2.6 support is deprecated and is going to be dropped '
'entirely in the upcoming v0.11.0 release. Update your Python '
'version if you want to have access to the latest features and '
'bug fixes in MongoEngine.',
DeprecationWarning
)
if pymongo.version_tuple[0] < 3:
IS_PYMONGO_3 = False
else:
IS_PYMONGO_3 = True
PY3 = sys.version_info[0] == 3
if PY3:
import codecs
from io import BytesIO as StringIO
# six.BytesIO resolves to StringIO.StringIO in Py2 and io.BytesIO in Py3.
StringIO = six.BytesIO
# return s converted to binary. b('test') should be equivalent to b'test'
def b(s):
return codecs.latin_1_encode(s)[0]
bin_type = bytes
txt_type = str
else:
# Additionally for Py2, try to use the faster cStringIO, if available
if not six.PY3:
try:
from cStringIO import StringIO
import cStringIO
except ImportError:
from StringIO import StringIO
# Conversion to binary only necessary in Python 3
def b(s):
return s
bin_type = str
txt_type = unicode
str_types = (bin_type, txt_type)
pass
else:
StringIO = cStringIO.StringIO
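Sketch: on either Python version the resulting StringIO is a bytes buffer.

from mongoengine.python_support import StringIO

buf = StringIO(b'raw bytes')
assert buf.read() == b'raw bytes'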

View File

@@ -1,11 +1,17 @@
from mongoengine.errors import (DoesNotExist, InvalidQueryError,
MultipleObjectsReturned, NotUniqueError,
OperationError)
from mongoengine.errors import *
from mongoengine.queryset.field_list import *
from mongoengine.queryset.manager import *
from mongoengine.queryset.queryset import *
from mongoengine.queryset.transform import *
from mongoengine.queryset.visitor import *
__all__ = (field_list.__all__ + manager.__all__ + queryset.__all__ +
transform.__all__ + visitor.__all__)
# Expose just the public subset of all imported objects and constants.
__all__ = (
'QuerySet', 'QuerySetNoCache', 'Q', 'queryset_manager', 'QuerySetManager',
'QueryFieldList', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL',
# Errors that might be related to a queryset, mostly here for backward
# compatibility
'DoesNotExist', 'InvalidQueryError', 'MultipleObjectsReturned',
'NotUniqueError', 'OperationError',
)
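So, for example, both of these imports keep working after the change:

from mongoengine.queryset import Q, queryset_manager  # public API
from mongoengine.queryset import DoesNotExist          # backward-compat error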

View File

@@ -12,9 +12,10 @@ from bson.code import Code
import pymongo
import pymongo.errors
from pymongo.common import validate_read_preference
import six
from mongoengine import signals
from mongoengine.base.common import get_document
from mongoengine.base import get_document
from mongoengine.common import _import_class
from mongoengine.connection import get_db
from mongoengine.context_managers import switch_db
@@ -73,10 +74,10 @@ class BaseQuerySet(object):
# subclasses of the class being used
if document._meta.get('allow_inheritance') is True:
if len(self._document._subclasses) == 1:
self._initial_query = {"_cls": self._document._subclasses[0]}
self._initial_query = {'_cls': self._document._subclasses[0]}
else:
self._initial_query = {
"_cls": {"$in": self._document._subclasses}}
'_cls': {'$in': self._document._subclasses}}
self._loaded_fields = QueryFieldList(always_include=['_cls'])
self._cursor_obj = None
self._limit = None
@@ -105,8 +106,8 @@ class BaseQuerySet(object):
if q_obj:
# make sure proper query object is passed
if not isinstance(q_obj, QNode):
msg = ("Not a query object: %s. "
"Did you intend to use key=value?" % q_obj)
msg = ('Not a query object: %s. '
'Did you intend to use key=value?' % q_obj)
raise InvalidQueryError(msg)
query &= q_obj
@@ -133,10 +134,10 @@ class BaseQuerySet(object):
obj_dict = self.__dict__.copy()
# don't picke collection, instead pickle collection params
obj_dict.pop("_collection_obj")
obj_dict.pop('_collection_obj')
# don't pickle cursor
obj_dict["_cursor_obj"] = None
obj_dict['_cursor_obj'] = None
return obj_dict
@@ -147,7 +148,7 @@ class BaseQuerySet(object):
See https://github.com/MongoEngine/mongoengine/issues/442
"""
obj_dict["_collection_obj"] = obj_dict["_document"]._get_collection()
obj_dict['_collection_obj'] = obj_dict['_document']._get_collection()
# update attributes
self.__dict__.update(obj_dict)
@@ -166,7 +167,7 @@ class BaseQuerySet(object):
queryset._skip, queryset._limit = key.start, key.stop
if key.start and key.stop:
queryset._limit = key.stop - key.start
except IndexError, err:
except IndexError as err:
# PyMongo raises an error if key.start == key.stop, catch it,
# bin it, kill it.
start = key.start or 0
@@ -199,19 +200,16 @@ class BaseQuerySet(object):
raise NotImplementedError
def _has_data(self):
""" Retrieves whether cursor has any data. """
"""Return True if cursor has any data."""
queryset = self.order_by()
return False if queryset.first() is None else True
def __nonzero__(self):
""" Avoid to open all records in an if stmt in Py2. """
"""Avoid to open all records in an if stmt in Py2."""
return self._has_data()
def __bool__(self):
""" Avoid to open all records in an if stmt in Py3. """
"""Avoid to open all records in an if stmt in Py3."""
return self._has_data()
# Core functions
@@ -239,7 +237,7 @@ class BaseQuerySet(object):
queryset = self.clone()
if queryset._search_text:
raise OperationError(
"It is not possible to use search_text two times.")
'It is not possible to use search_text two times.')
query_kwargs = SON({'$search': text})
if language:
@@ -268,7 +266,7 @@ class BaseQuerySet(object):
try:
result = queryset.next()
except StopIteration:
msg = ("%s matching query does not exist."
msg = ('%s matching query does not exist.'
% queryset._document._class_name)
raise queryset._document.DoesNotExist(msg)
try:
@@ -290,8 +288,7 @@ class BaseQuerySet(object):
return self._document(**kwargs).save()
def first(self):
"""Retrieve the first object matching the query.
"""
"""Retrieve the first object matching the query."""
queryset = self.clone()
try:
result = queryset[0]
@@ -340,7 +337,7 @@ class BaseQuerySet(object):
% str(self._document))
raise OperationError(msg)
if doc.pk and not doc._created:
msg = "Some documents have ObjectIds use doc.update() instead"
msg = 'Some documents have ObjectIds; use doc.update() instead'
raise OperationError(msg)
signal_kwargs = signal_kwargs or {}
@@ -350,17 +347,17 @@ class BaseQuerySet(object):
raw = [doc.to_mongo() for doc in docs]
try:
ids = self._collection.insert(raw, **write_concern)
except pymongo.errors.DuplicateKeyError, err:
except pymongo.errors.DuplicateKeyError as err:
message = 'Could not save document (%s)'
raise NotUniqueError(message % unicode(err))
except pymongo.errors.OperationFailure, err:
raise NotUniqueError(message % six.text_type(err))
except pymongo.errors.OperationFailure as err:
message = 'Could not save document (%s)'
if re.match('^E1100[01] duplicate key', unicode(err)):
if re.match('^E1100[01] duplicate key', six.text_type(err)):
# E11000 - duplicate key error index
# E11001 - duplicate key on update
message = u'Tried to save duplicate unique keys (%s)'
raise NotUniqueError(message % unicode(err))
raise OperationError(message % unicode(err))
raise NotUniqueError(message % six.text_type(err))
raise OperationError(message % six.text_type(err))
if not load_bulk:
signals.post_bulk_insert.send(
@@ -386,7 +383,8 @@ class BaseQuerySet(object):
return 0
return self._cursor.count(with_limit_and_skip=with_limit_and_skip)
def delete(self, write_concern=None, _from_doc_delete=False, cascade_refs=None):
def delete(self, write_concern=None, _from_doc_delete=False,
cascade_refs=None):
"""Delete the documents matched by the query.
:param write_concern: Extra keyword arguments are passed down which
@@ -409,8 +407,9 @@ class BaseQuerySet(object):
# Handle deletes where skips or limits have been applied or
# there is an untriggered delete signal
has_delete_signal = signals.signals_available and (
signals.pre_delete.has_receivers_for(self._document) or
signals.post_delete.has_receivers_for(self._document))
signals.pre_delete.has_receivers_for(doc) or
signals.post_delete.has_receivers_for(doc)
)
call_document_delete = (queryset._skip or queryset._limit or
has_delete_signal) and not _from_doc_delete
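A sketch of the signal-driven fallback, assuming a hypothetical `Post` document and the optional blinker dependency: once a delete receiver is connected, queryset deletes go document-by-document so the signal can fire for each one.

from mongoengine import signals

def log_delete(sender, document, **kwargs):
    print('deleting %s' % document.pk)

signals.pre_delete.connect(log_delete, sender=Post)

# has_delete_signal is now True, so this calls doc.delete() for each
# matched document instead of issuing a single bulk remove.
Post.objects(author='alice').delete()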
@@ -423,37 +422,44 @@ class BaseQuerySet(object):
return cnt
delete_rules = doc._meta.get('delete_rules') or {}
delete_rules = list(delete_rules.items())
# Check for DENY rules before actually deleting/nullifying any other
# references
for rule_entry in delete_rules:
for rule_entry, rule in delete_rules:
document_cls, field_name = rule_entry
if document_cls._meta.get('abstract'):
continue
rule = doc._meta['delete_rules'][rule_entry]
if rule == DENY and document_cls.objects(
**{field_name + '__in': self}).count() > 0:
msg = ("Could not delete document (%s.%s refers to it)"
% (document_cls.__name__, field_name))
raise OperationError(msg)
for rule_entry in delete_rules:
if rule == DENY:
refs = document_cls.objects(**{field_name + '__in': self})
if refs.limit(1).count() > 0:
raise OperationError(
'Could not delete document (%s.%s refers to it)'
% (document_cls.__name__, field_name)
)
# Check all the other rules
for rule_entry, rule in delete_rules:
document_cls, field_name = rule_entry
if document_cls._meta.get('abstract'):
continue
rule = doc._meta['delete_rules'][rule_entry]
if rule == CASCADE:
cascade_refs = set() if cascade_refs is None else cascade_refs
# Handle recursive reference
if doc._collection == document_cls._collection:
for ref in queryset:
cascade_refs.add(ref.id)
ref_q = document_cls.objects(**{field_name + '__in': self, 'pk__nin': cascade_refs})
ref_q_count = ref_q.count()
if ref_q_count > 0:
ref_q.delete(write_concern=write_concern, cascade_refs=cascade_refs)
refs = document_cls.objects(**{field_name + '__in': self,
'pk__nin': cascade_refs})
if refs.count() > 0:
refs.delete(write_concern=write_concern,
cascade_refs=cascade_refs)
elif rule == NULLIFY:
document_cls.objects(**{field_name + '__in': self}).update(
write_concern=write_concern, **{'unset__%s' % field_name: 1})
write_concern=write_concern,
**{'unset__%s' % field_name: 1})
elif rule == PULL:
document_cls.objects(**{field_name + '__in': self}).update(
write_concern=write_concern,
@@ -461,7 +467,7 @@ class BaseQuerySet(object):
result = queryset._collection.remove(queryset._query, **write_concern)
if result:
return result.get("n")
return result.get('n')
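The rules themselves are declared on `ReferenceField`; a minimal sketch with hypothetical `Author`/`Book` classes:

from mongoengine import CASCADE, Document, ReferenceField, StringField

class Author(Document):
    name = StringField()

class Book(Document):
    author = ReferenceField(Author, reverse_delete_rule=CASCADE)

# DENY rules are checked first (and abort the delete); then CASCADE,
# NULLIFY and PULL are applied. Here, referencing Books are deleted
# along with the matched Authors.
Author.objects(name='Orwell').delete()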
def update(self, upsert=False, multi=True, write_concern=None,
full_result=False, **update):
@@ -482,7 +488,7 @@ class BaseQuerySet(object):
.. versionadded:: 0.2
"""
if not update and not upsert:
raise OperationError("No update parameters, would remove data")
raise OperationError('No update parameters, would remove data')
if write_concern is None:
write_concern = {}
@@ -495,9 +501,9 @@ class BaseQuerySet(object):
# then ensure we add _cls to the update operation
if upsert and '_cls' in query:
if '$set' in update:
update["$set"]["_cls"] = queryset._document._class_name
update['$set']['_cls'] = queryset._document._class_name
else:
update["$set"] = {"_cls": queryset._document._class_name}
update['$set'] = {'_cls': queryset._document._class_name}
try:
result = queryset._collection.update(query, update, multi=multi,
upsert=upsert, **write_concern)
@@ -505,13 +511,13 @@ class BaseQuerySet(object):
return result
elif result:
return result['n']
except pymongo.errors.DuplicateKeyError, err:
raise NotUniqueError(u'Update failed (%s)' % unicode(err))
except pymongo.errors.OperationFailure, err:
if unicode(err) == u'multi not coded yet':
except pymongo.errors.DuplicateKeyError as err:
raise NotUniqueError(u'Update failed (%s)' % six.text_type(err))
except pymongo.errors.OperationFailure as err:
if six.text_type(err) == u'multi not coded yet':
message = u'update() method requires MongoDB 1.1.3+'
raise OperationError(message)
raise OperationError(u'Update failed (%s)' % unicode(err))
raise OperationError(u'Update failed (%s)' % six.text_type(err))
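Sketch of the public call that exercises this path, using a hypothetical `Post` document:

# Returns the number of documents affected (result['n']). With
# upsert=True and inheritance enabled, '_cls' is folded into $set as
# shown above, so upserted documents keep their class marker.
n = Post.objects(slug='intro').update(set__title='Intro', upsert=True)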
def upsert_one(self, write_concern=None, **update):
"""Overwrite or add the first document matched by the query.
@@ -582,11 +588,11 @@ class BaseQuerySet(object):
"""
if remove and new:
raise OperationError("Conflicting parameters: remove and new")
raise OperationError('Conflicting parameters: remove and new')
if not update and not upsert and not remove:
raise OperationError(
"No update parameters, must either update or remove")
'No update parameters, must either update or remove')
queryset = self.clone()
query = queryset._query
@@ -597,7 +603,7 @@ class BaseQuerySet(object):
try:
if IS_PYMONGO_3:
if full_response:
msg = "With PyMongo 3+, it is not possible anymore to get the full response."
msg = 'With PyMongo 3+, it is no longer possible to get the full response.'
warnings.warn(msg, DeprecationWarning)
if remove:
result = queryset._collection.find_one_and_delete(
@@ -615,14 +621,14 @@ class BaseQuerySet(object):
result = queryset._collection.find_and_modify(
query, update, upsert=upsert, sort=sort, remove=remove, new=new,
full_response=full_response, **self._cursor_args)
except pymongo.errors.DuplicateKeyError, err:
raise NotUniqueError(u"Update failed (%s)" % err)
except pymongo.errors.OperationFailure, err:
raise OperationError(u"Update failed (%s)" % err)
except pymongo.errors.DuplicateKeyError as err:
raise NotUniqueError(u'Update failed (%s)' % err)
except pymongo.errors.OperationFailure as err:
raise OperationError(u'Update failed (%s)' % err)
if full_response:
if result["value"] is not None:
result["value"] = self._document._from_son(result["value"], only_fields=self.only_fields)
if result['value'] is not None:
result['value'] = self._document._from_son(result['value'], only_fields=self.only_fields)
else:
if result is not None:
result = self._document._from_son(result, only_fields=self.only_fields)
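The public wrapper for this block is `QuerySet.modify()`; a sketch with a hypothetical `Counter` document:

# On PyMongo 3+ this maps to find_one_and_update(); with new=True the
# returned document reflects the applied update.
doc = Counter.objects(name='hits').modify(upsert=True, new=True,
                                          inc__value=1)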
@@ -640,7 +646,7 @@ class BaseQuerySet(object):
"""
queryset = self.clone()
if not queryset._query_obj.empty:
msg = "Cannot use a filter whilst using `with_id`"
msg = 'Cannot use a filter whilst using `with_id`'
raise InvalidQueryError(msg)
return queryset.filter(pk=object_id).first()
@@ -684,7 +690,7 @@ class BaseQuerySet(object):
Only return instances of this document and not any inherited documents
"""
if self._document._meta.get('allow_inheritance') is True:
self._initial_query = {"_cls": self._document._class_name}
self._initial_query = {'_cls': self._document._class_name}
return self
@@ -810,49 +816,56 @@ class BaseQuerySet(object):
.. versionchanged:: 0.6 - Improved db_field reference handling
"""
queryset = self.clone()
try:
field = self._fields_to_dbfields([field]).pop()
finally:
distinct = self._dereference(queryset._cursor.distinct(field), 1,
name=field, instance=self._document)
except LookUpError:
pass
doc_field = self._document._fields.get(field.split('.', 1)[0])
instance = False
# We may need to cast to the correct type eg. ListField(EmbeddedDocumentField)
EmbeddedDocumentField = _import_class('EmbeddedDocumentField')
ListField = _import_class('ListField')
GenericEmbeddedDocumentField = _import_class('GenericEmbeddedDocumentField')
if isinstance(doc_field, ListField):
doc_field = getattr(doc_field, "field", doc_field)
if isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)):
instance = getattr(doc_field, "document_type", False)
# handle distinct on subdocuments
if '.' in field:
for field_part in field.split('.')[1:]:
# if looping on embedded document, get the document type instance
if instance and isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)):
doc_field = instance
# now get the subdocument
doc_field = getattr(doc_field, field_part, doc_field)
# We may need to cast to the correct type eg. ListField(EmbeddedDocumentField)
if isinstance(doc_field, ListField):
doc_field = getattr(doc_field, "field", doc_field)
if isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)):
instance = getattr(doc_field, "document_type", False)
if instance and isinstance(doc_field, (EmbeddedDocumentField,
GenericEmbeddedDocumentField)):
distinct = [instance(**doc) for doc in distinct]
return distinct
distinct = self._dereference(queryset._cursor.distinct(field), 1,
name=field, instance=self._document)
doc_field = self._document._fields.get(field.split('.', 1)[0])
instance = None
# We may need to cast to the correct type, e.g. ListField(EmbeddedDocumentField)
EmbeddedDocumentField = _import_class('EmbeddedDocumentField')
ListField = _import_class('ListField')
GenericEmbeddedDocumentField = _import_class('GenericEmbeddedDocumentField')
if isinstance(doc_field, ListField):
doc_field = getattr(doc_field, 'field', doc_field)
if isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)):
instance = getattr(doc_field, 'document_type', None)
# handle distinct on subdocuments
if '.' in field:
for field_part in field.split('.')[1:]:
# if looping on embedded document, get the document type instance
if instance and isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)):
doc_field = instance
# now get the subdocument
doc_field = getattr(doc_field, field_part, doc_field)
# We may need to cast to the correct type, e.g. ListField(EmbeddedDocumentField)
if isinstance(doc_field, ListField):
doc_field = getattr(doc_field, 'field', doc_field)
if isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)):
instance = getattr(doc_field, 'document_type', None)
if instance and isinstance(doc_field, (EmbeddedDocumentField,
GenericEmbeddedDocumentField)):
distinct = [instance(**doc) for doc in distinct]
return distinct
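A sketch of the embedded-document casting, with hypothetical `Author`/`Book` classes:

from mongoengine import (Document, EmbeddedDocument,
                         EmbeddedDocumentField, ListField, StringField)

class Author(EmbeddedDocument):
    name = StringField()

class Book(Document):
    authors = ListField(EmbeddedDocumentField(Author))

# The field resolves to an EmbeddedDocumentField, so the raw dicts
# returned by the distinct command are re-wrapped as Author instances.
Book.objects.distinct('authors')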
def only(self, *fields):
"""Load only a subset of this document's fields. ::
post = BlogPost.objects(...).only("title", "author.name")
post = BlogPost.objects(...).only('title', 'author.name')
.. note :: `only()` is chainable and will perform a union ::
So with the following it will fetch both `title` and `author.name`::
post = BlogPost.objects.only("title").only("author.name")
post = BlogPost.objects.only('title').only('author.name')
:func:`~mongoengine.queryset.QuerySet.all_fields` will reset any
field filters.
@@ -862,19 +875,19 @@ class BaseQuerySet(object):
.. versionadded:: 0.3
.. versionchanged:: 0.5 - Added subfield support
"""
fields = dict([(f, QueryFieldList.ONLY) for f in fields])
fields = {f: QueryFieldList.ONLY for f in fields}
self.only_fields = fields.keys()
return self.fields(True, **fields)
def exclude(self, *fields):
"""Opposite to .only(), exclude some document's fields. ::
post = BlogPost.objects(...).exclude("comments")
post = BlogPost.objects(...).exclude('comments')
.. note :: `exclude()` is chainable and will perform a union ::
So with the following it will exclude both `title` and `author.name`::
post = BlogPost.objects.exclude("title").exclude("author.name")
post = BlogPost.objects.exclude('title').exclude('author.name')
:func:`~mongoengine.queryset.QuerySet.all_fields` will reset any
field filters.
@@ -883,7 +896,7 @@ class BaseQuerySet(object):
.. versionadded:: 0.5
"""
fields = dict([(f, QueryFieldList.EXCLUDE) for f in fields])
fields = {f: QueryFieldList.EXCLUDE for f in fields}
return self.fields(**fields)
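Both helpers reduce to `fields()`; a quick sketch with a hypothetical `BlogPost` document:

BlogPost.objects.only('title', 'author.name')  # include only these fields
BlogPost.objects.exclude('comments')           # drop a single field
BlogPost.objects.fields(slice__comments=5)     # {'comments': {'$slice': 5}}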
def fields(self, _only_called=False, **kwargs):
@@ -904,7 +917,7 @@ class BaseQuerySet(object):
"""
# Check for an operator and transform to mongo-style if there is one
operators = ["slice"]
operators = ['slice']
cleaned_fields = []
for key, value in kwargs.items():
parts = key.split('__')
@@ -928,7 +941,7 @@ class BaseQuerySet(object):
"""Include all fields. Reset all previously calls of .only() or
.exclude(). ::
post = BlogPost.objects.exclude("comments").all_fields()
post = BlogPost.objects.exclude('comments').all_fields()
.. versionadded:: 0.5
"""
@@ -955,7 +968,7 @@ class BaseQuerySet(object):
See https://docs.mongodb.com/manual/reference/method/cursor.comment/#cursor.comment
for details.
"""
return self._chainable_method("comment", text)
return self._chainable_method('comment', text)
def explain(self, format=False):
"""Return an explain plan record for the
@@ -964,8 +977,15 @@ class BaseQuerySet(object):
:param format: format the plan before returning it
"""
plan = self._cursor.explain()
# TODO remove this option completely - it's useless. If somebody
# wants to pretty-print the output, they can easily do so.
if format:
msg = ('"format" param of BaseQuerySet.explain has been '
'deprecated and will be removed in future versions.')
warnings.warn(msg, DeprecationWarning)
plan = pprint.pformat(plan)
return plan
# DEPRECATED. Has no more impact on PyMongo 3+
@@ -978,7 +998,7 @@ class BaseQuerySet(object):
.. deprecated:: Ignored with PyMongo 3+
"""
if IS_PYMONGO_3:
msg = "snapshot is deprecated as it has no impact when using PyMongo 3+."
msg = 'snapshot is deprecated as it has no impact when using PyMongo 3+.'
warnings.warn(msg, DeprecationWarning)
queryset = self.clone()
queryset._snapshot = enabled
@@ -1004,7 +1024,7 @@ class BaseQuerySet(object):
.. deprecated:: Ignored with PyMongo 3+
"""
if IS_PYMONGO_3:
msg = "slave_okay is deprecated as it has no impact when using PyMongo 3+."
msg = 'slave_okay is deprecated as it has no impact when using PyMongo 3+.'
warnings.warn(msg, DeprecationWarning)
queryset = self.clone()
queryset._slave_okay = enabled
@@ -1066,7 +1086,7 @@ class BaseQuerySet(object):
:param ms: the number of milliseconds before killing the query on the server
"""
return self._chainable_method("max_time_ms", ms)
return self._chainable_method('max_time_ms', ms)
# JSON Helpers
@@ -1149,19 +1169,19 @@ class BaseQuerySet(object):
MapReduceDocument = _import_class('MapReduceDocument')
if not hasattr(self._collection, "map_reduce"):
raise NotImplementedError("Requires MongoDB >= 1.7.1")
if not hasattr(self._collection, 'map_reduce'):
raise NotImplementedError('Requires MongoDB >= 1.7.1')
map_f_scope = {}
if isinstance(map_f, Code):
map_f_scope = map_f.scope
map_f = unicode(map_f)
map_f = six.text_type(map_f)
map_f = Code(queryset._sub_js_fields(map_f), map_f_scope)
reduce_f_scope = {}
if isinstance(reduce_f, Code):
reduce_f_scope = reduce_f.scope
reduce_f = unicode(reduce_f)
reduce_f = six.text_type(reduce_f)
reduce_f_code = queryset._sub_js_fields(reduce_f)
reduce_f = Code(reduce_f_code, reduce_f_scope)
@@ -1171,7 +1191,7 @@ class BaseQuerySet(object):
finalize_f_scope = {}
if isinstance(finalize_f, Code):
finalize_f_scope = finalize_f.scope
finalize_f = unicode(finalize_f)
finalize_f = six.text_type(finalize_f)
finalize_f_code = queryset._sub_js_fields(finalize_f)
finalize_f = Code(finalize_f_code, finalize_f_scope)
mr_args['finalize'] = finalize_f
@@ -1187,7 +1207,7 @@ class BaseQuerySet(object):
else:
map_reduce_function = 'map_reduce'
if isinstance(output, basestring):
if isinstance(output, six.string_types):
mr_args['out'] = output
elif isinstance(output, dict):
@@ -1200,7 +1220,7 @@ class BaseQuerySet(object):
break
else:
raise OperationError("actionData not specified for output")
raise OperationError('actionData not specified for output')
db_alias = output.get('db_alias')
remaing_args = ['db', 'sharded', 'nonAtomic']
@@ -1430,7 +1450,7 @@ class BaseQuerySet(object):
# snapshot is not handled at all by PyMongo 3+
# TODO: evaluate similar possibilities using modifiers
if self._snapshot:
msg = "The snapshot option is not anymore available with PyMongo 3+"
msg = 'The snapshot option is no longer available with PyMongo 3+'
warnings.warn(msg, DeprecationWarning)
cursor_args = {
'no_cursor_timeout': not self._timeout
@@ -1442,7 +1462,7 @@ class BaseQuerySet(object):
if fields_name not in cursor_args:
cursor_args[fields_name] = {}
cursor_args[fields_name]['_text_score'] = {'$meta': "textScore"}
cursor_args[fields_name]['_text_score'] = {'$meta': 'textScore'}
return cursor_args
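The `_text_score` projection above backs text-search scoring; a sketch assuming a hypothetical `News` document whose collection has a text index:

qs = News.objects.search_text('mongodb').order_by('$text_score')
for doc in qs:
    # get_text_score() reads the {'$meta': 'textScore'} projection
    print(doc.get_text_score())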
@@ -1497,8 +1517,8 @@ class BaseQuerySet(object):
if self._mongo_query is None:
self._mongo_query = self._query_obj.to_query(self._document)
if self._class_check and self._initial_query:
if "_cls" in self._mongo_query:
self._mongo_query = {"$and": [self._initial_query, self._mongo_query]}
if '_cls' in self._mongo_query:
self._mongo_query = {'$and': [self._initial_query, self._mongo_query]}
else:
self._mongo_query.update(self._initial_query)
return self._mongo_query
@@ -1510,8 +1530,7 @@ class BaseQuerySet(object):
return self.__dereference
def no_dereference(self):
"""Turn off any dereferencing for the results of this queryset.
"""
"""Turn off any dereferencing for the results of this queryset."""
queryset = self.clone()
queryset._auto_dereference = False
return queryset
@@ -1540,7 +1559,7 @@ class BaseQuerySet(object):
emit(null, 1);
}
}
""" % dict(field=field)
""" % {'field': field}
reduce_func = """
function(key, values) {
var total = 0;
@@ -1562,8 +1581,8 @@ class BaseQuerySet(object):
if normalize:
count = sum(frequencies.values())
frequencies = dict([(k, float(v) / count)
for k, v in frequencies.items()])
frequencies = {k: float(v) / count
for k, v in frequencies.items()}
return frequencies
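Usage sketch for the surrounding `item_frequencies()` helper, with a hypothetical `Post` document carrying a `tag` field:

freqs = Post.objects.item_frequencies('tag')
# e.g. {'mongodb': 3.0, 'python': 1.0}

ratios = Post.objects.item_frequencies('tag', normalize=True)
# normalize=True divides every count by the total, via the dict
# comprehension above: e.g. {'mongodb': 0.75, 'python': 0.25}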
@@ -1615,10 +1634,10 @@ class BaseQuerySet(object):
}
"""
total, data, types = self.exec_js(freq_func, field)
values = dict([(types.get(k), int(v)) for k, v in data.iteritems()])
values = {types.get(k): int(v) for k, v in data.iteritems()}
if normalize:
values = dict([(k, float(v) / total) for k, v in values.items()])
values = {k: float(v) / total for k, v in values.items()}
frequencies = {}
for k, v in values.iteritems():
@@ -1640,14 +1659,14 @@ class BaseQuerySet(object):
for x in document._subclasses][1:]
for field in fields:
try:
field = ".".join(f.db_field for f in
field = '.'.join(f.db_field for f in
document._lookup_field(field.split('.')))
ret.append(field)
except LookUpError, err:
except LookUpError as err:
found = False
for subdoc in subclasses:
try:
subfield = ".".join(f.db_field for f in
subfield = '.'.join(f.db_field for f in
subdoc._lookup_field(field.split('.')))
ret.append(subfield)
found = True
@@ -1660,15 +1679,14 @@ class BaseQuerySet(object):
return ret
def _get_order_by(self, keys):
"""Creates a list of order by fields
"""
"""Creates a list of order by fields"""
key_list = []
for key in keys:
if not key:
continue
if key == '$text_score':
key_list.append(('_text_score', {'$meta': "textScore"}))
key_list.append(('_text_score', {'$meta': 'textScore'}))
continue
direction = pymongo.ASCENDING
@@ -1740,7 +1758,7 @@ class BaseQuerySet(object):
# If we need to coerce types, we need to determine the
# type of this field and use the corresponding
# .to_python(...)
from mongoengine.fields import EmbeddedDocumentField
EmbeddedDocumentField = _import_class('EmbeddedDocumentField')
obj = self._document
for chunk in path.split('.'):
@@ -1774,7 +1792,7 @@ class BaseQuerySet(object):
field_name = match.group(1).split('.')
fields = self._document._lookup_field(field_name)
# Substitute the correct name for the field into the javascript
return ".".join([f.db_field for f in fields])
return '.'.join([f.db_field for f in fields])
code = re.sub(u'\[\s*~([A-z_][A-z_0-9.]+?)\s*\]', field_sub, code)
code = re.sub(u'\{\{\s*~([A-z_][A-z_0-9.]+?)\s*\}\}', field_path_sub,
@@ -1785,21 +1803,21 @@ class BaseQuerySet(object):
queryset = self.clone()
method = getattr(queryset._cursor, method_name)
method(val)
setattr(queryset, "_" + method_name, val)
setattr(queryset, '_' + method_name, val)
return queryset
# Deprecated
def ensure_index(self, **kwargs):
"""Deprecated use :func:`Document.ensure_index`"""
msg = ("Doc.objects()._ensure_index() is deprecated. "
"Use Doc.ensure_index() instead.")
msg = ('Doc.objects()._ensure_index() is deprecated. '
'Use Doc.ensure_index() instead.')
warnings.warn(msg, DeprecationWarning)
self._document.__class__.ensure_index(**kwargs)
return self
def _ensure_indexes(self):
"""Deprecated use :func:`~Document.ensure_indexes`"""
msg = ("Doc.objects()._ensure_indexes() is deprecated. "
"Use Doc.ensure_indexes() instead.")
msg = ('Doc.objects()._ensure_indexes() is deprecated. '
'Use Doc.ensure_indexes() instead.')
warnings.warn(msg, DeprecationWarning)
self._document.__class__.ensure_indexes()

View File

@@ -67,7 +67,7 @@ class QueryFieldList(object):
return bool(self.fields)
def as_dict(self):
field_list = dict((field, self.value) for field in self.fields)
field_list = {field: self.value for field in self.fields}
if self.slice:
field_list.update(self.slice)
if self._id is not None:

View File

@@ -53,15 +53,14 @@ class QuerySet(BaseQuerySet):
return self._len
def __repr__(self):
"""Provides the string representation of the QuerySet
"""
"""Provide a string representation of the QuerySet"""
if self._iter:
return '.. queryset mid-iteration ..'
self._populate_cache()
data = self._result_cache[:REPR_OUTPUT_SIZE + 1]
if len(data) > REPR_OUTPUT_SIZE:
data[-1] = "...(remaining elements truncated)..."
data[-1] = '...(remaining elements truncated)...'
return repr(data)
def _iter_results(self):
@@ -113,7 +112,7 @@ class QuerySet(BaseQuerySet):
# Pull in ITER_CHUNK_SIZE docs from the database and store them in
# the result cache.
try:
for i in xrange(ITER_CHUNK_SIZE):
for _ in xrange(ITER_CHUNK_SIZE):
self._result_cache.append(self.next())
except StopIteration:
# Getting this exception means there are no more docs in the
@@ -142,7 +141,7 @@ class QuerySet(BaseQuerySet):
.. versionadded:: 0.8.3 Convert to non-caching queryset
"""
if self._result_cache is not None:
raise OperationError("QuerySet already cached")
raise OperationError('QuerySet already cached')
return self.clone_into(QuerySetNoCache(self._document, self._collection))
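Sketch of the cache escape hatch, with a hypothetical `Log` document and a hypothetical `handle()` callback; `no_cache()` must be called before the result cache has been populated:

for entry in Log.objects.no_cache():
    # documents are yielded without being stored in _result_cache,
    # which keeps memory flat on large scans
    handle(entry)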
@@ -165,13 +164,14 @@ class QuerySetNoCache(BaseQuerySet):
return '.. queryset mid-iteration ..'
data = []
for i in xrange(REPR_OUTPUT_SIZE + 1):
for _ in xrange(REPR_OUTPUT_SIZE + 1):
try:
data.append(self.next())
except StopIteration:
break
if len(data) > REPR_OUTPUT_SIZE:
data[-1] = "...(remaining elements truncated)..."
data[-1] = '...(remaining elements truncated)...'
self.rewind()
return repr(data)

View File

@@ -3,8 +3,9 @@ from collections import defaultdict
from bson import ObjectId, SON
from bson.dbref import DBRef
import pymongo
import six
from mongoengine.base.fields import UPDATE_OPERATORS
from mongoengine.base import UPDATE_OPERATORS
from mongoengine.common import _import_class
from mongoengine.connection import get_connection
from mongoengine.errors import InvalidQueryError
@@ -29,12 +30,11 @@ MATCH_OPERATORS = (COMPARISON_OPERATORS + GEO_OPERATORS +
# TODO make this less complex
def query(_doc_cls=None, **kwargs):
"""Transform a query from Django-style format to Mongo format.
"""
"""Transform a query from Django-style format to Mongo format."""
mongo_query = {}
merge_query = defaultdict(list)
for key, value in sorted(kwargs.items()):
if key == "__raw__":
if key == '__raw__':
mongo_query.update(value)
continue
@@ -47,7 +47,7 @@ def query(_doc_cls=None, **kwargs):
op = parts.pop()
# Allow escaping an operator-like field name with a trailing __
if len(parts) > 1 and parts[-1] == "":
if len(parts) > 1 and parts[-1] == '':
parts.pop()
negate = False
@@ -59,7 +59,7 @@ def query(_doc_cls=None, **kwargs):
# Switch field names to proper names [set in Field(name='foo')]
try:
fields = _doc_cls._lookup_field(parts)
except Exception, e:
except Exception as e:
raise InvalidQueryError(e)
parts = []
@@ -69,7 +69,7 @@ def query(_doc_cls=None, **kwargs):
cleaned_fields = []
for field in fields:
append_field = True
if isinstance(field, basestring):
if isinstance(field, six.string_types):
parts.append(field)
append_field = False
# is last and CachedReferenceField
@@ -87,9 +87,9 @@ def query(_doc_cls=None, **kwargs):
singular_ops = [None, 'ne', 'gt', 'gte', 'lt', 'lte', 'not']
singular_ops += STRING_OPERATORS
if op in singular_ops:
if isinstance(field, basestring):
if isinstance(field, six.string_types):
if (op in STRING_OPERATORS and
isinstance(value, basestring)):
isinstance(value, six.string_types)):
StringField = _import_class('StringField')
value = StringField.prepare_query_value(op, value)
else:
@@ -129,10 +129,10 @@ def query(_doc_cls=None, **kwargs):
value = query(field.field.document_type, **value)
else:
value = field.prepare_query_value(op, value)
value = {"$elemMatch": value}
value = {'$elemMatch': value}
elif op in CUSTOM_OPERATORS:
NotImplementedError("Custom method '%s' has not "
"been implemented" % op)
raise NotImplementedError('Custom method "%s" has not '
'been implemented' % op)
elif op not in STRING_OPERATORS:
value = {'$' + op: value}
@@ -197,15 +197,16 @@ def query(_doc_cls=None, **kwargs):
def update(_doc_cls=None, **update):
"""Transform an update spec from Django-style format to Mongo format.
"""Transform an update spec from Django-style format to Mongo
format.
"""
mongo_update = {}
for key, value in update.items():
if key == "__raw__":
if key == '__raw__':
mongo_update.update(value)
continue
parts = key.split('__')
# if there is no operator, default to "set"
# if there is no operator, default to 'set'
if len(parts) < 3 and parts[0] not in UPDATE_OPERATORS:
parts.insert(0, 'set')
# Check for an operator and transform to mongo-style if there is one
@@ -224,21 +225,21 @@ def update(_doc_cls=None, **update):
elif op == 'add_to_set':
op = 'addToSet'
elif op == 'set_on_insert':
op = "setOnInsert"
op = 'setOnInsert'
match = None
if parts[-1] in COMPARISON_OPERATORS:
match = parts.pop()
# Allow escaping an operator-like field name with a trailing __
if len(parts) > 1 and parts[-1] == "":
if len(parts) > 1 and parts[-1] == '':
parts.pop()
if _doc_cls:
# Switch field names to proper names [set in Field(name='foo')]
try:
fields = _doc_cls._lookup_field(parts)
except Exception, e:
except Exception as e:
raise InvalidQueryError(e)
parts = []
@@ -246,7 +247,7 @@ def update(_doc_cls=None, **update):
appended_sub_field = False
for field in fields:
append_field = True
if isinstance(field, basestring):
if isinstance(field, six.string_types):
# Convert the S operator to $
if field == 'S':
field = '$'
@@ -267,7 +268,7 @@ def update(_doc_cls=None, **update):
else:
field = cleaned_fields[-1]
GeoJsonBaseField = _import_class("GeoJsonBaseField")
GeoJsonBaseField = _import_class('GeoJsonBaseField')
if isinstance(field, GeoJsonBaseField):
value = field.to_mongo(value)
@@ -281,7 +282,7 @@ def update(_doc_cls=None, **update):
value = [field.prepare_query_value(op, v) for v in value]
elif field.required or value is not None:
value = field.prepare_query_value(op, value)
elif op == "unset":
elif op == 'unset':
value = 1
if match:
@@ -291,16 +292,16 @@ def update(_doc_cls=None, **update):
key = '.'.join(parts)
if not op:
raise InvalidQueryError("Updates must supply an operation "
"eg: set__FIELD=value")
raise InvalidQueryError('Updates must supply an operation, '
'e.g. set__FIELD=value')
if 'pull' in op and '.' in key:
# Dot operators don't work on pull operations
# unless they point to a list field
# Otherwise it uses nested dict syntax
if op == 'pullAll':
raise InvalidQueryError("pullAll operations only support "
"a single field depth")
raise InvalidQueryError('pullAll operations only support '
'a single field depth')
# Look for the last list field and use dot notation until there
field_classes = [c.__class__ for c in cleaned_fields]
@@ -311,7 +312,7 @@ def update(_doc_cls=None, **update):
# Then process as normal
last_listField = len(
cleaned_fields) - field_classes.index(ListField)
key = ".".join(parts[:last_listField])
key = '.'.join(parts[:last_listField])
parts = parts[last_listField:]
parts.insert(0, key)
@@ -319,7 +320,7 @@ def update(_doc_cls=None, **update):
for key in parts:
value = {key: value}
elif op == 'addToSet' and isinstance(value, list):
value = {key: {"$each": value}}
value = {key: {'$each': value}}
else:
value = {key: value}
key = '$' + op
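A sketch of the operator translation this function performs, using a hypothetical `Post` document with a `tags` list:

Post.objects(slug='intro').update(pull__tags='draft')
# -> {'$pull': {'tags': 'draft'}}

Post.objects(slug='intro').update(add_to_set__tags=['db', 'nosql'])
# list values get wrapped as shown above:
# -> {'$addToSet': {'tags': {'$each': ['db', 'nosql']}}}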
@@ -333,78 +334,82 @@ def update(_doc_cls=None, **update):
def _geo_operator(field, op, value):
"""Helper to return the query for a given geo query"""
if op == "max_distance":
"""Helper to return the query for a given geo query."""
if op == 'max_distance':
value = {'$maxDistance': value}
elif op == "min_distance":
elif op == 'min_distance':
value = {'$minDistance': value}
elif field._geo_index == pymongo.GEO2D:
if op == "within_distance":
if op == 'within_distance':
value = {'$within': {'$center': value}}
elif op == "within_spherical_distance":
elif op == 'within_spherical_distance':
value = {'$within': {'$centerSphere': value}}
elif op == "within_polygon":
elif op == 'within_polygon':
value = {'$within': {'$polygon': value}}
elif op == "near":
elif op == 'near':
value = {'$near': value}
elif op == "near_sphere":
elif op == 'near_sphere':
value = {'$nearSphere': value}
elif op == 'within_box':
value = {'$within': {'$box': value}}
else:
raise NotImplementedError("Geo method '%s' has not "
"been implemented for a GeoPointField" % op)
raise NotImplementedError('Geo method "%s" has not been '
'implemented for a GeoPointField' % op)
else:
if op == "geo_within":
value = {"$geoWithin": _infer_geometry(value)}
elif op == "geo_within_box":
value = {"$geoWithin": {"$box": value}}
elif op == "geo_within_polygon":
value = {"$geoWithin": {"$polygon": value}}
elif op == "geo_within_center":
value = {"$geoWithin": {"$center": value}}
elif op == "geo_within_sphere":
value = {"$geoWithin": {"$centerSphere": value}}
elif op == "geo_intersects":
value = {"$geoIntersects": _infer_geometry(value)}
elif op == "near":
if op == 'geo_within':
value = {'$geoWithin': _infer_geometry(value)}
elif op == 'geo_within_box':
value = {'$geoWithin': {'$box': value}}
elif op == 'geo_within_polygon':
value = {'$geoWithin': {'$polygon': value}}
elif op == 'geo_within_center':
value = {'$geoWithin': {'$center': value}}
elif op == 'geo_within_sphere':
value = {'$geoWithin': {'$centerSphere': value}}
elif op == 'geo_intersects':
value = {'$geoIntersects': _infer_geometry(value)}
elif op == 'near':
value = {'$near': _infer_geometry(value)}
else:
raise NotImplementedError("Geo method '%s' has not "
"been implemented for a %s " % (op, field._name))
raise NotImplementedError(
'Geo method "%s" has not been implemented for a %s '
% (op, field._name)
)
return value
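Sketch of the query operators this helper covers, assuming a hypothetical `Place` document with a `PointField` (2dsphere index):

Place.objects(location__near=[-122.42, 37.77])
# -> {'$near': {'$geometry': {'type': 'Point', 'coordinates': [...]}}}

Place.objects(location__geo_within_box=[(-125, 35), (-118, 40)])
# -> {'$geoWithin': {'$box': [(-125, 35), (-118, 40)]}}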
def _infer_geometry(value):
"""Helper method that tries to infer the $geometry shape for a given value"""
"""Helper method that tries to infer the $geometry shape for a
given value.
"""
if isinstance(value, dict):
if "$geometry" in value:
if '$geometry' in value:
return value
elif 'coordinates' in value and 'type' in value:
return {"$geometry": value}
raise InvalidQueryError("Invalid $geometry dictionary should have "
"type and coordinates keys")
return {'$geometry': value}
raise InvalidQueryError('Invalid $geometry dictionary should have '
'type and coordinates keys')
elif isinstance(value, (list, set)):
# TODO: shouldn't we test value[0][0][0][0] to see if it is MultiPolygon?
# TODO: should TypeError and IndexError be handled the same way?
try:
value[0][0][0]
return {"$geometry": {"type": "Polygon", "coordinates": value}}
return {'$geometry': {'type': 'Polygon', 'coordinates': value}}
except (TypeError, IndexError):
pass
try:
value[0][0]
return {"$geometry": {"type": "LineString", "coordinates": value}}
return {'$geometry': {'type': 'LineString', 'coordinates': value}}
except (TypeError, IndexError):
pass
try:
value[0]
return {"$geometry": {"type": "Point", "coordinates": value}}
return {'$geometry': {'type': 'Point', 'coordinates': value}}
except (TypeError, IndexError):
pass
raise InvalidQueryError("Invalid $geometry data. Can be either a dictionary "
"or (nested) lists of coordinate(s)")
raise InvalidQueryError('Invalid $geometry data. Can be either a '
'dictionary or (nested) lists of coordinate(s)')
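The nesting depth of the value picks the GeoJSON type; a sketch of the three probes above (calling the private helper directly, for illustration only):

_infer_geometry([40, 5])
# -> {'$geometry': {'type': 'Point', 'coordinates': [40, 5]}}

_infer_geometry([[40, 5], [41, 6]])
# -> {'$geometry': {'type': 'LineString', 'coordinates': ...}}

_infer_geometry([[[40, 5], [41, 6], [40, 6], [40, 5]]])
# -> {'$geometry': {'type': 'Polygon', 'coordinates': ...}}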

View File

@@ -69,9 +69,9 @@ class QueryCompilerVisitor(QNodeVisitor):
self.document = document
def visit_combination(self, combination):
operator = "$and"
operator = '$and'
if combination.operation == combination.OR:
operator = "$or"
operator = '$or'
return {operator: combination.children}
def visit_query(self, query):
@@ -79,8 +79,7 @@ class QueryCompilerVisitor(QNodeVisitor):
class QNode(object):
"""Base class for nodes in query trees.
"""
"""Base class for nodes in query trees."""
AND = 0
OR = 1
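Sketch of what the visitor produces for combined nodes, with a hypothetical `Post` document:

from mongoengine import Q

Post.objects(Q(author='alice') | Q(tags='python'))
# visit_combination -> {'$or': [{'author': 'alice'}, {'tags': 'python'}]}

Post.objects(Q(author='alice') & Q(published=True))
# AND combinations render via '$and' (or are merged into a single
# dict when the simplification step can flatten them)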
@@ -94,7 +93,8 @@ class QNode(object):
raise NotImplementedError
def _combine(self, other, operation):
"""Combine this node with another node into a QCombination object.
"""Combine this node with another node into a QCombination
object.
"""
if getattr(other, 'empty', True):
return self
@@ -116,8 +116,8 @@ class QNode(object):
class QCombination(QNode):
"""Represents the combination of several conditions by a given logical
operator.
"""Represents the combination of several conditions by a given
logical operator.
"""
def __init__(self, operation, children):

View File

@@ -1,7 +1,5 @@
# -*- coding: utf-8 -*-
__all__ = ['pre_init', 'post_init', 'pre_save', 'pre_save_post_validation',
'post_save', 'pre_delete', 'post_delete']
__all__ = ('pre_init', 'post_init', 'pre_save', 'pre_save_post_validation',
'post_save', 'pre_delete', 'post_delete')
signals_available = False
try:
@@ -34,6 +32,7 @@ except ImportError:
temporarily_connected_to = _fail
del _fail
# the namespace for code signals. If you are not mongoengine code, do
# not put signals in here. Create your own namespace instead.
_signals = Namespace()
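Usage sketch for the namespace set up above, assuming blinker is installed and a hypothetical `User` document:

from mongoengine import signals

def audit(sender, document, **kwargs):
    print('about to save %s' % document)

signals.pre_save.connect(audit, sender=User)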