Merge remote-tracking branch 'MongoEngine/master'

Author: Breeze.kay
Date:   2015-06-19 11:14:51 +08:00
19 changed files with 765 additions and 184 deletions

mongoengine/document.py

@@ -1,4 +1,4 @@
import warnings
import pymongo
import re
@@ -17,6 +17,7 @@ from mongoengine.base import (
get_document
)
from mongoengine.errors import InvalidQueryError, InvalidDocumentError
+from mongoengine.python_support import IS_PYMONGO_3
from mongoengine.queryset import (OperationError, NotUniqueError,
QuerySet, transform)
from mongoengine.connection import get_db, DEFAULT_CONNECTION_NAME
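# The new IS_PYMONGO_3 flag gates driver-specific behaviour throughout
# this file. A minimal sketch of how such a flag can be derived
# (mongoengine keeps it in python_support; details there may differ):
import pymongo
IS_PYMONGO_3 = pymongo.version_tuple[0] >= 3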
@@ -113,9 +114,11 @@ class Document(BaseDocument):
specifying :attr:`max_documents` and :attr:`max_size` in the :attr:`meta`
dictionary. :attr:`max_documents` is the maximum number of documents that
is allowed to be stored in the collection, and :attr:`max_size` is the
-maximum size of the collection in bytes. If :attr:`max_size` is not
+maximum size of the collection in bytes. :attr:`max_size` is rounded up
+to the next multiple of 256 by MongoDB internally (and by mongoengine
+beforehand); use a multiple of 256 yourself to avoid confusion.
+If :attr:`max_size` is not
specified and :attr:`max_documents` is, :attr:`max_size` defaults to
-10000000 bytes (10MB).
+10485760 bytes (10MB).
Indexes may be created by specifying :attr:`indexes` in the :attr:`meta`
dictionary. The value should be a list of field names or tuples of field
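# A minimal sketch of the capped-collection meta described above, using
# a hypothetical Log document; max_size is kept a multiple of 256:
from mongoengine import Document, StringField

class Log(Document):
    message = StringField()
    meta = {'max_documents': 10000, 'max_size': 4 * 2 ** 20}  # 4 MiB cap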
@@ -136,7 +139,7 @@ class Document(BaseDocument):
By default, any extra attribute existing in stored data but not declared
in your model will raise a :class:`~mongoengine.FieldDoesNotExist` error.
This can be disabled by setting :attr:`strict` to ``False``
-in the :attr:`meta` dictionnary.
+in the :attr:`meta` dictionary.
"""
# The __metaclass__ attribute is removed by 2to3 when running with Python3
@@ -144,13 +147,15 @@ class Document(BaseDocument):
my_metaclass = TopLevelDocumentMetaclass
__metaclass__ = TopLevelDocumentMetaclass
-__slots__ = ('__objects')
+__slots__ = ('__objects',)
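# Why the trailing comma matters: ('__objects') is only a parenthesised
# string, not a tuple, so code that iterates __slots__ would see single
# characters rather than one attribute name:
assert isinstance(('__objects'), str)     # just a string
assert isinstance(('__objects',), tuple)  # the intended one-element tuple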
def pk():
"""Primary key alias
"""
def fget(self):
+if 'id_field' not in self._meta:
+    return None
return getattr(self, self._meta['id_field'])
def fset(self, value):
@@ -171,10 +176,13 @@ class Document(BaseDocument):
db = cls._get_db()
collection_name = cls._get_collection_name()
# Create collection as a capped collection if specified
-if cls._meta['max_size'] or cls._meta['max_documents']:
+if cls._meta.get('max_size') or cls._meta.get('max_documents'):
# Get max document limit and max byte size from meta
-max_size = cls._meta['max_size'] or 10000000  # 10MB default
-max_documents = cls._meta['max_documents']
+max_size = cls._meta.get('max_size') or 10 * 2 ** 20  # 10MB default
+max_documents = cls._meta.get('max_documents')
+# Round up to the next multiple of 256 bytes, as MongoDB itself
+# would, to avoid an exception on collection creation
+if max_size % 256:
+    max_size = (max_size // 256 + 1) * 256
if collection_name in db.collection_names():
cls._collection = db[collection_name]
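# Worked example of the rounding above: a requested max_size of 1000
# bytes is not a multiple of 256, so it becomes (1000 // 256 + 1) * 256,
# i.e. 1024, which matches what MongoDB would report back.
max_size = 1000
if max_size % 256:
    max_size = (max_size // 256 + 1) * 256
assert max_size == 1024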
@@ -265,7 +273,8 @@ class Document(BaseDocument):
to cascading saves. Implies ``cascade=True``.
:param _refs: A list of processed references used in cascading saves
:param save_condition: only perform save if matching record in db
-satisfies condition(s) (e.g., version number)
+satisfies condition(s) (e.g. version number).
+Raises :class:`OperationError` if the conditions are not satisfied
.. versionchanged:: 0.5
In existing documents it only saves changed fields using
@@ -283,6 +292,8 @@ class Document(BaseDocument):
.. versionchanged:: 0.8.5
Optional save_condition that only overwrites existing documents
if the condition is satisfied in the current db record.
+.. versionchanged:: 0.10
+    :class:`OperationError` exception raised if save_condition fails.
"""
signals.pre_save.send(self.__class__, document=self)
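# A save_condition sketch with a hypothetical versioned Page document:
# the write only applies while the stored version still matches,
# otherwise the new OperationError surfaces the lost race.
from mongoengine.errors import OperationError

page = Page.objects.first()  # Page and its version field are illustrative
page.title = 'updated title'
try:
    page.save(save_condition={'version': page.version})
except OperationError:
    pass  # a concurrent writer changed the document first; retry or report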
@@ -347,6 +358,9 @@ class Document(BaseDocument):
upsert = save_condition is None
last_error = collection.update(select_dict, update_query,
upsert=upsert, **write_concern)
+if not upsert and last_error['nModified'] == 0:
+    raise OperationError('Race condition preventing'
+                         ' document update detected')
created = is_new_object(last_error)
if cascade is None:
@@ -635,23 +649,51 @@ class Document(BaseDocument):
db = cls._get_db()
db.drop_collection(cls._get_collection_name())
@classmethod
+def create_index(cls, keys, background=False, **kwargs):
+    """Creates the given index if required.
+
+    :param keys: a single index key or a list of index keys (to
+        construct a multi-field index); keys may be prefixed with a **+**
+        or a **-** to determine the index ordering
+    :param background: Allows index creation in the background
+    """
+    index_spec = cls._build_index_spec(keys)
+    index_spec = index_spec.copy()
+    fields = index_spec.pop('fields')
+    drop_dups = kwargs.get('drop_dups', False)
+    if IS_PYMONGO_3 and drop_dups:
+        msg = "drop_dups is deprecated and is removed when using PyMongo 3+."
+        warnings.warn(msg, DeprecationWarning)
+    elif not IS_PYMONGO_3:
+        index_spec['drop_dups'] = drop_dups
+    index_spec['background'] = background
+    index_spec.update(kwargs)
+    if IS_PYMONGO_3:
+        return cls._get_collection().create_index(fields, **index_spec)
+    else:
+        return cls._get_collection().ensure_index(fields, **index_spec)
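# Usage sketch for the new classmethod, with a hypothetical BlogPost
# document; the +/- prefixes choose ascending or descending ordering:
BlogPost.create_index(['+date', '-rating'], background=True)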
@classmethod
def ensure_index(cls, key_or_list, drop_dups=False, background=False,
**kwargs):
"""Ensure that the given indexes are in place.
"""Ensure that the given indexes are in place. Deprecated in favour
of create_index.
:param key_or_list: a single index key or a list of index keys (to
construct a multi-field index); keys may be prefixed with a **+**
or a **-** to determine the index ordering
:param background: Allows index creation in the background
+:param drop_dups: Ignored by MongoDB versions > 2.7.5; the value
+    is removed (with a warning) if PyMongo 3+ is used
"""
-index_spec = cls._build_index_spec(key_or_list)
-index_spec = index_spec.copy()
-fields = index_spec.pop('fields')
-index_spec['drop_dups'] = drop_dups
-index_spec['background'] = background
-index_spec.update(kwargs)
-return cls._get_collection().ensure_index(fields, **index_spec)
+if IS_PYMONGO_3 and drop_dups:
+    msg = "drop_dups is deprecated and is removed when using PyMongo 3+."
+    warnings.warn(msg, DeprecationWarning)
+elif not IS_PYMONGO_3:
+    kwargs.update({'drop_dups': drop_dups})
+return cls.create_index(key_or_list, background=background, **kwargs)
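# Both spellings now converge: the legacy wrapper only warns about
# drop_dups under PyMongo 3 before delegating (BlogPost as above):
BlogPost.ensure_index('+date')   # kept for backwards compatibility
BlogPost.create_index('+date')   # preferred API going forward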
@classmethod
def ensure_indexes(cls):
@@ -666,6 +708,9 @@ class Document(BaseDocument):
drop_dups = cls._meta.get('index_drop_dups', False)
index_opts = cls._meta.get('index_opts') or {}
index_cls = cls._meta.get('index_cls', True)
+if IS_PYMONGO_3 and drop_dups:
+    msg = "drop_dups is deprecated and is removed when using PyMongo 3+."
+    warnings.warn(msg, DeprecationWarning)
collection = cls._get_collection()
# 746: when connection is via mongos, the read preference is not necessarily an indication that
@@ -694,8 +739,11 @@ class Document(BaseDocument):
if 'cls' in opts:
del opts['cls']
-collection.ensure_index(fields, background=background,
-                        drop_dups=drop_dups, **opts)
+if IS_PYMONGO_3:
+    collection.create_index(fields, background=background, **opts)
+else:
+    collection.ensure_index(fields, background=background,
+                            drop_dups=drop_dups, **opts)
# If _cls is being used (for polymorphism), it needs an index,
# only if another index doesn't begin with _cls
@@ -707,8 +755,12 @@ class Document(BaseDocument):
if 'cls' in index_opts:
del index_opts['cls']
-collection.ensure_index('_cls', background=background,
-                        **index_opts)
+if IS_PYMONGO_3:
+    collection.create_index('_cls', background=background,
+                            **index_opts)
+else:
+    collection.ensure_index('_cls', background=background,
+                            **index_opts)
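# The repeated branch above, condensed; PyMongO 3 deprecated the
# driver-level ensure_index, so only create_index is safe to call there
# (collection, background and index_opts as in the surrounding code):
index_fn = collection.create_index if IS_PYMONGO_3 else collection.ensure_index
index_fn('_cls', background=background, **index_opts)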
@classmethod
def list_indexes(cls, go_up=True, go_down=True):