Merge branch 'dev-disable-indexing' of https://github.com/colinhowe/mongoengine
Conflicts: mongoengine/queryset.py
Commit 4ce1ba81a6
@@ -4,6 +4,7 @@ Changelog

Changes in 0.6.X
================
- Added meta `auto_create_index` so you can disable index creation
- Added write concern options to inserts
- Fixed typo in meta for index options
- Bug fix: Read preference now passed correctly

@@ -74,6 +74,12 @@ class Document(BaseDocument):
names. Index direction may be specified by prefixing the field names with
a **+** or **-** sign.

Automatic index creation can be disabled by specifying
:attr:`auto_create_index` in the :attr:`meta` dictionary. If this is set to
False then indexes will not be created by MongoEngine. This is useful in
production systems where index creation is performed as part of a deployment
system.

By default, _types will be added to the start of every index (that
doesn't contain a list) if allow_inheritance is True. This can be
disabled by either setting types to False on the specific index or

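As a concrete illustration of the new meta option, here is a minimal sketch (assuming an open connection; the BlogPost class, its field, and the database name are illustrative, not part of this patch):

from mongoengine import Document, StringField, connect

connect('example_db')  # assumed database name, not from the patch

class BlogPost(Document):
    title = StringField()

    meta = {
        'indexes': ['title'],        # declared, but not created automatically
        'auto_create_index': False,  # index creation left to the deployment system
    }

# Saving works as usual; MongoEngine simply never calls ensure_index for
# this document, so the 'title' index must be built externally.
BlogPost(title='hello').save()
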
@@ -147,8 +153,8 @@ class Document(BaseDocument):
:meth:`~pymongo.collection.Collection.save` OR
:meth:`~pymongo.collection.Collection.insert`
which will be used as options for the resultant ``getLastError`` command.
For example, ``save(..., write_options={w: 2, fsync: True}, ...)`` will
wait until at least two servers have recorded the write and will force an
fsync on each server being written to.
:param cascade: Sets the flag for cascading saves. You can set a default by setting
"cascade" in the document __meta__

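A short sketch of the call described above (the Post class and database name are assumptions, and in Python source the option keys are written as strings):

from mongoengine import Document, StringField, connect

connect('example_db')  # assumed database name

class Post(Document):
    title = StringField()

post = Post(title='hello')
# The options are forwarded to pymongo and end up on the getLastError
# command: wait for two servers to record the write, then fsync each.
post.save(write_options={'w': 2, 'fsync': True})
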
@@ -481,13 +481,65 @@ class QuerySet(object):
        """Returns all documents."""
        return self.__call__()

    def _ensure_indexes(self):
        """Checks the document meta data and ensures all the indexes exist.

        .. note:: You can disable automatic index creation by setting
            `auto_create_index` to False in the document's meta data
        """
        background = self._document._meta.get('index_background', False)
        drop_dups = self._document._meta.get('index_drop_dups', False)
        index_opts = self._document._meta.get('index_opts', {})
        index_types = self._document._meta.get('index_types', True)

        # determine if an index which we are creating includes
        # _type as its first field; if so, we can avoid creating
        # an extra index on _type, as mongodb will use the existing
        # index to service queries against _type
        types_indexed = False
        def includes_types(fields):
            first_field = None
            if len(fields):
                if isinstance(fields[0], basestring):
                    first_field = fields[0]
                elif isinstance(fields[0], (list, tuple)) and len(fields[0]):
                    first_field = fields[0][0]
            return first_field == '_types'

        # Ensure indexes created by uniqueness constraints
        for index in self._document._meta['unique_indexes']:
            types_indexed = types_indexed or includes_types(index)
            self._collection.ensure_index(index, unique=True,
                background=background, drop_dups=drop_dups, **index_opts)

        # Ensure document-defined indexes are created
        if self._document._meta['indexes']:
            for spec in self._document._meta['indexes']:
                types_indexed = types_indexed or includes_types(spec['fields'])
                opts = index_opts.copy()
                opts['unique'] = spec.get('unique', False)
                opts['sparse'] = spec.get('sparse', False)
                self._collection.ensure_index(spec['fields'],
                    background=background, **opts)

        # If _types is being used (for polymorphism), it needs an index,
        # only if another index doesn't begin with _types
        if index_types and '_types' in self._query and not types_indexed:
            self._collection.ensure_index('_types',
                background=background, **index_opts)

        # Add geo indices
        for field in self._document._geo_indices():
            index_spec = [(field.db_field, pymongo.GEO2D)]
            self._collection.ensure_index(index_spec,
                background=background, **index_opts)

    @property
    def _collection(self):
        """Property that returns the collection object. This allows us to
        perform operations only if the collection is accessed.
        """
        if self._document not in QuerySet.__already_indexed:

            # Ensure collection exists
            db = self._document._get_db()
            if self._collection_obj.name not in db.collection_names():

@@ -496,52 +548,8 @@ class QuerySet(object):

            QuerySet.__already_indexed.add(self._document)

            background = self._document._meta.get('index_background', False)
            drop_dups = self._document._meta.get('index_drop_dups', False)
            index_opts = self._document._meta.get('index_opts', {})
            index_types = self._document._meta.get('index_types', True)

            # determine if an index which we are creating includes
            # _type as its first field; if so, we can avoid creating
            # an extra index on _type, as mongodb will use the existing
            # index to service queries against _type
            types_indexed = False
            def includes_types(fields):
                first_field = None
                if len(fields):
                    if isinstance(fields[0], basestring):
                        first_field = fields[0]
                    elif isinstance(fields[0], (list, tuple)) and len(fields[0]):
                        first_field = fields[0][0]
                return first_field == '_types'

            # Ensure indexes created by uniqueness constraints
            for index in self._document._meta['unique_indexes']:
                types_indexed = types_indexed or includes_types(index)
                self._collection.ensure_index(index, unique=True,
                    background=background, drop_dups=drop_dups, **index_opts)

            # Ensure document-defined indexes are created
            if self._document._meta['indexes']:
                for spec in self._document._meta['indexes']:
                    types_indexed = types_indexed or includes_types(spec['fields'])
                    opts = index_opts.copy()
                    opts['unique'] = spec.get('unique', False)
                    opts['sparse'] = spec.get('sparse', False)
                    self._collection.ensure_index(spec['fields'],
                        background=background, **opts)

            # If _types is being used (for polymorphism), it needs an index,
            # only if another index doesn't begin with _types
            if index_types and '_types' in self._query and not types_indexed:
                self._collection.ensure_index('_types',
                    background=background, **index_opts)

            # Add geo indices
            for field in self._document._geo_indices():
                index_spec = [(field.db_field, pymongo.GEO2D)]
                self._collection.ensure_index(index_spec,
                    background=background, **index_opts)
            if self._document._meta.get('auto_create_index', True):
                self._ensure_indexes()

        return self._collection_obj

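To see what the index-creation path above produces, here is a sketch mirroring the existing test fixtures (the Person class and database name are illustrative; the exact index names assume allow_inheritance is left at its default):

from mongoengine import Document, StringField, connect

connect('example_db')  # assumed database name

class Person(Document):
    name = StringField()
    user_guid = StringField(unique=True)  # ends up in meta['unique_indexes']

    meta = {
        'indexes': ['name'],  # ends up in meta['indexes']
        # 'auto_create_index' defaults to True, so the first collection
        # access below runs _ensure_indexes()
    }

Person(name='Ross', user_guid='123').save()
# Both indexes get a leading _types key, e.g. _types_1_name_1 and
# _types_1_user_guid_1, alongside the default _id_ index.
print(Person.objects._collection.index_information().keys())
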
@@ -836,8 +844,8 @@ class QuerySet(object):
:param write_options: Extra keyword arguments are passed down to
:meth:`~pymongo.collection.Collection.insert`
which will be used as options for the resultant ``getLastError`` command.
For example, ``insert(..., {w: 2, fsync: True})`` will wait until at least two
servers have recorded the write and will force an fsync on each server being
written to.

By default returns document instances, set ``load_bulk`` to False to

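A sketch of the bulk-insert form of the same options (the Comment class and database name are illustrative, and the replica-set requirements of ``w: 2`` are assumed to be satisfied):

from mongoengine import Document, StringField, connect

connect('example_db')  # assumed database name

class Comment(Document):
    text = StringField()

comments = [Comment(text='first'), Comment(text='second')]
# A single insert call for both documents; getLastError waits for two
# servers and forces an fsync before returning.
saved = Comment.objects.insert(comments, write_options={'w': 2, 'fsync': True})
# load_bulk defaults to True, so `saved` holds Comment instances.
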
@@ -741,6 +741,28 @@ class DocumentTest(unittest.TestCase):
        self.assertEqual(info.keys(), ['_types_1_user_guid_1', '_id_', '_types_1_name_1'])
        Person.drop_collection()

    def test_disable_index_creation(self):
        """Tests that setting auto_create_index to False in the document meta
        disables any index generation.
        """
        class User(Document):
            meta = {
                'indexes': ['user_guid'],
                'auto_create_index': False
            }
            user_guid = StringField(required=True)

        User.drop_collection()

        u = User(user_guid='123')
        u.save()

        self.assertEquals(1, User.objects.count())
        info = User.objects._collection.index_information()
        self.assertEqual(info.keys(), ['_id_'])
        User.drop_collection()

    def test_embedded_document_index(self):
        """Tests setting an index on an embedded document
        """