Merge branch 'dev' into feature/update_lists
--- a/mongoengine/base.py
+++ b/mongoengine/base.py
@@ -7,16 +7,26 @@ import pymongo
 import pymongo.objectid
 
 
-_document_registry = {}
-
-def get_document(name):
-    return _document_registry[name]
+class NotRegistered(Exception):
+    pass
 
 
 class ValidationError(Exception):
     pass
 
 
+_document_registry = {}
+
+def get_document(name):
+    if name not in _document_registry:
+        raise NotRegistered("""
+            `%s` has not been registered in the document registry.
+            Importing the document class automatically registers it, has it
+            been imported?
+        """.strip() % name)
+    return _document_registry[name]
+
+
 class BaseField(object):
     """A base class for fields in a MongoDB document. Instances of this class
     may be added to subclasses of `Document` to define a document's schema.
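A minimal sketch of the new failure mode (the `Page` class and `myapp.models` module are hypothetical)::

    from mongoengine.base import get_document, NotRegistered

    try:
        page_cls = get_document('Page')
    except NotRegistered:
        # Defining or importing the Page document class registers it in
        # _document_registry; do that, then the lookup succeeds.
        from myapp.models import Page  # hypothetical module
        page_cls = get_document('Page')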
@@ -295,6 +305,30 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
                             for spec in meta['indexes']] + base_indexes
         new_class._meta['indexes'] = user_indexes
 
+        unique_indexes = cls._unique_with_indexes(new_class)
+        new_class._meta['unique_indexes'] = unique_indexes
+
+        for field_name, field in new_class._fields.items():
+            # Check for custom primary key
+            if field.primary_key:
+                current_pk = new_class._meta['id_field']
+                if current_pk and current_pk != field_name:
+                    raise ValueError('Cannot override primary key field')
+
+                if not current_pk:
+                    new_class._meta['id_field'] = field_name
+                    # Make 'Document.id' an alias to the real primary key field
+                    new_class.id = field
+
+        if not new_class._meta['id_field']:
+            new_class._meta['id_field'] = 'id'
+            new_class._fields['id'] = ObjectIdField(db_field='_id')
+            new_class.id = new_class._fields['id']
+
+        return new_class
+
+    @classmethod
+    def _unique_with_indexes(cls, new_class, namespace=""):
         unique_indexes = []
         for field_name, field in new_class._fields.items():
             # Generate a list of indexes needed by uniqueness constraints
@@ -320,28 +354,16 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
                     unique_fields += unique_with
 
                 # Add the new index to the list
-                index = [(f, pymongo.ASCENDING) for f in unique_fields]
+                index = [("%s%s" % (namespace, f), pymongo.ASCENDING) for f in unique_fields]
                 unique_indexes.append(index)
 
-            # Check for custom primary key
-            if field.primary_key:
-                current_pk = new_class._meta['id_field']
-                if current_pk and current_pk != field_name:
-                    raise ValueError('Cannot override primary key field')
+            # Grab any embedded document field unique indexes
+            if field.__class__.__name__ == "EmbeddedDocumentField":
+                field_namespace = "%s." % field_name
+                unique_indexes += cls._unique_with_indexes(field.document_type,
+                                                           field_namespace)
 
-                if not current_pk:
-                    new_class._meta['id_field'] = field_name
-                    # Make 'Document.id' an alias to the real primary key field
-                    new_class.id = field
-
-        new_class._meta['unique_indexes'] = unique_indexes
-
-        if not new_class._meta['id_field']:
-            new_class._meta['id_field'] = 'id'
-            new_class._fields['id'] = ObjectIdField(db_field='_id')
-            new_class.id = new_class._fields['id']
-
-        return new_class
+        return unique_indexes
 
 
 class BaseDocument(object):
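The extracted `_unique_with_indexes` recurses into embedded documents, so a unique field declared on an embedded document now yields an index spec namespaced by the embedding field. A sketch, assuming this 0.5-dev API (class names hypothetical)::

    from mongoengine import (Document, EmbeddedDocument,
                             EmbeddedDocumentField, StringField, IntField)

    class SubDocument(EmbeddedDocument):
        year = IntField()
        slug = StringField(unique=True)  # unique inside the embedded doc

    class BlogPost(Document):
        title = StringField()
        sub = EmbeddedDocumentField(SubDocument)

    # The metaclass namespaces the embedded unique field with "sub.", so the
    # generated spec is [('sub.slug', pymongo.ASCENDING)], not [('slug', ...)].
    print BlogPost._meta['unique_indexes']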
--- a/mongoengine/document.py
+++ b/mongoengine/document.py
@@ -40,44 +40,54 @@ class Document(BaseDocument):
     presence of `_cls` and `_types`, set :attr:`allow_inheritance` to
     ``False`` in the :attr:`meta` dictionary.
 
     A :class:`~mongoengine.Document` may use a **Capped Collection** by
     specifying :attr:`max_documents` and :attr:`max_size` in the :attr:`meta`
     dictionary. :attr:`max_documents` is the maximum number of documents that
     is allowed to be stored in the collection, and :attr:`max_size` is the
     maximum size of the collection in bytes. If :attr:`max_size` is not
     specified and :attr:`max_documents` is, :attr:`max_size` defaults to
     10000000 bytes (10MB).
 
     Indexes may be created by specifying :attr:`indexes` in the :attr:`meta`
     dictionary. The value should be a list of field names or tuples of field
     names. Index direction may be specified by prefixing the field names with
     a **+** or **-** sign.
     """
 
     __metaclass__ = TopLevelDocumentMetaclass
 
-    def save(self, safe=True, force_insert=False, validate=True):
+    def save(self, safe=True, force_insert=False, validate=True, write_options=None):
         """Save the :class:`~mongoengine.Document` to the database. If the
         document already exists, it will be updated, otherwise it will be
         created.
 
         If ``safe=True`` and the operation is unsuccessful, an
         :class:`~mongoengine.OperationError` will be raised.
 
         :param safe: check if the operation succeeded before returning
         :param force_insert: only try to create a new document, don't allow
             updates of existing documents
         :param validate: validates the document; set to ``False`` to skip.
+        :param write_options: Extra keyword arguments are passed down to
+            :meth:`~pymongo.collection.Collection.save` OR
+            :meth:`~pymongo.collection.Collection.insert`
+            which will be used as options for the resultant ``getLastError`` command.
+            For example, ``save(..., w=2, fsync=True)`` will wait until at least two servers
+            have recorded the write and will force an fsync on each server being written to.
         """
         if validate:
             self.validate()
+
+        if not write_options:
+            write_options = {}
+
         doc = self.to_mongo()
         try:
             collection = self.__class__.objects._collection
             if force_insert:
-                object_id = collection.insert(doc, safe=safe)
+                object_id = collection.insert(doc, safe=safe, **write_options)
             else:
-                object_id = collection.save(doc, safe=safe)
+                object_id = collection.save(doc, safe=safe, **write_options)
         except pymongo.errors.OperationFailure, err:
             message = 'Could not save document (%s)'
             if u'duplicate key' in unicode(err):
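A usage sketch for the new ``write_options`` parameter (the ``page`` instance is hypothetical)::

    # Both options ride through to pymongo's collection.save()/insert() and
    # end up on the getLastError command: wait until two servers acknowledge
    # the write and force an fsync on each.
    page.save(safe=True, write_options={'w': 2, 'fsync': True})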
@@ -131,9 +141,9 @@ class MapReduceDocument(object):
     """A document returned from a map/reduce query.
 
     :param collection: An instance of :class:`~pymongo.Collection`
    :param key: Document/result key, often an instance of
                :class:`~pymongo.objectid.ObjectId`. If supplied as
                an ``ObjectId`` found in the given ``collection``,
                the object can be accessed via the ``object`` property.
    :param value: The result(s) for this key.
 
@@ -148,7 +158,7 @@ class MapReduceDocument(object):
 
     @property
     def object(self):
         """Lazy-load the object referenced by ``self.key``. ``self.key``
         should be the ``primary_key``.
         """
         id_field = self._document()._meta['id_field']
--- a/mongoengine/fields.py
+++ b/mongoengine/fields.py
@@ -522,6 +522,9 @@ class GenericReferenceField(BaseField):
     """A reference to *any* :class:`~mongoengine.document.Document` subclass
     that will be automatically dereferenced on access (lazily).
 
+    note: Any documents used as a generic reference must be registered in the
+    document registry. Importing the model will automatically register it.
+
     .. versionadded:: 0.3
     """
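A sketch of the registry requirement the note describes (class and module names hypothetical)::

    from mongoengine import Document, GenericReferenceField

    class Bookmark(Document):
        bookmark_object = GenericReferenceField()

    # Dereferencing looks the target class up via get_document(), so the
    # referenced class must have been imported (and thereby registered)
    # first; otherwise NotRegistered is raised on access.
    from myapp.models import Link  # hypothetical; importing registers Link
    bookmark = Bookmark.objects.first()
    link = bookmark.bookmark_object  # lazily dereferences to a Link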
@@ -601,6 +604,7 @@ class GridFSProxy(object):
         self.fs = gridfs.GridFS(_get_db())  # Filesystem instance
         self.newfile = None                 # Used for partial writes
         self.grid_id = grid_id              # Store GridFS id for file
+        self.gridout = None
 
     def __getattr__(self, name):
         obj = self.get()
@@ -614,8 +618,12 @@ class GridFSProxy(object):
     def get(self, id=None):
         if id:
             self.grid_id = id
+        if self.grid_id is None:
+            return None
         try:
-            return self.fs.get(id or self.grid_id)
+            if self.gridout is None:
+                self.gridout = self.fs.get(self.grid_id)
+            return self.gridout
         except:
             # File has been deleted
             return None
@@ -645,9 +653,9 @@ class GridFSProxy(object):
             self.grid_id = self.newfile._id
         self.newfile.writelines(lines)
 
-    def read(self):
+    def read(self, size=-1):
         try:
-            return self.get().read()
+            return self.get().read(size)
         except:
             return None
 
@@ -655,6 +663,7 @@ class GridFSProxy(object):
         # Delete file from GridFS, FileField still remains
         self.fs.delete(self.grid_id)
         self.grid_id = None
+        self.gridout = None
 
     def replace(self, file, **kwargs):
         self.delete()
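A sketch of what the cached ``gridout`` changes in practice (the ``attachment`` field name is hypothetical)::

    doc = MyDoc.objects.first()
    header = doc.attachment.read(16)  # read() now accepts a size argument
    rest = doc.attachment.read()      # continues from the cached GridOut, so
                                      # the file is fetched from GridFS once
    doc.attachment.delete()           # clears grid_id *and* the cached gridout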
--- a/mongoengine/queryset.py
+++ b/mongoengine/queryset.py
@@ -8,6 +8,7 @@ import pymongo.objectid
 import re
 import copy
 import itertools
+import operator
 
 __all__ = ['queryset_manager', 'Q', 'InvalidQueryError',
            'InvalidCollectionError', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY']
@@ -280,30 +281,30 @@ class QueryFieldList(object):
     ONLY = True
     EXCLUDE = False
 
-    def __init__(self, fields=[], direction=ONLY, always_include=[]):
-        self.direction = direction
+    def __init__(self, fields=[], value=ONLY, always_include=[]):
+        self.value = value
         self.fields = set(fields)
         self.always_include = set(always_include)
 
     def as_dict(self):
-        return dict((field, self.direction) for field in self.fields)
+        return dict((field, self.value) for field in self.fields)
 
     def __add__(self, f):
         if not self.fields:
             self.fields = f.fields
-            self.direction = f.direction
-        elif self.direction is self.ONLY and f.direction is self.ONLY:
+            self.value = f.value
+        elif self.value is self.ONLY and f.value is self.ONLY:
             self.fields = self.fields.intersection(f.fields)
-        elif self.direction is self.EXCLUDE and f.direction is self.EXCLUDE:
+        elif self.value is self.EXCLUDE and f.value is self.EXCLUDE:
             self.fields = self.fields.union(f.fields)
-        elif self.direction is self.ONLY and f.direction is self.EXCLUDE:
+        elif self.value is self.ONLY and f.value is self.EXCLUDE:
             self.fields -= f.fields
-        elif self.direction is self.EXCLUDE and f.direction is self.ONLY:
-            self.direction = self.ONLY
+        elif self.value is self.EXCLUDE and f.value is self.ONLY:
+            self.value = self.ONLY
             self.fields = f.fields - self.fields
 
         if self.always_include:
-            if self.direction is self.ONLY and self.fields:
+            if self.value is self.ONLY and self.fields:
                 self.fields = self.fields.union(self.always_include)
             else:
                 self.fields -= self.always_include
@@ -311,7 +312,7 @@ class QueryFieldList(object):
 
     def reset(self):
         self.fields = set([])
-        self.direction = self.ONLY
+        self.value = self.ONLY
 
     def __nonzero__(self):
         return bool(self.fields)
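The rename of ``direction`` to ``value`` does not change the combination algebra; a small sketch::

    from mongoengine.queryset import QueryFieldList

    only_ab = QueryFieldList(['a', 'b'], value=QueryFieldList.ONLY)
    only_bc = QueryFieldList(['b', 'c'], value=QueryFieldList.ONLY)

    # ONLY + ONLY intersects the field sets, so only 'b' survives
    combined = only_ab + only_bc
    print combined.as_dict()  # {'b': True}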
@@ -551,7 +552,7 @@ class QuerySet(object):
         return '.'.join(parts)
 
     @classmethod
-    def _transform_query(cls, _doc_cls=None, **query):
+    def _transform_query(cls, _doc_cls=None, _field_operation=False, **query):
         """Transform a query from Django-style format to Mongo format.
         """
         operators = ['ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod',
@@ -646,7 +647,7 @@ class QuerySet(object):
             raise self._document.DoesNotExist("%s matching query does not exist."
                                               % self._document._class_name)
 
-    def get_or_create(self, *q_objs, **query):
+    def get_or_create(self, write_options=None, *q_objs, **query):
         """Retrieve unique object or create, if it doesn't exist. Returns a tuple of
         ``(object, created)``, where ``object`` is the retrieved or created object
         and ``created`` is a boolean specifying whether a new object was created. Raises
@@ -656,6 +657,10 @@ class QuerySet(object):
         dictionary of default values for the new document may be provided as a
         keyword argument called :attr:`defaults`.
 
+        :param write_options: optional extra keyword arguments used if we
+            have to create a new document.
+            Passes any write_options onto :meth:`~mongoengine.document.Document.save`
+
         .. versionadded:: 0.3
         """
         defaults = query.get('defaults', {})
@@ -667,7 +672,7 @@ class QuerySet(object):
         if count == 0:
             query.update(defaults)
             doc = self._document(**query)
-            doc.save()
+            doc.save(write_options=write_options)
             return doc, True
         elif count == 1:
             return self.first(), False
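A usage sketch (the ``BlogPost`` class is hypothetical)::

    # write_options is only consulted when the document has to be created;
    # a successful lookup performs no write at all.
    post, created = BlogPost.objects.get_or_create(
        slug='intro', defaults={'title': 'Intro'},
        write_options={'w': 2})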
@@ -893,10 +898,8 @@ class QuerySet(object):
 
         .. versionadded:: 0.3
         """
-        fields = self._fields_to_dbfields(fields)
-        self._loaded_fields += QueryFieldList(fields, direction=QueryFieldList.ONLY)
-        return self
-
+        fields = dict([(f, QueryFieldList.ONLY) for f in fields])
+        return self.fields(**fields)
 
     def exclude(self, *fields):
         """Opposite to .only(), exclude some document's fields. ::
@@ -905,8 +908,44 @@ class QuerySet(object):
 
         :param fields: fields to exclude
         """
-        fields = self._fields_to_dbfields(fields)
-        self._loaded_fields += QueryFieldList(fields, direction=QueryFieldList.EXCLUDE)
+        fields = dict([(f, QueryFieldList.EXCLUDE) for f in fields])
+        return self.fields(**fields)
+
+    def fields(self, **kwargs):
+        """Manipulate how you load this document's fields. Used by `.only()`
+        and `.exclude()` to manipulate which fields to retrieve. Fields also
+        allows for a greater level of control for example:
+
+        Retrieving a Subrange of Array Elements
+        ---------------------------------------
+
+        You can use the $slice operator to retrieve a subrange of elements in
+        an array ::
+
+            post = BlogPost.objects(...).fields(slice__comments=5)  # first 5 comments
+
+        :param kwargs: A dictionary identifying what to include
+
+        .. versionadded:: 0.5
+        """
+
+        # Check for an operator and transform to mongo-style if there is
+        operators = ["slice"]
+        cleaned_fields = []
+        for key, value in kwargs.items():
+            parts = key.split('__')
+            op = None
+            if parts[0] in operators:
+                op = parts.pop(0)
+                value = {'$' + op: value}
+            key = '.'.join(parts)
+            cleaned_fields.append((key, value))
+
+        fields = sorted(cleaned_fields, key=operator.itemgetter(1))
+        for value, group in itertools.groupby(fields, lambda x: x[1]):
+            fields = [field for field, value in group]
+            fields = self._fields_to_dbfields(fields)
+            self._loaded_fields += QueryFieldList(fields, value=value)
         return self
 
     def all_fields(self):
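With this change ``only()`` and ``exclude()`` become thin wrappers over the new ``fields()`` method; a sketch (``BlogPost`` hypothetical)::

    posts = BlogPost.objects.only('title')        # fields(title=ONLY)
    posts = BlogPost.objects.exclude('comments')  # fields(comments=EXCLUDE)

    # fields() also accepts the slice operator, mapped to MongoDB's $slice:
    posts = BlogPost.objects.fields(slice__comments=5)        # first 5
    posts = BlogPost.objects.fields(slice__comments=[10, 5])  # skip 10, take 5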
@@ -1062,22 +1101,27 @@ class QuerySet(object):
 
         return mongo_update
 
-    def update(self, safe_update=True, upsert=False, **update):
+    def update(self, safe_update=True, upsert=False, write_options=None, **update):
         """Perform an atomic update on the fields matched by the query. When
         ``safe_update`` is used, the number of affected documents is returned.
 
-        :param safe: check if the operation succeeded before returning
-        :param update: Django-style update keyword arguments
+        :param safe_update: check if the operation succeeded before returning
+        :param upsert: Any existing document with that "_id" is overwritten.
+        :param write_options: extra keyword arguments for :meth:`~pymongo.collection.Collection.update`
 
         .. versionadded:: 0.2
         """
         if pymongo.version < '1.1.1':
             raise OperationError('update() method requires PyMongo 1.1.1+')
 
+        if not write_options:
+            write_options = {}
+
         update = QuerySet._transform_update(self._document, **update)
         try:
             ret = self._collection.update(self._query, update, multi=True,
-                                          upsert=upsert, safe=safe_update)
+                                          upsert=upsert, safe=safe_update,
+                                          **write_options)
             if ret is not None and 'n' in ret:
                 return ret['n']
         except pymongo.errors.OperationFailure, err:
@@ -1086,22 +1130,27 @@ class QuerySet(object):
                 raise OperationError(message)
             raise OperationError(u'Update failed (%s)' % unicode(err))
 
-    def update_one(self, safe_update=True, upsert=False, **update):
+    def update_one(self, safe_update=True, upsert=False, write_options=None, **update):
         """Perform an atomic update on first field matched by the query. When
         ``safe_update`` is used, the number of affected documents is returned.
 
-        :param safe: check if the operation succeeded before returning
+        :param safe_update: check if the operation succeeded before returning
+        :param upsert: Any existing document with that "_id" is overwritten.
+        :param write_options: extra keyword arguments for :meth:`~pymongo.collection.Collection.update`
         :param update: Django-style update keyword arguments
 
         .. versionadded:: 0.2
         """
+        if not write_options:
+            write_options = {}
         update = QuerySet._transform_update(self._document, **update)
         try:
             # Explicitly provide 'multi=False' to newer versions of PyMongo
             # as the default may change to 'True'
             if pymongo.version >= '1.1.1':
                 ret = self._collection.update(self._query, update, multi=False,
-                                              upsert=upsert, safe=safe_update)
+                                              upsert=upsert, safe=safe_update,
+                                              **write_options)
             else:
                 # Older versions of PyMongo don't support 'multi'
                 ret = self._collection.update(self._query, update,
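A usage sketch for the ``write_options`` pass-through (``BlogPost`` hypothetical)::

    # Multi-document atomic update; the extra options ride along to pymongo's
    # Collection.update() and its getLastError semantics.
    n = BlogPost.objects(published=False).update(set__published=True,
                                                 write_options={'fsync': True})

    # Single-document variant
    BlogPost.objects(slug='intro').update_one(inc__views=1,
                                              write_options={'w': 2})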
@@ -1284,7 +1333,7 @@ class QuerySetManager(object):
             # Create collection as a capped collection if specified
             if owner._meta['max_size'] or owner._meta['max_documents']:
                 # Get max document limit and max byte size from meta
                 max_size = owner._meta['max_size'] or 10000000  # 10MB default
                 max_documents = owner._meta['max_documents']
 
                 if collection in db.collection_names():
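For reference, the capped-collection meta this code consumes is declared on the document; a sketch (``Log`` hypothetical)::

    from mongoengine import Document, StringField

    class Log(Document):
        line = StringField()

        # If max_size is omitted while max_documents is set, the manager
        # falls back to the 10000000-byte (10MB) default seen above.
        meta = {
            'max_documents': 1000,  # cap on document count
            'max_size': 2000000,    # cap in bytes
        }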