Added a cleaner way to get collection names

Also handles dynamic collection naming - refs #180.
Ross Lawley
2011-06-20 11:48:12 +01:00
parent 1b0323bc22
commit f41c5217c6
6 changed files with 130 additions and 73 deletions
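For context, a minimal sketch of the dynamic collection naming introduced here (class and helper names are hypothetical; no database connection is needed): meta['collection'] may now be a callable that receives the class, and the resolved name is read back through the new _get_collection_name() accessor instead of poking at _meta directly.

    from mongoengine import Document, StringField

    def lowercase_plural(cls):
        # Hypothetical naming helper: 'Article' -> 'articles'
        return cls.__name__.lower() + 's'

    class Article(Document):
        title = StringField()
        # A callable is resolved by the metaclass at class-creation time.
        meta = {'collection': lowercase_plural}

    # The cleaner accessor replaces direct _meta['collection'] lookups.
    assert Article._get_collection_name() == 'articles'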

View File

@@ -22,6 +22,7 @@ class ValidationError(Exception):
_document_registry = {}
def get_document(name):
doc = _document_registry.get(name, None)
if not doc:
@@ -195,7 +196,7 @@ class ComplexBaseField(BaseField):
elif isinstance(v, (dict, pymongo.son.SON)):
if '_ref' in v:
# generic reference
collection = get_document(v['_cls'])._meta['collection']
collection = get_document(v['_cls'])._get_collection_name()
collections.setdefault(collection, []).append((k,v))
else:
# Use BaseDict so can watch any changes
@@ -257,7 +258,7 @@ class ComplexBaseField(BaseField):
if v.pk is None:
raise ValidationError('You can only reference documents once '
'they have been saved to the database')
collection = v._meta['collection']
collection = v._get_collection_name()
value_dict[k] = pymongo.dbref.DBRef(collection, v.pk)
elif hasattr(v, 'to_python'):
value_dict[k] = v.to_python()
@@ -306,7 +307,7 @@ class ComplexBaseField(BaseField):
from fields import GenericReferenceField
value_dict[k] = GenericReferenceField().to_mongo(v)
else:
collection = v._meta['collection']
collection = v._get_collection_name()
value_dict[k] = pymongo.dbref.DBRef(collection, v.pk)
elif hasattr(v, 'to_mongo'):
value_dict[k] = v.to_mongo()
@@ -500,9 +501,14 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
# Subclassed documents inherit collection from superclass
for base in bases:
if hasattr(base, '_meta'):
if 'collection' in base._meta:
collection = base._meta['collection']
if 'collection' in attrs.get('meta', {}) and not base._meta.get('abstract', False):
import warnings
msg = "Trying to set a collection on a subclass (%s)" % name
warnings.warn(msg, SyntaxWarning)
del(attrs['meta']['collection'])
if base._get_collection_name():
collection = base._get_collection_name()
# Propagate index options.
for key in ('index_background', 'index_drop_dups', 'index_opts'):
if key in base._meta:
@@ -539,6 +545,10 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
# DocumentMetaclass before instantiating CollectionManager object
new_class = super_new(cls, name, bases, attrs)
collection = attrs['_meta'].get('collection', None)
if callable(collection):
new_class._meta['collection'] = collection(new_class)
# Provide a default queryset unless one has been manually provided
manager = attrs.get('objects', QuerySetManager())
if hasattr(manager, 'queryset_class'):
@@ -675,6 +685,12 @@ class BaseDocument(object):
elif field.required:
raise ValidationError('Field "%s" is required' % field.name)
@classmethod
def _get_collection_name(cls):
"""Returns the collection name for this class.
"""
return cls._meta.get('collection', None)
@classmethod
def _get_subclasses(cls):
"""Return a dictionary of all subclasses (found recursively).

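A brief sketch of the subclass guard added in the metaclass change above (class names are hypothetical; no database connection is needed): declaring meta['collection'] on a non-abstract subclass now emits a SyntaxWarning and the override is discarded, so the subclass keeps its parent's collection.

    import warnings
    from mongoengine import Document, StringField

    class Person(Document):
        name = StringField()
        meta = {'collection': 'people'}

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')

        class Employee(Person):
            # Discarded with "Trying to set a collection on a subclass (Employee)"
            meta = {'collection': 'employees'}

    assert any(w.category is SyntaxWarning for w in caught)
    assert Employee._get_collection_name() == 'people'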
View File

@@ -6,7 +6,12 @@ from connection import _get_db
import pymongo
__all__ = ['Document', 'EmbeddedDocument', 'ValidationError', 'OperationError']
__all__ = ['Document', 'EmbeddedDocument', 'ValidationError',
'OperationError', 'InvalidCollectionError']
class InvalidCollectionError(Exception):
pass
class EmbeddedDocument(BaseDocument):
@@ -72,6 +77,41 @@ class Document(BaseDocument):
"""
__metaclass__ = TopLevelDocumentMetaclass
@classmethod
def _get_collection(self):
"""Returns the collection for the document."""
db = _get_db()
collection_name = self._get_collection_name()
if not hasattr(self, '_collection') or self._collection is None:
# Create collection as a capped collection if specified
if self._meta['max_size'] or self._meta['max_documents']:
# Get max document limit and max byte size from meta
max_size = self._meta['max_size'] or 10000000 # 10MB default
max_documents = self._meta['max_documents']
if collection_name in db.collection_names():
self._collection = db[collection_name]
# The collection already exists, check if its capped
# options match the specified capped options
options = self._collection.options()
if options.get('max') != max_documents or \
options.get('size') != max_size:
msg = ('Cannot create collection "%s" as a capped '
'collection as it already exists') % self._collection
raise InvalidCollectionError(msg)
else:
# Create the collection as a capped collection
opts = {'capped': True, 'size': max_size}
if max_documents:
opts['max'] = max_documents
self._collection = db.create_collection(
collection_name, **opts
)
else:
self._collection = db[collection_name]
return self._collection
def save(self, safe=True, force_insert=False, validate=True, write_options=None):
"""Save the :class:`~mongoengine.Document` to the database. If the
document already exists, it will be updated, otherwise it will be
@@ -173,7 +213,7 @@ class Document(BaseDocument):
if not self.pk:
msg = "Only saved documents can have a valid dbref"
raise OperationError(msg)
return pymongo.dbref.DBRef(self.__class__._meta['collection'], self.pk)
return pymongo.dbref.DBRef(self.__class__._get_collection_name(), self.pk)
@classmethod
def register_delete_rule(cls, document_cls, field_name, rule):
@@ -188,7 +228,7 @@ class Document(BaseDocument):
:class:`~mongoengine.Document` type from the database.
"""
db = _get_db()
db.drop_collection(cls._meta['collection'])
db.drop_collection(cls._get_collection_name())
class MapReduceDocument(object):

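A minimal sketch of the new classmethod's capped-collection behaviour (model and database names are hypothetical, and a local mongod is assumed): when meta declares max_documents/max_size, Document._get_collection() creates the collection capped, and raises InvalidCollectionError if an existing collection's options do not match.

    from mongoengine import Document, StringField, connect

    class LogEntry(Document):
        message = StringField()
        # Request a capped collection: at most 1000 documents, ~4 MB.
        meta = {'max_documents': 1000, 'max_size': 4096000}

    connect('example_db')  # assumes a local mongod; db name is illustrative
    LogEntry(message='started').save()

    # The collection was created capped, mirroring the options check above.
    options = LogEntry._get_collection().options()
    assert options.get('max') == 1000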
View File

@@ -252,7 +252,7 @@ class DateTimeField(BaseField):
return datetime.datetime(value.year, value.month, value.day)
# Attempt to parse a datetime:
#value = smart_str(value)
# value = smart_str(value)
# split usecs, because they are not recognized by strptime.
if '.' in value:
try:
@@ -278,6 +278,7 @@ class DateTimeField(BaseField):
return None
class ComplexDateTimeField(StringField):
"""
ComplexDateTimeField handles microseconds exactly instead of rounding
@@ -526,6 +527,7 @@ class MapField(DictField):
super(MapField, self).__init__(field=field, *args, **kwargs)
class ReferenceField(BaseField):
"""A reference to a document that will be automatically dereferenced on
access (lazily).
@@ -595,7 +597,7 @@ class ReferenceField(BaseField):
id_ = document
id_ = id_field.to_mongo(id_)
collection = self.document_type._meta['collection']
collection = self.document_type._get_collection_name()
return pymongo.dbref.DBRef(collection, id_)
def prepare_query_value(self, op, value):
@@ -664,7 +666,7 @@ class GenericReferenceField(BaseField):
id_ = document
id_ = id_field.to_mongo(id_)
collection = document._meta['collection']
collection = document._get_collection_name()
ref = pymongo.dbref.DBRef(collection, id_)
return {'_cls': document._class_name, '_ref': ref}
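A sketch of how the ReferenceField change above interacts with dynamic naming (model and database names are hypothetical; a local mongod is assumed): the DBRef stored for a reference is now built from _get_collection_name(), so callable collection names propagate into stored references.

    from mongoengine import Document, ReferenceField, StringField, connect

    class Author(Document):
        name = StringField()
        meta = {'collection': lambda cls: cls.__name__.lower() + 's'}

    class Book(Document):
        title = StringField()
        author = ReferenceField(Author)

    connect('example_db')  # assumes a local mongod; db name is illustrative
    author = Author(name='Ross')
    author.save()
    Book(title='MongoEngine', author=author).save()

    # The raw document's DBRef points at the dynamically named collection.
    raw = Book._get_collection().find_one({'title': 'MongoEngine'})
    assert raw['author'].collection == 'authors'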

View File

@@ -11,7 +11,7 @@ import itertools
import operator
__all__ = ['queryset_manager', 'Q', 'InvalidQueryError',
'InvalidCollectionError', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY']
'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY']
# The maximum number of items to display in a QuerySet.__repr__
@@ -40,10 +40,6 @@ class OperationError(Exception):
pass
class InvalidCollectionError(Exception):
pass
RE_TYPE = type(re.compile(''))
@@ -1360,7 +1356,7 @@ class QuerySet(object):
fields = [QuerySet._translate_field_name(self._document, f)
for f in fields]
collection = self._document._meta['collection']
collection = self._document._get_collection_name()
scope = {
'collection': collection,
@@ -1550,39 +1546,9 @@ class QuerySetManager(object):
# Document class being used rather than a document object
return self
db = _get_db()
collection = owner._meta['collection']
if (db, collection) not in self._collections:
# Create collection as a capped collection if specified
if owner._meta['max_size'] or owner._meta['max_documents']:
# Get max document limit and max byte size from meta
max_size = owner._meta['max_size'] or 10000000 # 10MB default
max_documents = owner._meta['max_documents']
if collection in db.collection_names():
self._collections[(db, collection)] = db[collection]
# The collection already exists, check if its capped
# options match the specified capped options
options = self._collections[(db, collection)].options()
if options.get('max') != max_documents or \
options.get('size') != max_size:
msg = ('Cannot create collection "%s" as a capped '
'collection as it already exists') % collection
raise InvalidCollectionError(msg)
else:
# Create the collection as a capped collection
opts = {'capped': True, 'size': max_size}
if max_documents:
opts['max'] = max_documents
self._collections[(db, collection)] = db.create_collection(
collection, **opts
)
else:
self._collections[(db, collection)] = db[collection]
# owner is the document that contains the QuerySetManager
queryset_class = owner._meta['queryset_class'] or QuerySet
queryset = queryset_class(owner, self._collections[(db, collection)])
queryset = queryset_class(owner, owner._get_collection())
if self.get_queryset:
if self.get_queryset.func_code.co_argcount == 1:
queryset = self.get_queryset(queryset)
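Finally, a small sketch of the net effect on querysets (names are hypothetical; a local mongod is assumed): the manager no longer duplicates the capped-collection logic removed above and simply wraps whatever Document._get_collection() returns, so collection setup happens in one place.

    from mongoengine import Document, StringField, connect

    class Event(Document):
        name = StringField()

    connect('example_db')  # assumes a local mongod; db name is illustrative

    # Saving via the document and querying via the manager both go through
    # Document._get_collection().
    Event(name='deploy').save()
    assert Event.objects(name='deploy').first() is not None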