Added a Django storage backend.
- New GridFSStorage storage backend - New FileDocument document for storing files in GridFS - Whitespace cleaned up in various files
This commit is contained in:
parent
bd1bf9ba24
commit
b5eb3ea1cd
@ -4,6 +4,8 @@ Changelog
|
|||||||
|
|
||||||
Changes in v0.4
|
Changes in v0.4
|
||||||
===============
|
===============
|
||||||
|
- Added ``GridFSStorage`` Django storage backend
|
||||||
|
- Added ``FileField`` for GridFS support
|
||||||
- Added ``SortedListField``
|
- Added ``SortedListField``
|
||||||
- Added ``EmailField``
|
- Added ``EmailField``
|
||||||
- Added ``GeoPointField``
|
- Added ``GeoPointField``
|
||||||
|
@ -19,7 +19,7 @@ MongoDB but still use many of the Django authentication infrastructure (such as
|
|||||||
the :func:`login_required` decorator and the :func:`authenticate` function). To
|
the :func:`login_required` decorator and the :func:`authenticate` function). To
|
||||||
enable the MongoEngine auth backend, add the following to your **settings.py**
|
enable the MongoEngine auth backend, add the following to your **settings.py**
|
||||||
file::
|
file::
|
||||||
|
|
||||||
AUTHENTICATION_BACKENDS = (
|
AUTHENTICATION_BACKENDS = (
|
||||||
'mongoengine.django.auth.MongoEngineBackend',
|
'mongoengine.django.auth.MongoEngineBackend',
|
||||||
)
|
)
|
||||||
@ -44,3 +44,42 @@ into you settings module::
|
|||||||
SESSION_ENGINE = 'mongoengine.django.sessions'
|
SESSION_ENGINE = 'mongoengine.django.sessions'
|
||||||
|
|
||||||
.. versionadded:: 0.2.1
|
.. versionadded:: 0.2.1
|
||||||
|
|
||||||
|
Storage
|
||||||
|
=======
|
||||||
|
With MongoEngine's support for GridFS via the FileField, it is useful to have a
|
||||||
|
Django file storage backend that wraps this. The new storage module is called
|
||||||
|
GridFSStorage. Using it is very similar to using the default FileSystemStorage.::
|
||||||
|
|
||||||
|
fs = mongoengine.django.GridFSStorage()
|
||||||
|
|
||||||
|
filename = fs.save('hello.txt', 'Hello, World!')
|
||||||
|
|
||||||
|
All of the `Django Storage API methods
|
||||||
|
<http://docs.djangoproject.com/en/dev/ref/files/storage/>`_ have been
|
||||||
|
implemented except ``path()``. If the filename provided already exists, an
|
||||||
|
underscore and a number (before the file extension, if one exists) will be
|
||||||
|
appended to the filename until the generated filename doesn't exist. The
|
||||||
|
``save()`` method will return the new filename.::
|
||||||
|
|
||||||
|
> fs.exists('hello.txt')
|
||||||
|
True
|
||||||
|
> fs.open('hello.txt').read()
|
||||||
|
'Hello, World!'
|
||||||
|
> fs.size('hello.txt')
|
||||||
|
13
|
||||||
|
> fs.url('hello.txt')
|
||||||
|
'http://your_media_url/hello.txt'
|
||||||
|
> fs.open('hello.txt').name
|
||||||
|
'hello.txt'
|
||||||
|
> fs.listdir()
|
||||||
|
([], [u'hello.txt'])
|
||||||
|
|
||||||
|
All files will be saved and retrieved in GridFS via the ``FileDocument`` document,
|
||||||
|
allowing easy access to the files without the GridFSStorage backend.::
|
||||||
|
|
||||||
|
> from mongoengine.django.storage import FileDocument
|
||||||
|
> FileDocument.objects()
|
||||||
|
[<FileDocument: FileDocument object>]
|
||||||
|
|
||||||
|
.. versionadded:: 0.4
|
||||||
|
@ -23,7 +23,7 @@ class BaseField(object):
|
|||||||
# Fields may have _types inserted into indexes by default
|
# Fields may have _types inserted into indexes by default
|
||||||
_index_with_types = True
|
_index_with_types = True
|
||||||
_geo_index = False
|
_geo_index = False
|
||||||
|
|
||||||
def __init__(self, db_field=None, name=None, required=False, default=None,
|
def __init__(self, db_field=None, name=None, required=False, default=None,
|
||||||
unique=False, unique_with=None, primary_key=False,
|
unique=False, unique_with=None, primary_key=False,
|
||||||
validation=None, choices=None):
|
validation=None, choices=None):
|
||||||
@ -89,7 +89,7 @@ class BaseField(object):
|
|||||||
if value not in self.choices:
|
if value not in self.choices:
|
||||||
raise ValidationError("Value must be one of %s."
|
raise ValidationError("Value must be one of %s."
|
||||||
% unicode(self.choices))
|
% unicode(self.choices))
|
||||||
|
|
||||||
# check validation argument
|
# check validation argument
|
||||||
if self.validation is not None:
|
if self.validation is not None:
|
||||||
if callable(self.validation):
|
if callable(self.validation):
|
||||||
@ -98,13 +98,13 @@ class BaseField(object):
|
|||||||
'validation method.')
|
'validation method.')
|
||||||
else:
|
else:
|
||||||
raise ValueError('validation argument must be a callable.')
|
raise ValueError('validation argument must be a callable.')
|
||||||
|
|
||||||
self.validate(value)
|
self.validate(value)
|
||||||
|
|
||||||
class ObjectIdField(BaseField):
|
class ObjectIdField(BaseField):
|
||||||
"""An field wrapper around MongoDB's ObjectIds.
|
"""An field wrapper around MongoDB's ObjectIds.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def to_python(self, value):
|
def to_python(self, value):
|
||||||
return value
|
return value
|
||||||
# return unicode(value)
|
# return unicode(value)
|
||||||
@ -150,7 +150,7 @@ class DocumentMetaclass(type):
|
|||||||
# Get superclasses from superclass
|
# Get superclasses from superclass
|
||||||
superclasses[base._class_name] = base
|
superclasses[base._class_name] = base
|
||||||
superclasses.update(base._superclasses)
|
superclasses.update(base._superclasses)
|
||||||
|
|
||||||
if hasattr(base, '_meta'):
|
if hasattr(base, '_meta'):
|
||||||
# Ensure that the Document class may be subclassed -
|
# Ensure that the Document class may be subclassed -
|
||||||
# inheritance may be disabled to remove dependency on
|
# inheritance may be disabled to remove dependency on
|
||||||
@ -191,20 +191,20 @@ class DocumentMetaclass(type):
|
|||||||
field.owner_document = new_class
|
field.owner_document = new_class
|
||||||
|
|
||||||
module = attrs.get('__module__')
|
module = attrs.get('__module__')
|
||||||
|
|
||||||
base_excs = tuple(base.DoesNotExist for base in bases
|
base_excs = tuple(base.DoesNotExist for base in bases
|
||||||
if hasattr(base, 'DoesNotExist')) or (DoesNotExist,)
|
if hasattr(base, 'DoesNotExist')) or (DoesNotExist,)
|
||||||
exc = subclass_exception('DoesNotExist', base_excs, module)
|
exc = subclass_exception('DoesNotExist', base_excs, module)
|
||||||
new_class.add_to_class('DoesNotExist', exc)
|
new_class.add_to_class('DoesNotExist', exc)
|
||||||
|
|
||||||
base_excs = tuple(base.MultipleObjectsReturned for base in bases
|
base_excs = tuple(base.MultipleObjectsReturned for base in bases
|
||||||
if hasattr(base, 'MultipleObjectsReturned'))
|
if hasattr(base, 'MultipleObjectsReturned'))
|
||||||
base_excs = base_excs or (MultipleObjectsReturned,)
|
base_excs = base_excs or (MultipleObjectsReturned,)
|
||||||
exc = subclass_exception('MultipleObjectsReturned', base_excs, module)
|
exc = subclass_exception('MultipleObjectsReturned', base_excs, module)
|
||||||
new_class.add_to_class('MultipleObjectsReturned', exc)
|
new_class.add_to_class('MultipleObjectsReturned', exc)
|
||||||
|
|
||||||
return new_class
|
return new_class
|
||||||
|
|
||||||
def add_to_class(self, name, value):
|
def add_to_class(self, name, value):
|
||||||
setattr(self, name, value)
|
setattr(self, name, value)
|
||||||
|
|
||||||
@ -227,7 +227,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
|||||||
return super_new(cls, name, bases, attrs)
|
return super_new(cls, name, bases, attrs)
|
||||||
|
|
||||||
collection = name.lower()
|
collection = name.lower()
|
||||||
|
|
||||||
id_field = None
|
id_field = None
|
||||||
base_indexes = []
|
base_indexes = []
|
||||||
base_meta = {}
|
base_meta = {}
|
||||||
@ -265,7 +265,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
|||||||
# Set up collection manager, needs the class to have fields so use
|
# Set up collection manager, needs the class to have fields so use
|
||||||
# DocumentMetaclass before instantiating CollectionManager object
|
# DocumentMetaclass before instantiating CollectionManager object
|
||||||
new_class = super_new(cls, name, bases, attrs)
|
new_class = super_new(cls, name, bases, attrs)
|
||||||
|
|
||||||
# Provide a default queryset unless one has been manually provided
|
# Provide a default queryset unless one has been manually provided
|
||||||
if not hasattr(new_class, 'objects'):
|
if not hasattr(new_class, 'objects'):
|
||||||
new_class.objects = QuerySetManager()
|
new_class.objects = QuerySetManager()
|
||||||
@ -273,7 +273,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
|||||||
user_indexes = [QuerySet._build_index_spec(new_class, spec)
|
user_indexes = [QuerySet._build_index_spec(new_class, spec)
|
||||||
for spec in meta['indexes']] + base_indexes
|
for spec in meta['indexes']] + base_indexes
|
||||||
new_class._meta['indexes'] = user_indexes
|
new_class._meta['indexes'] = user_indexes
|
||||||
|
|
||||||
unique_indexes = []
|
unique_indexes = []
|
||||||
for field_name, field in new_class._fields.items():
|
for field_name, field in new_class._fields.items():
|
||||||
# Generate a list of indexes needed by uniqueness constraints
|
# Generate a list of indexes needed by uniqueness constraints
|
||||||
@ -431,7 +431,7 @@ class BaseDocument(object):
|
|||||||
if data.has_key('_id') and not data['_id']:
|
if data.has_key('_id') and not data['_id']:
|
||||||
del data['_id']
|
del data['_id']
|
||||||
return data
|
return data
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def _from_son(cls, son):
|
def _from_son(cls, son):
|
||||||
"""Create an instance of a Document (subclass) from a PyMongo SON.
|
"""Create an instance of a Document (subclass) from a PyMongo SON.
|
||||||
@ -468,7 +468,7 @@ class BaseDocument(object):
|
|||||||
obj = cls(**data)
|
obj = cls(**data)
|
||||||
obj._present_fields = present_fields
|
obj._present_fields = present_fields
|
||||||
return obj
|
return obj
|
||||||
|
|
||||||
def __eq__(self, other):
|
def __eq__(self, other):
|
||||||
if isinstance(other, self.__class__) and hasattr(other, 'id'):
|
if isinstance(other, self.__class__) and hasattr(other, 'id'):
|
||||||
if self.id == other.id:
|
if self.id == other.id:
|
||||||
|
112
mongoengine/django/storage.py
Normal file
112
mongoengine/django/storage.py
Normal file
@ -0,0 +1,112 @@
|
|||||||
|
import os
|
||||||
|
import itertools
|
||||||
|
import urlparse
|
||||||
|
|
||||||
|
from mongoengine import *
|
||||||
|
from django.conf import settings
|
||||||
|
from django.core.files.storage import Storage
|
||||||
|
from django.core.exceptions import ImproperlyConfigured
|
||||||
|
|
||||||
|
|
||||||
|
class FileDocument(Document):
    """Wraps a single GridFS file in a MongoEngine document.

    Each instance holds one file in its ``file`` field, which lets files
    saved through ``GridFSStorage`` be queried and managed like any other
    document.
    """
    file = FileField()
|
||||||
|
|
||||||
|
|
||||||
|
class GridFSStorage(Storage):
    """A custom Django storage backend that stores files in GridFS.

    Files are wrapped in :class:`FileDocument` documents so they remain
    accessible without this backend. The Django Storage API is implemented
    except ``path()``, which has no meaning for GridFS-backed files.
    """

    def __init__(self, base_url=None):
        # Default to the project-wide media URL so url() works out of the
        # box, mirroring FileSystemStorage's behaviour.
        if base_url is None:
            base_url = settings.MEDIA_URL
        self.base_url = base_url
        self.document = FileDocument
        self.field = 'file'

    def delete(self, name):
        """Deletes the specified file from the storage system.
        """
        if self.exists(name):
            # BUG FIX: the original fetched ``self.document.objects.first()``
            # — the first document in the collection regardless of ``name`` —
            # and deleted *its* GridFS file while removing the *named*
            # document's wrapper. Both deletions must target the document
            # that actually holds ``name``.
            doc = self._get_doc_with_name(name)
            field = getattr(doc, self.field)
            field.delete()  # Delete the file from GridFS
            doc.delete()    # Delete the FileDocument wrapper

    def exists(self, name):
        """Returns True if a file referenced by the given name already exists
        in the storage system, or False if the name is available for a new
        file.
        """
        doc = self._get_doc_with_name(name)
        if doc:
            field = getattr(doc, self.field)
            return bool(field.name)
        else:
            return False

    def listdir(self, path=None):
        """Lists the contents of the specified path, returning a 2-tuple of
        lists; the first item being directories, the second item being files.
        """
        def name(doc):
            return getattr(doc, self.field).name
        docs = self.document.objects
        # GridFS has no directory hierarchy, so the directory list is
        # always empty.
        return [], [name(d) for d in docs if name(d)]

    def size(self, name):
        """Returns the total size, in bytes, of the file specified by name.

        :raises ValueError: if no file with that name exists.
        """
        doc = self._get_doc_with_name(name)
        if doc:
            return getattr(doc, self.field).length
        else:
            raise ValueError("No such file or directory: '%s'" % name)

    def url(self, name):
        """Returns an absolute URL where the file's contents can be accessed
        directly by a web browser.

        :raises ValueError: if no ``base_url`` was configured.
        """
        if self.base_url is None:
            raise ValueError("This file is not accessible via a URL.")
        # Normalise Windows-style separators that may appear in ``name``.
        return urlparse.urljoin(self.base_url, name).replace('\\', '/')

    def _get_doc_with_name(self, name):
        """Find the document in the store with the given name, or None.

        NOTE(review): this is a linear scan over every FileDocument — fine
        for small stores, but it does not use an indexed query.
        """
        docs = self.document.objects
        doc = [d for d in docs if getattr(d, self.field).name == name]
        if doc:
            return doc[0]
        else:
            return None

    def _open(self, name, mode='rb'):
        # ``mode`` is accepted for Storage API compatibility but ignored;
        # the GridFS proxy handles reading itself.
        doc = self._get_doc_with_name(name)
        if doc:
            return getattr(doc, self.field)
        else:
            raise ValueError("No file found with the name '%s'." % name)

    def get_available_name(self, name):
        """Returns a filename that's free on the target storage system, and
        available for new content to be written to.
        """
        file_root, file_ext = os.path.splitext(name)
        # If the filename already exists, add an underscore and a number
        # (before the file extension, if one exists) to the filename until
        # the generated filename doesn't exist.
        count = itertools.count(1)
        while self.exists(name):
            # file_ext includes the dot. (The original wrapped this in a
            # pointless single-argument os.path.join, which is a no-op.)
            name = "%s_%s%s" % (file_root, count.next(), file_ext)

        return name

    def _save(self, name, content):
        doc = self.document()
        getattr(doc, self.field).put(content, filename=name)
        doc.save()

        return name
|
@ -15,7 +15,7 @@ class EmbeddedDocument(BaseDocument):
|
|||||||
fields on :class:`~mongoengine.Document`\ s through the
|
fields on :class:`~mongoengine.Document`\ s through the
|
||||||
:class:`~mongoengine.EmbeddedDocumentField` field type.
|
:class:`~mongoengine.EmbeddedDocumentField` field type.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
__metaclass__ = DocumentMetaclass
|
__metaclass__ = DocumentMetaclass
|
||||||
|
|
||||||
|
|
||||||
@ -119,23 +119,23 @@ class Document(BaseDocument):
|
|||||||
|
|
||||||
class MapReduceDocument(object):
|
class MapReduceDocument(object):
|
||||||
"""A document returned from a map/reduce query.
|
"""A document returned from a map/reduce query.
|
||||||
|
|
||||||
:param collection: An instance of :class:`~pymongo.Collection`
|
:param collection: An instance of :class:`~pymongo.Collection`
|
||||||
:param key: Document/result key, often an instance of
|
:param key: Document/result key, often an instance of
|
||||||
:class:`~pymongo.objectid.ObjectId`. If supplied as
|
:class:`~pymongo.objectid.ObjectId`. If supplied as
|
||||||
an ``ObjectId`` found in the given ``collection``,
|
an ``ObjectId`` found in the given ``collection``,
|
||||||
the object can be accessed via the ``object`` property.
|
the object can be accessed via the ``object`` property.
|
||||||
:param value: The result(s) for this key.
|
:param value: The result(s) for this key.
|
||||||
|
|
||||||
.. versionadded:: 0.3
|
.. versionadded:: 0.3
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, document, collection, key, value):
|
def __init__(self, document, collection, key, value):
|
||||||
self._document = document
|
self._document = document
|
||||||
self._collection = collection
|
self._collection = collection
|
||||||
self.key = key
|
self.key = key
|
||||||
self.value = value
|
self.value = value
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def object(self):
|
def object(self):
|
||||||
"""Lazy-load the object referenced by ``self.key``. ``self.key``
|
"""Lazy-load the object referenced by ``self.key``. ``self.key``
|
||||||
@ -143,7 +143,7 @@ class MapReduceDocument(object):
|
|||||||
"""
|
"""
|
||||||
id_field = self._document()._meta['id_field']
|
id_field = self._document()._meta['id_field']
|
||||||
id_field_type = type(id_field)
|
id_field_type = type(id_field)
|
||||||
|
|
||||||
if not isinstance(self.key, id_field_type):
|
if not isinstance(self.key, id_field_type):
|
||||||
try:
|
try:
|
||||||
self.key = id_field_type(self.key)
|
self.key = id_field_type(self.key)
|
||||||
|
@ -16,11 +16,7 @@ __all__ = ['StringField', 'IntField', 'FloatField', 'BooleanField',
|
|||||||
'DateTimeField', 'EmbeddedDocumentField', 'ListField', 'DictField',
|
'DateTimeField', 'EmbeddedDocumentField', 'ListField', 'DictField',
|
||||||
'ObjectIdField', 'ReferenceField', 'ValidationError',
|
'ObjectIdField', 'ReferenceField', 'ValidationError',
|
||||||
'DecimalField', 'URLField', 'GenericReferenceField', 'FileField',
|
'DecimalField', 'URLField', 'GenericReferenceField', 'FileField',
|
||||||
<<<<<<< HEAD
|
|
||||||
'BinaryField', 'SortedListField', 'EmailField', 'GeoLocationField']
|
|
||||||
=======
|
|
||||||
'BinaryField', 'SortedListField', 'EmailField', 'GeoPointField']
|
'BinaryField', 'SortedListField', 'EmailField', 'GeoPointField']
|
||||||
>>>>>>> 32e66b29f44f3015be099851201241caee92054f
|
|
||||||
|
|
||||||
RECURSIVE_REFERENCE_CONSTANT = 'self'
|
RECURSIVE_REFERENCE_CONSTANT = 'self'
|
||||||
|
|
||||||
@ -42,7 +38,7 @@ class StringField(BaseField):
|
|||||||
|
|
||||||
if self.max_length is not None and len(value) > self.max_length:
|
if self.max_length is not None and len(value) > self.max_length:
|
||||||
raise ValidationError('String value is too long')
|
raise ValidationError('String value is too long')
|
||||||
|
|
||||||
if self.min_length is not None and len(value) < self.min_length:
|
if self.min_length is not None and len(value) < self.min_length:
|
||||||
raise ValidationError('String value is too short')
|
raise ValidationError('String value is too short')
|
||||||
|
|
||||||
@ -350,7 +346,8 @@ class SortedListField(ListField):
|
|||||||
|
|
||||||
def to_mongo(self, value):
|
def to_mongo(self, value):
|
||||||
if self._ordering is not None:
|
if self._ordering is not None:
|
||||||
return sorted([self.field.to_mongo(item) for item in value], key=itemgetter(self._ordering))
|
return sorted([self.field.to_mongo(item) for item in value],
|
||||||
|
key=itemgetter(self._ordering))
|
||||||
return sorted([self.field.to_mongo(item) for item in value])
|
return sorted([self.field.to_mongo(item) for item in value])
|
||||||
|
|
||||||
class DictField(BaseField):
|
class DictField(BaseField):
|
||||||
@ -514,25 +511,17 @@ class BinaryField(BaseField):
|
|||||||
if self.max_bytes is not None and len(value) > self.max_bytes:
|
if self.max_bytes is not None and len(value) > self.max_bytes:
|
||||||
raise ValidationError('Binary value is too long')
|
raise ValidationError('Binary value is too long')
|
||||||
|
|
||||||
<<<<<<< HEAD
|
|
||||||
=======
|
|
||||||
|
|
||||||
>>>>>>> 32e66b29f44f3015be099851201241caee92054f
|
|
||||||
class GridFSProxy(object):
|
class GridFSProxy(object):
|
||||||
"""Proxy object to handle writing and reading of files to and from GridFS
|
"""Proxy object to handle writing and reading of files to and from GridFS
|
||||||
|
|
||||||
|
.. versionadded:: 0.4
|
||||||
"""
|
"""
|
||||||
|
|
||||||
<<<<<<< HEAD
|
|
||||||
def __init__(self):
|
|
||||||
self.fs = gridfs.GridFS(_get_db()) # Filesystem instance
|
|
||||||
self.newfile = None # Used for partial writes
|
|
||||||
self.grid_id = None # Store GridFS id for file
|
|
||||||
=======
|
|
||||||
def __init__(self, grid_id=None):
|
def __init__(self, grid_id=None):
|
||||||
self.fs = gridfs.GridFS(_get_db()) # Filesystem instance
|
self.fs = gridfs.GridFS(_get_db()) # Filesystem instance
|
||||||
self.newfile = None # Used for partial writes
|
self.newfile = None # Used for partial writes
|
||||||
self.grid_id = grid_id # Store GridFS id for file
|
self.grid_id = grid_id # Store GridFS id for file
|
||||||
>>>>>>> 32e66b29f44f3015be099851201241caee92054f
|
|
||||||
|
|
||||||
def __getattr__(self, name):
|
def __getattr__(self, name):
|
||||||
obj = self.get()
|
obj = self.get()
|
||||||
@ -543,17 +532,13 @@ class GridFSProxy(object):
|
|||||||
return self
|
return self
|
||||||
|
|
||||||
def get(self, id=None):
|
def get(self, id=None):
|
||||||
<<<<<<< HEAD
|
|
||||||
try: return self.fs.get(id or self.grid_id)
|
|
||||||
except: return None # File has been deleted
|
|
||||||
=======
|
|
||||||
if id:
|
if id:
|
||||||
self.grid_id = id
|
self.grid_id = id
|
||||||
try:
|
try:
|
||||||
return self.fs.get(id or self.grid_id)
|
return self.fs.get(id or self.grid_id)
|
||||||
except:
|
except:
|
||||||
return None # File has been deleted
|
# File has been deleted
|
||||||
>>>>>>> 32e66b29f44f3015be099851201241caee92054f
|
return None
|
||||||
|
|
||||||
def new_file(self, **kwargs):
|
def new_file(self, **kwargs):
|
||||||
self.newfile = self.fs.new_file(**kwargs)
|
self.newfile = self.fs.new_file(**kwargs)
|
||||||
@ -575,20 +560,19 @@ class GridFSProxy(object):
|
|||||||
self.newfile.writelines(lines)
|
self.newfile.writelines(lines)
|
||||||
|
|
||||||
def read(self):
|
def read(self):
|
||||||
<<<<<<< HEAD
|
|
||||||
try: return self.get().read()
|
|
||||||
except: return None
|
|
||||||
=======
|
|
||||||
try:
|
try:
|
||||||
return self.get().read()
|
return self.get().read()
|
||||||
except:
|
except:
|
||||||
return None
|
return None
|
||||||
>>>>>>> 32e66b29f44f3015be099851201241caee92054f
|
|
||||||
|
|
||||||
def delete(self):
|
def delete(self):
|
||||||
# Delete file from GridFS, FileField still remains
|
# Delete file from GridFS, FileField still remains
|
||||||
self.fs.delete(self.grid_id)
|
self.fs.delete(self.grid_id)
|
||||||
self.grid_id = None
|
|
||||||
|
#self.grid_id = None
|
||||||
|
# Doesn't make a difference because will be put back in when
|
||||||
|
# reinstantiated We should delete all the metadata stored with the
|
||||||
|
# file too
|
||||||
|
|
||||||
def replace(self, file, **kwargs):
|
def replace(self, file, **kwargs):
|
||||||
self.delete()
|
self.delete()
|
||||||
@ -601,41 +585,30 @@ class GridFSProxy(object):
|
|||||||
msg = "The close() method is only necessary after calling write()"
|
msg = "The close() method is only necessary after calling write()"
|
||||||
warnings.warn(msg)
|
warnings.warn(msg)
|
||||||
|
|
||||||
<<<<<<< HEAD
|
|
||||||
=======
|
|
||||||
|
|
||||||
>>>>>>> 32e66b29f44f3015be099851201241caee92054f
|
|
||||||
class FileField(BaseField):
|
class FileField(BaseField):
|
||||||
"""A GridFS storage field.
|
"""A GridFS storage field.
|
||||||
|
|
||||||
|
.. versionadded:: 0.4
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, **kwargs):
|
def __init__(self, **kwargs):
|
||||||
<<<<<<< HEAD
|
|
||||||
self.gridfs = GridFSProxy()
|
|
||||||
=======
|
|
||||||
>>>>>>> 32e66b29f44f3015be099851201241caee92054f
|
|
||||||
super(FileField, self).__init__(**kwargs)
|
super(FileField, self).__init__(**kwargs)
|
||||||
|
|
||||||
def __get__(self, instance, owner):
|
def __get__(self, instance, owner):
|
||||||
if instance is None:
|
if instance is None:
|
||||||
return self
|
return self
|
||||||
|
|
||||||
<<<<<<< HEAD
|
|
||||||
return self.gridfs
|
|
||||||
=======
|
|
||||||
# Check if a file already exists for this model
|
# Check if a file already exists for this model
|
||||||
grid_file = instance._data.get(self.name)
|
grid_file = instance._data.get(self.name)
|
||||||
if grid_file:
|
self.grid_file = grid_file
|
||||||
return grid_file
|
if self.grid_file:
|
||||||
|
return self.grid_file
|
||||||
return GridFSProxy()
|
return GridFSProxy()
|
||||||
>>>>>>> 32e66b29f44f3015be099851201241caee92054f
|
|
||||||
|
|
||||||
def __set__(self, instance, value):
|
def __set__(self, instance, value):
|
||||||
if isinstance(value, file) or isinstance(value, str):
|
if isinstance(value, file) or isinstance(value, str):
|
||||||
# using "FileField() = file/string" notation
|
# using "FileField() = file/string" notation
|
||||||
<<<<<<< HEAD
|
|
||||||
self.gridfs.put(value)
|
|
||||||
=======
|
|
||||||
grid_file = instance._data.get(self.name)
|
grid_file = instance._data.get(self.name)
|
||||||
# If a file already exists, delete it
|
# If a file already exists, delete it
|
||||||
if grid_file:
|
if grid_file:
|
||||||
@ -649,24 +622,11 @@ class FileField(BaseField):
|
|||||||
# Create a new proxy object as we don't already have one
|
# Create a new proxy object as we don't already have one
|
||||||
instance._data[self.name] = GridFSProxy()
|
instance._data[self.name] = GridFSProxy()
|
||||||
instance._data[self.name].put(value)
|
instance._data[self.name].put(value)
|
||||||
>>>>>>> 32e66b29f44f3015be099851201241caee92054f
|
|
||||||
else:
|
else:
|
||||||
instance._data[self.name] = value
|
instance._data[self.name] = value
|
||||||
|
|
||||||
def to_mongo(self, value):
|
def to_mongo(self, value):
|
||||||
# Store the GridFS file id in MongoDB
|
# Store the GridFS file id in MongoDB
|
||||||
<<<<<<< HEAD
|
|
||||||
return self.gridfs.grid_id
|
|
||||||
|
|
||||||
def to_python(self, value):
|
|
||||||
# Use stored value (id) to lookup file in GridFS
|
|
||||||
return self.gridfs.get()
|
|
||||||
|
|
||||||
def validate(self, value):
|
|
||||||
assert isinstance(value, GridFSProxy)
|
|
||||||
assert isinstance(value.grid_id, pymongo.objectid.ObjectId)
|
|
||||||
|
|
||||||
=======
|
|
||||||
if isinstance(value, GridFSProxy) and value.grid_id is not None:
|
if isinstance(value, GridFSProxy) and value.grid_id is not None:
|
||||||
return value.grid_id
|
return value.grid_id
|
||||||
return None
|
return None
|
||||||
@ -680,6 +640,7 @@ class FileField(BaseField):
|
|||||||
assert isinstance(value, GridFSProxy)
|
assert isinstance(value, GridFSProxy)
|
||||||
assert isinstance(value.grid_id, pymongo.objectid.ObjectId)
|
assert isinstance(value.grid_id, pymongo.objectid.ObjectId)
|
||||||
|
|
||||||
|
|
||||||
class GeoPointField(BaseField):
|
class GeoPointField(BaseField):
|
||||||
"""A list storing a latitude and longitude.
|
"""A list storing a latitude and longitude.
|
||||||
"""
|
"""
|
||||||
@ -692,10 +653,9 @@ class GeoPointField(BaseField):
|
|||||||
if not isinstance(value, (list, tuple)):
|
if not isinstance(value, (list, tuple)):
|
||||||
raise ValidationError('GeoPointField can only accept tuples or '
|
raise ValidationError('GeoPointField can only accept tuples or '
|
||||||
'lists of (x, y)')
|
'lists of (x, y)')
|
||||||
|
|
||||||
if not len(value) == 2:
|
if not len(value) == 2:
|
||||||
raise ValidationError('Value must be a two-dimensional point.')
|
raise ValidationError('Value must be a two-dimensional point.')
|
||||||
if (not isinstance(value[0], (float, int)) and
|
if (not isinstance(value[0], (float, int)) and
|
||||||
not isinstance(value[1], (float, int))):
|
not isinstance(value[1], (float, int))):
|
||||||
raise ValidationError('Both values in point must be float or int.')
|
raise ValidationError('Both values in point must be float or int.')
|
||||||
>>>>>>> 32e66b29f44f3015be099851201241caee92054f
|
|
||||||
|
@ -163,7 +163,7 @@ class QuerySet(object):
|
|||||||
self._where_clause = None
|
self._where_clause = None
|
||||||
self._loaded_fields = []
|
self._loaded_fields = []
|
||||||
self._ordering = []
|
self._ordering = []
|
||||||
|
|
||||||
# If inheritance is allowed, only return instances and instances of
|
# If inheritance is allowed, only return instances and instances of
|
||||||
# subclasses of the class being used
|
# subclasses of the class being used
|
||||||
if document._meta.get('allow_inheritance'):
|
if document._meta.get('allow_inheritance'):
|
||||||
@ -240,7 +240,7 @@ class QuerySet(object):
|
|||||||
"""An alias of :meth:`~mongoengine.queryset.QuerySet.__call__`
|
"""An alias of :meth:`~mongoengine.queryset.QuerySet.__call__`
|
||||||
"""
|
"""
|
||||||
return self.__call__(*q_objs, **query)
|
return self.__call__(*q_objs, **query)
|
||||||
|
|
||||||
def all(self):
|
def all(self):
|
||||||
"""Returns all documents."""
|
"""Returns all documents."""
|
||||||
return self.__call__()
|
return self.__call__()
|
||||||
@ -256,7 +256,7 @@ class QuerySet(object):
|
|||||||
background = self._document._meta.get('index_background', False)
|
background = self._document._meta.get('index_background', False)
|
||||||
drop_dups = self._document._meta.get('index_drop_dups', False)
|
drop_dups = self._document._meta.get('index_drop_dups', False)
|
||||||
index_opts = self._document._meta.get('index_options', {})
|
index_opts = self._document._meta.get('index_options', {})
|
||||||
|
|
||||||
# Ensure document-defined indexes are created
|
# Ensure document-defined indexes are created
|
||||||
if self._document._meta['indexes']:
|
if self._document._meta['indexes']:
|
||||||
for key_or_list in self._document._meta['indexes']:
|
for key_or_list in self._document._meta['indexes']:
|
||||||
@ -267,12 +267,12 @@ class QuerySet(object):
|
|||||||
for index in self._document._meta['unique_indexes']:
|
for index in self._document._meta['unique_indexes']:
|
||||||
self._collection.ensure_index(index, unique=True,
|
self._collection.ensure_index(index, unique=True,
|
||||||
background=background, drop_dups=drop_dups, **index_opts)
|
background=background, drop_dups=drop_dups, **index_opts)
|
||||||
|
|
||||||
# If _types is being used (for polymorphism), it needs an index
|
# If _types is being used (for polymorphism), it needs an index
|
||||||
if '_types' in self._query:
|
if '_types' in self._query:
|
||||||
self._collection.ensure_index('_types',
|
self._collection.ensure_index('_types',
|
||||||
background=background, **index_opts)
|
background=background, **index_opts)
|
||||||
|
|
||||||
# Ensure all needed field indexes are created
|
# Ensure all needed field indexes are created
|
||||||
for field in self._document._fields.values():
|
for field in self._document._fields.values():
|
||||||
if field.__class__._geo_index:
|
if field.__class__._geo_index:
|
||||||
@ -471,7 +471,7 @@ class QuerySet(object):
|
|||||||
|
|
||||||
def in_bulk(self, object_ids):
|
def in_bulk(self, object_ids):
|
||||||
"""Retrieve a set of documents by their ids.
|
"""Retrieve a set of documents by their ids.
|
||||||
|
|
||||||
:param object_ids: a list or tuple of ``ObjectId``\ s
|
:param object_ids: a list or tuple of ``ObjectId``\ s
|
||||||
:rtype: dict of ObjectIds as keys and collection-specific
|
:rtype: dict of ObjectIds as keys and collection-specific
|
||||||
Document subclasses as values.
|
Document subclasses as values.
|
||||||
@ -483,7 +483,7 @@ class QuerySet(object):
|
|||||||
docs = self._collection.find({'_id': {'$in': object_ids}})
|
docs = self._collection.find({'_id': {'$in': object_ids}})
|
||||||
for doc in docs:
|
for doc in docs:
|
||||||
doc_map[doc['_id']] = self._document._from_son(doc)
|
doc_map[doc['_id']] = self._document._from_son(doc)
|
||||||
|
|
||||||
return doc_map
|
return doc_map
|
||||||
|
|
||||||
def next(self):
|
def next(self):
|
||||||
@ -637,7 +637,7 @@ class QuerySet(object):
|
|||||||
# Integer index provided
|
# Integer index provided
|
||||||
elif isinstance(key, int):
|
elif isinstance(key, int):
|
||||||
return self._document._from_son(self._cursor[key])
|
return self._document._from_son(self._cursor[key])
|
||||||
|
|
||||||
def distinct(self, field):
|
def distinct(self, field):
|
||||||
"""Return a list of distinct values for a given field.
|
"""Return a list of distinct values for a given field.
|
||||||
|
|
||||||
@ -649,9 +649,9 @@ class QuerySet(object):
|
|||||||
|
|
||||||
def only(self, *fields):
|
def only(self, *fields):
|
||||||
"""Load only a subset of this document's fields. ::
|
"""Load only a subset of this document's fields. ::
|
||||||
|
|
||||||
post = BlogPost.objects(...).only("title")
|
post = BlogPost.objects(...).only("title")
|
||||||
|
|
||||||
:param fields: fields to include
|
:param fields: fields to include
|
||||||
|
|
||||||
.. versionadded:: 0.3
|
.. versionadded:: 0.3
|
||||||
|
44
tests/connnection.py
Normal file
44
tests/connnection.py
Normal file
@ -0,0 +1,44 @@
|
|||||||
|
import unittest
|
||||||
|
import datetime
|
||||||
|
import pymongo
|
||||||
|
|
||||||
|
import mongoengine.connection
|
||||||
|
from mongoengine import *
|
||||||
|
from mongoengine.connection import _get_db, _get_connection
|
||||||
|
|
||||||
|
|
||||||
|
class ConnectionTest(unittest.TestCase):

    def tearDown(self):
        # Reset module-level connection state so each test starts clean.
        mongoengine.connection._connection_settings = {}
        mongoengine.connection._connections = {}
        mongoengine.connection._dbs = {}

    def test_connect(self):
        """Ensure that the connect() method works properly.
        """
        connect('mongoenginetest')

        connection = _get_connection()
        self.assertTrue(isinstance(connection, pymongo.connection.Connection))

        database = _get_db()
        self.assertTrue(isinstance(database, pymongo.database.Database))
        self.assertEqual(database.name, 'mongoenginetest')

    def test_register_connection(self):
        """Ensure that connections with different aliases may be registered.
        """
        register_connection('testdb', 'mongoenginetest2')

        # The default alias was never registered, so looking it up fails.
        self.assertRaises(ConnectionError, _get_connection)
        connection = _get_connection('testdb')
        self.assertTrue(isinstance(connection, pymongo.connection.Connection))

        database = _get_db('testdb')
        self.assertTrue(isinstance(database, pymongo.database.Database))
        self.assertEqual(database.name, 'mongoenginetest2')
|
||||||
|
|
||||||
|
|
||||||
|
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
|
@ -693,7 +693,7 @@ class FieldTest(unittest.TestCase):
|
|||||||
testfile.name = "Hello, World!"
|
testfile.name = "Hello, World!"
|
||||||
testfile.file.put('Hello, World!')
|
testfile.file.put('Hello, World!')
|
||||||
testfile.save()
|
testfile.save()
|
||||||
|
|
||||||
# Second instance
|
# Second instance
|
||||||
testfiledupe = TestFile()
|
testfiledupe = TestFile()
|
||||||
data = testfiledupe.file.read() # Should be None
|
data = testfiledupe.file.read() # Should be None
|
||||||
|
Loading…
x
Reference in New Issue
Block a user