Compare commits
80 Commits
SHA1:
233b13d670, 5bcbb4fdaa, dbe2f5f2b8, ca8b58d66d, f80f0b416f, d7765511ee,
0240a09056, ab15c4eec9, 4ce1ba81a6, 530440b333, b80fda36af, 42d24263ef,
1e2797e7ce, f7075766fc, 5647ca70bb, 2b8aa6bafc, 410443471c, 0bb9781b91,
2769d6d7ca, 120b9433c2, 605092bd88, a4a8c94374, 0e93f6c0db, aa2add39ad,
a928047147, c474ca0f13, 88dc64653e, 5f4b70f3a9, 51b429e5b0, 360624eb6e,
d9d2291837, cbdf816232, 2d71eb8a18, 64d2532ce9, 0376910f33, 6d503119a1,
bfae93e57e, 49a66ba81a, a1d43fecd9, d0e42a4798, 2a34358abc, fd2bb8ea45,
98e5daa0e0, ad2e119282, c20c30d8d1, 66d215c9c1, 46e088d379, bbdd15161a,
ea9dc8cfb8, 6bd2ccc9bf, 56327c6b58, 712e8a51e4, 421f324f9e, 8fe4a70299,
3af6d0dbfd, e2bef076d3, 1bf9f28f4b, f1e7b97a93, 8cfe13ad90, 0f420abc8e,
3b5b715567, 520051af25, 7e376b40bb, fd18a48608, 64860c6287, 58635b24ba,
3ec9dfc108, bd1572f11a, 540a0cc59c, 83eb4f6b16, 95c58bd793, 65591c7727,
737cbf5f60, 4c67cbb4b7, ed2cc2a60b, 859e9b3cc4, c34e79fad9, 61411bb259,
fcdb0eff8f, 30d9347272
AUTHORS (+6)
@@ -97,3 +97,9 @@ that much better:
* Shalabh Aggarwal
* Chris Williams
* Robert Kajic
* Jacob Peddicord
* Nils Hasenbanck
* mostlystatic
* Greg Banks
* swashbuckler
* Adam Reeve
@@ -31,6 +31,9 @@ Documents
.. autoclass:: mongoengine.document.MapReduceDocument
   :members:

.. autoclass:: mongoengine.ValidationError
   :members:

Querying
========
@@ -2,8 +2,52 @@
Changelog
=========

Changes in 0.6.7
================
- Fixed indexing on '_id' or 'pk' or 'id'
- Invalid data from the DB now raises an InvalidDocumentError
- Cleaned up the Validation Error - docs and code
- Added meta `auto_create_index` so you can disable index creation
- Added write concern options to inserts
- Fixed typo in meta for index options
- Bug fix: Read preference now passed correctly
- Added support for file-like objects for GridFS
- Fix for #473 - Dereferencing abstracts

Changes in 0.6.6
================
- Django 1.4 fixed (finally)
- Added tests for Django

Changes in 0.6.5
================
- More Django updates

Changes in 0.6.4
================

- Refactored connection / fixed replicasetconnection
- Bug fix for unknown connection alias error message
- Sessions support Django 1.3 and Django 1.4
- Minor fix for ReferenceField

Changes in 0.6.3
================
- Updated sessions for Django 1.4
- Bug fix for updates where listfields contain embedded documents
- Bug fix for collection naming and mixins

Changes in 0.6.2
================
- Updated documentation for ReplicaSet connections
- Hack round _types issue with SERVER-5247 - querying other arrays may also cause problems.

Changes in 0.6.1
================
- Fix for replicaSet connections

Changes in 0.6
==============
================

- Added FutureWarning to inherited classes not declaring 'allow_inheritance' as the default will change in 0.7
- Added support for covered indexes when inheritance is off
@@ -2,19 +2,21 @@
Using MongoEngine with Django
=============================

.. note :: Updated to support Django 1.4

Connecting
==========
In your **settings.py** file, ignore the standard database settings (unless you
also plan to use the ORM in your project), and instead call
:func:`~mongoengine.connect` somewhere in the settings module.

Authentication
==============
MongoEngine includes a Django authentication backend, which uses MongoDB. The
:class:`~mongoengine.django.auth.User` model is a MongoEngine
:class:`~mongoengine.Document`, but implements most of the methods and
attributes that the standard Django :class:`User` model does - so the two are
moderately compatible. Using this backend will allow you to store users in
MongoDB but still use much of the Django authentication infrastructure (such as
the :func:`login_required` decorator and the :func:`authenticate` function). To
enable the MongoEngine auth backend, add the following to your **settings.py**
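For context, the connection call described above is a one-liner; a minimal sketch, assuming a placeholder database name::

    # settings.py -- 'myapp_db' is a hypothetical database name
    from mongoengine import connect

    connect('myapp_db')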
@@ -24,7 +26,7 @@ file::
        'mongoengine.django.auth.MongoEngineBackend',
    )

The :mod:`~mongoengine.django.auth` module also contains a
:func:`~mongoengine.django.auth.get_user` helper function, that takes a user's
:attr:`id` and returns a :class:`~mongoengine.django.auth.User` object.
@@ -49,9 +51,9 @@ Storage
=======
With MongoEngine's support for GridFS via the :class:`~mongoengine.FileField`,
it is useful to have a Django file storage backend that wraps this. The new
storage module is called :class:`~mongoengine.django.storage.GridFSStorage`.
Using it is very similar to using the default FileSystemStorage::

    from mongoengine.django.storage import GridFSStorage
    fs = GridFSStorage()
@@ -26,7 +26,12 @@ name - just supply the uri as the :attr:`host` to

    connect('project1', host='mongodb://localhost/database_name')

ReplicaSets
===========

MongoEngine now supports :func:`~pymongo.replica_set_connection.ReplicaSetConnection`.
To use it, use a URI-style connection and provide the `replicaSet` name in the
connection kwargs.

Multiple Databases
==================
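A minimal sketch of the URI-style connection described above; the database name 'mydb' and replica set name 'rs' are placeholders::

    from mongoengine import connect

    # the replicaSet name travels in the URI / connection kwargs
    connect('mydb', host='mongodb://localhost/mydb?replicaSet=rs')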
@@ -98,7 +98,7 @@ arguments can be set on all fields:

:attr:`required` (Default: False)
    If set to True and the field is not set on the document instance, a
    :class:`~mongoengine.base.ValidationError` will be raised when the document is
    :class:`~mongoengine.ValidationError` will be raised when the document is
    validated.

:attr:`default` (Default: None)
@@ -91,5 +91,5 @@ is an alias to :attr:`id`::
.. note::

    If you define your own primary key field, the field implicitly becomes
    required, so a :class:`ValidationError` will be thrown if you don't provide
    it.
    required, so a :class:`~mongoengine.ValidationError` will be thrown if
    you don't provide it.
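Both behaviours above can be seen in one short sketch (class and field names are illustrative; validation is client-side, so no database connection is needed)::

    from mongoengine import Document, StringField, ValidationError

    class User(Document):
        username = StringField(primary_key=True)  # implicitly required
        name = StringField(required=True)

    try:
        User().validate()
    except ValidationError, e:
        print e.to_dict()
        # {'username': 'Field is required ("username")',
        #  'name': 'Field is required ("name")'}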
@@ -5,15 +5,13 @@ Signals

.. versionadded:: 0.5

Signal support is provided by the excellent `blinker`_ library and
will gracefully fall back if it is not available.

.. note::

   Signal support is provided by the excellent `blinker`_ library and
   will gracefully fall back if it is not available.

<<<<<<< HEAD
The following document signals exist in MongoEngine and are pretty self explanatory:
=======
The following document signals exist in MongoEngine and are pretty self-explanatory:
>>>>>>> master

* `mongoengine.signals.pre_init`
* `mongoengine.signals.post_init`
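A minimal sketch of connecting a handler to one of the signals listed above (the document class and handler are illustrative)::

    from mongoengine import Document, StringField, signals

    class Author(Document):
        name = StringField()

    def post_init_handler(sender, document, **kwargs):
        # runs after an Author instance is initialised
        print 'post_init: %s' % document.name

    signals.post_init.connect(post_init_handler, sender=Author)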
@@ -18,6 +18,8 @@ Document.objects.with_id - now raises an InvalidQueryError if used with a filter
FutureWarning - A future warning has been added to all inherited classes that
don't define `allow_inheritance` in their meta.

You may need to update pyMongo to 2.0 for use with Sharding.

0.4 to 0.5
===========
@@ -74,7 +76,7 @@ To upgrade use a Mixin class to set meta like so ::

    class MyAceDocument(Document, BaseMixin):
        pass

    MyAceDocument._get_collection_name() == myacedocument
    MyAceDocument._get_collection_name() == "myacedocument"

Alternatively, you can rename your collections eg ::
@@ -12,7 +12,7 @@ from signals import *
__all__ = (document.__all__ + fields.__all__ + connection.__all__ +
           queryset.__all__ + signals.__all__)

VERSION = (0, 6, 0)
VERSION = (0, 6, 7)


def get_version():
@@ -25,7 +25,15 @@ class InvalidDocumentError(Exception):

class ValidationError(AssertionError):
    """Validation exception.

    May represent an error validating a field or a
    document containing fields with validation errors.

    :ivar errors: A dictionary of errors for fields within this
        document or list, or None if the error is for an
        individual field.
    """

    errors = {}
    field_name = None
    _message = None

@@ -43,10 +51,12 @@ class ValidationError(AssertionError):

    def __getattribute__(self, name):
        message = super(ValidationError, self).__getattribute__(name)
        if name == 'message' and self.field_name:
            return message + ' ("%s")' % self.field_name
        else:
            return message
        if name == 'message':
            if self.field_name:
                message = '%s ("%s")' % (message, self.field_name)
            if self.errors:
                message = '%s:\n%s' % (message, self._format_errors())
        return message

    def _get_message(self):
        return self._message

@@ -57,6 +67,13 @@ class ValidationError(AssertionError):
    message = property(_get_message, _set_message)

    def to_dict(self):
        """Returns a dictionary of all errors within a document

        Keys are field names or list indices and values are the
        validation error messages, or a nested dictionary of
        errors for an embedded document or list.
        """

        def build_dict(source):
            errors_dict = {}
            if not source:

@@ -73,6 +90,21 @@ class ValidationError(AssertionError):
            return {}
        return build_dict(self.errors)

    def _format_errors(self):
        """Returns a string listing all errors within a document"""

        def format_error(field, value, prefix=''):
            prefix = "%s.%s" % (prefix, field) if prefix else "%s" % field
            if isinstance(value, dict):
                return '\n'.join(
                    [format_error(k, value[k], prefix) for k in value])
            else:
                return "%s: %s" % (prefix, value)

        return '\n'.join(
            [format_error(k, v) for k, v in self.to_dict().items()])


_document_registry = {}
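Taken together, the new `errors` attribute, `__getattribute__` hook and `_format_errors` make nested validation errors printable; a short sketch mirroring the tests added later in this diff::

    from mongoengine import ValidationError

    error = ValidationError('root')
    error.errors = {'1st': ValidationError('bad 1st', errors={
        '2nd': ValidationError('bad 2nd'),
    })}
    print error.to_dict()   # {'1st': {'2nd': 'bad 2nd'}}
    print error.message     # root:\n1st.2nd: bad 2nd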
@@ -478,13 +510,18 @@ class DocumentMetaclass(type):
            attrs.update(dict([(k, v) for k, v in base.__dict__.items()
                               if issubclass(v.__class__, BaseField)]))

            # Handle simple mixin's with meta
            if hasattr(base, 'meta') and not isinstance(base, DocumentMetaclass):
                meta = attrs.get('meta', {})
                meta.update(base.meta)
                attrs['meta'] = meta

            for p_base in base.__bases__:
                #optimize :-)
                if p_base in (object, BaseDocument):
                    continue

                attrs.update(_get_mixin_fields(p_base))

            return attrs

        metaclass = attrs.get('__metaclass__')
@@ -498,6 +535,7 @@ class DocumentMetaclass(type):
        simple_class = True

        for base in bases:

            # Include all fields present in superclasses
            if hasattr(base, '_fields'):
                doc_fields.update(base._fields)
@@ -526,7 +564,8 @@ class DocumentMetaclass(type):
            simple_class = False

        doc_class_name = '.'.join(reversed(class_name))
        meta = attrs.get('_meta', attrs.get('meta', {}))
        meta = attrs.get('_meta', {})
        meta.update(attrs.get('meta', {}))

        if 'allow_inheritance' not in meta:
            meta['allow_inheritance'] = True
@@ -940,8 +979,8 @@ class BaseDocument(object):
        """
        # get the class name from the document, falling back to the given
        # class if unavailable
        class_name = son.get(u'_cls', cls._class_name)
        data = dict((str(key), value) for key, value in son.items())
        class_name = son.get('_cls', cls._class_name)
        data = dict(("%s" % key, value) for key, value in son.items())

        if '_types' in data:
            del data['_types']
@@ -954,11 +993,16 @@ class BaseDocument(object):
            cls = get_document(class_name)

        changed_fields = []
        errors_dict = {}

        for field_name, field in cls._fields.items():
            if field.db_field in data:
                value = data[field.db_field]
                data[field_name] = (value if value is None
                try:
                    data[field_name] = (value if value is None
                                        else field.to_python(value))
                except (AttributeError, ValueError), e:
                    errors_dict[field_name] = e
            elif field.default:
                default = field.default
                if callable(default):

@@ -966,7 +1010,13 @@ class BaseDocument(object):
                if isinstance(default, BaseDocument):
                    changed_fields.append(field_name)

        if errors_dict:
            errors = "\n".join(["%s - %s" % (k, v) for k, v in errors_dict.items()])
            raise InvalidDocumentError("""
Invalid data to create a `%s` instance.\n%s""".strip() % (cls._class_name, errors))

        obj = cls(**data)

        obj._changed_fields = changed_fields
        obj._created = False
        return obj
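The net effect is that bad data coming back from the database now fails loudly; a sketch mirroring the `test_invalid_son` test added later in this diff (field names are illustrative)::

    from mongoengine import Document, StringField, IntField
    from mongoengine.base import InvalidDocumentError

    class Word(Document):
        stem = StringField()
        count = IntField(default=1)

    try:
        # IntField.to_python('one') raises ValueError, collected per field
        Word._from_son({'stem': 'run', 'count': 'one'})
    except InvalidDocumentError, e:
        print e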
@@ -39,22 +39,7 @@ def register_connection(alias, name, host='localhost', port=27017,
    """
    global _connection_settings

    # Handle uri style connections
    if "://" in host:
        uri_dict = uri_parser.parse_uri(host)
        if uri_dict.get('database') is None:
            raise ConnectionError("If using URI style connection include "\
                                  "database name in string")
        _connection_settings[alias] = {
            'host': host,
            'name': uri_dict.get('database'),
            'username': uri_dict.get('username'),
            'password': uri_dict.get('password')
        }
        _connection_settings[alias].update(kwargs)
        return

    _connection_settings[alias] = {
    conn_settings = {
        'name': name,
        'host': host,
        'port': port,

@@ -64,7 +49,23 @@ def register_connection(alias, name, host='localhost', port=27017,
        'password': password,
        'read_preference': read_preference
    }
    _connection_settings[alias].update(kwargs)

    # Handle uri style connections
    if "://" in host:
        uri_dict = uri_parser.parse_uri(host)
        if uri_dict.get('database') is None:
            raise ConnectionError("If using URI style connection include "\
                                  "database name in string")
        conn_settings.update({
            'host': host,
            'name': uri_dict.get('database'),
            'username': uri_dict.get('username'),
            'password': uri_dict.get('password'),
            'read_preference': read_preference,
        })
        if "replicaSet" in host:
            conn_settings['replicaSet'] = True
    _connection_settings[alias] = conn_settings


def disconnect(alias=DEFAULT_CONNECTION_NAME):
@@ -86,7 +87,7 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):

    if alias not in _connections:
        if alias not in _connection_settings:
            msg = 'Connection with alias "%s" has not been defined'
            msg = 'Connection with alias "%s" has not been defined' % alias
            if alias == DEFAULT_CONNECTION_NAME:
                msg = 'You have not defined a default connection'
            raise ConnectionError(msg)
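For illustration, the alias in the error message is the one passed to connect/register_connection; a minimal sketch with a placeholder alias and database name::

    from mongoengine import connect
    from mongoengine.connection import get_connection

    connect('mydb', alias='analytics')
    conn = get_connection('analytics')  # raises ConnectionError if the alias is undefined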
@@ -105,11 +106,18 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
        for slave_alias in conn_settings['slaves']:
            slaves.append(get_connection(slave_alias))
        conn_settings['slaves'] = slaves
        conn_settings.pop('read_preference')
        conn_settings.pop('read_preference', None)

        connection_class = Connection
        if 'replicaSet' in conn_settings:
            conn_settings['hosts_or_uri'] = conn_settings.pop('host', None)
            # Discard port since it can't be used on ReplicaSetConnection
            conn_settings.pop('port', None)
            # Discard replicaSet if not base string
            if not isinstance(conn_settings['replicaSet'], basestring):
                conn_settings.pop('replicaSet', None)
            connection_class = ReplicaSetConnection

        try:
            _connections[alias] = connection_class(**conn_settings)
        except Exception, e:
@@ -112,6 +112,10 @@ class DeReference(object):
                for ref in references:
                    if '_cls' in ref:
                        doc = get_document(ref["_cls"])._from_son(ref)
                    elif doc_type is None:
                        doc = get_document(
                            ''.join(x.capitalize()
                                    for x in col.split('_')))._from_son(ref)
                    else:
                        doc = doc_type._from_son(ref)
                    object_map[doc.id] = doc
@@ -1,23 +1,39 @@
import datetime

from mongoengine import *

from django.utils.hashcompat import md5_constructor, sha_constructor
from django.utils.encoding import smart_str
from django.contrib.auth.models import AnonymousUser
from django.utils.translation import ugettext_lazy as _

import datetime
try:
    from django.contrib.auth.hashers import check_password, make_password
except ImportError:
    """Handle older versions of Django"""
    from django.utils.hashcompat import md5_constructor, sha_constructor

    def get_hexdigest(algorithm, salt, raw_password):
        raw_password, salt = smart_str(raw_password), smart_str(salt)
        if algorithm == 'md5':
            return md5_constructor(salt + raw_password).hexdigest()
        elif algorithm == 'sha1':
            return sha_constructor(salt + raw_password).hexdigest()
        raise ValueError('Got unknown password algorithm type in password')

    def check_password(raw_password, password):
        algo, salt, hash = password.split('$')
        return hash == get_hexdigest(algo, salt, raw_password)

    def make_password(raw_password):
        from random import random
        algo = 'sha1'
        salt = get_hexdigest(algo, str(random()), str(random()))[:5]
        hash = get_hexdigest(algo, salt, raw_password)
        return '%s$%s$%s' % (algo, salt, hash)


REDIRECT_FIELD_NAME = 'next'

def get_hexdigest(algorithm, salt, raw_password):
    raw_password, salt = smart_str(raw_password), smart_str(salt)
    if algorithm == 'md5':
        return md5_constructor(salt + raw_password).hexdigest()
    elif algorithm == 'sha1':
        return sha_constructor(salt + raw_password).hexdigest()
    raise ValueError('Got unknown password algorithm type in password')


class User(Document):
    """A User document that aims to mirror most of the API specified by Django
    at http://docs.djangoproject.com/en/dev/topics/auth/#users
@@ -34,7 +50,7 @@ class User(Document):
    email = EmailField(verbose_name=_('e-mail address'))
    password = StringField(max_length=128,
                           verbose_name=_('password'),
                           help_text=_("Use '[algo]$[salt]$[hexdigest]' or use the <a href=\"password/\">change password form</a>."))
                           help_text=_("Use '[algo]$[iterations]$[salt]$[hexdigest]' or use the <a href=\"password/\">change password form</a>."))
    is_staff = BooleanField(default=False,
                            verbose_name=_('staff status'),
                            help_text=_("Designates whether the user can log into this admin site."))
@@ -75,11 +91,7 @@ class User(Document):
        assigning to :attr:`~mongoengine.django.auth.User.password` as the
        password is hashed before storage.
        """
        from random import random
        algo = 'sha1'
        salt = get_hexdigest(algo, str(random()), str(random()))[:5]
        hash = get_hexdigest(algo, salt, raw_password)
        self.password = '%s$%s$%s' % (algo, salt, hash)
        self.password = make_password(raw_password)
        self.save()
        return self
@@ -89,8 +101,7 @@ class User(Document):
        :attr:`~mongoengine.django.auth.User.password` as the password is
        hashed before storage.
        """
        algo, salt, hash = self.password.split('$')
        return hash == get_hexdigest(algo, salt, raw_password)
        return check_password(raw_password, self.password)

    @classmethod
    def create_user(cls, username, password, email=None):
@@ -1,3 +1,6 @@
from datetime import datetime

from django.conf import settings
from django.contrib.sessions.backends.base import SessionBase, CreateError
from django.core.exceptions import SuspiciousOperation
from django.utils.encoding import force_unicode
@@ -6,18 +9,18 @@ from mongoengine.document import Document
from mongoengine import fields
from mongoengine.queryset import OperationError
from mongoengine.connection import DEFAULT_CONNECTION_NAME
from django.conf import settings
from datetime import datetime


MONGOENGINE_SESSION_DB_ALIAS = getattr(
    settings, 'MONGOENGINE_SESSION_DB_ALIAS',
    DEFAULT_CONNECTION_NAME)


class MongoSession(Document):
    session_key = fields.StringField(primary_key=True, max_length=40)
    session_data = fields.StringField()
    expire_date = fields.DateTimeField()

    meta = {'collection': 'django_session',
            'db_alias': MONGOENGINE_SESSION_DB_ALIAS,
            'allow_inheritance': False}
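A sketch of wiring the session backend up in settings.py, assuming Django's standard SESSION_ENGINE setting; the alias value is a placeholder::

    # settings.py
    SESSION_ENGINE = 'mongoengine.django.sessions'
    # optional: route session documents to a named connection alias
    MONGOENGINE_SESSION_DB_ALIAS = 'sessions'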
@@ -41,7 +44,7 @@ class SessionStore(SessionBase):

    def create(self):
        while True:
            self.session_key = self._get_new_session_key()
            self._session_key = self._get_new_session_key()
            try:
                self.save(must_create=True)
            except CreateError:
@@ -51,6 +54,8 @@ class SessionStore(SessionBase):
            return

    def save(self, must_create=False):
        if self.session_key is None:
            self.create()
        s = MongoSession(session_key=self.session_key)
        s.session_data = self.encode(self._get_session(no_load=must_create))
        s.expire_date = self.get_expiry_date()
@@ -74,6 +74,12 @@ class Document(BaseDocument):
    names. Index direction may be specified by prefixing the field names with
    a **+** or **-** sign.

    Automatic index creation can be disabled by specifying
    :attr:`auto_create_index` in the :attr:`meta` dictionary. If this is set to
    False then indexes will not be created by MongoEngine. This is useful in
    production systems where index creation is performed as part of a deployment
    system.

    By default, _types will be added to the start of every index (that
    doesn't contain a list) if allow_inheritance is True. This can be
    disabled by either setting types to False on the specific index or
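A sketch of the meta flag being documented here, mirroring the `test_disable_index_creation` test added later in this diff::

    from mongoengine import Document, StringField

    class User(Document):
        user_guid = StringField(required=True)

        meta = {
            'indexes': ['user_guid'],
            'auto_create_index': False,  # indexes created by deployment tooling instead
        }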
@@ -147,8 +153,9 @@ class Document(BaseDocument):
        :meth:`~pymongo.collection.Collection.save` OR
        :meth:`~pymongo.collection.Collection.insert`
        which will be used as options for the resultant ``getLastError`` command.
        For example, ``save(..., w=2, fsync=True)`` will wait until at least two servers
        have recorded the write and will force an fsync on each server being written to.
        For example, ``save(..., write_options={w: 2, fsync: True}, ...)`` will
        wait until at least two servers have recorded the write and will force an
        fsync on each server being written to.
        :param cascade: Sets the flag for cascading saves. You can set a default by setting
            "cascade" in the document __meta__
        :param cascade_kwargs: optional kwargs dictionary to be passed through to cascading saves
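A sketch of the corrected calling convention (document class and database name are placeholders)::

    from mongoengine import Document, StringField, connect

    class BlogPost(Document):
        title = StringField()

    connect('mydb')
    BlogPost(title='hello').save(write_options={'w': 2, 'fsync': True})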
@@ -8,7 +8,7 @@ import uuid
from bson import Binary, DBRef, SON, ObjectId

from base import (BaseField, ComplexBaseField, ObjectIdField,
                  ValidationError, get_document)
                  ValidationError, get_document, BaseDocument)
from queryset import DO_NOTHING, QuerySet
from document import Document, EmbeddedDocument
from connection import get_db, DEFAULT_CONNECTION_NAME
@@ -497,6 +497,7 @@ class ListField(ComplexBaseField):
    def prepare_query_value(self, op, value):
        if self.field:
            if op in ('set', 'unset') and (not isinstance(value, basestring)
                                           and not isinstance(value, BaseDocument)
                                           and hasattr(value, '__iter__')):
                return [self.field.prepare_query_value(op, v) for v in value]
            return self.field.prepare_query_value(op, value)
@@ -656,6 +657,9 @@ class ReferenceField(BaseField):
        return super(ReferenceField, self).__get__(instance, owner)

    def to_mongo(self, document):
        if isinstance(document, DBRef):
            return document

        id_field_name = self.document_type._meta['id_field']
        id_field = self.document_type._fields[id_field_name]
@@ -871,10 +875,14 @@ class GridFSProxy(object):
        self.newfile.writelines(lines)

    def read(self, size=-1):
        try:
            return self.get().read(size)
        except:
        gridout = self.get()
        if gridout is None:
            return None
        else:
            try:
                return gridout.read(size)
            except:
                return ""

    def delete(self):
        # Delete file from GridFS, FileField still remains
@@ -931,7 +939,7 @@ class FileField(BaseField):

    def __set__(self, instance, value):
        key = self.name
        if isinstance(value, file) or isinstance(value, str):
        if (hasattr(value, 'read') and not isinstance(value, GridFSProxy)) or isinstance(value, str):
            # using "FileField() = file/string" notation
            grid_file = instance._data.get(self.name)
            # If a file already exists, delete it
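With this change anything exposing read() can be assigned or put; a sketch using StringIO, mirroring the field test added later in this diff (class and database names are placeholders)::

    import StringIO

    from mongoengine import Document, FileField, connect

    class PutFile(Document):
        file = FileField()

    connect('mydb')
    putfile = PutFile()
    data = StringIO.StringIO('Hello, World!')
    putfile.file.put(data, content_type='text/plain')
    putfile.save()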
@@ -394,61 +394,6 @@ class QuerySet(object):
                              unique=index_spec.get('unique', False))
        return self

    @classmethod
    def _build_index_spec(cls, doc_cls, spec):
        """Build a PyMongo index spec from a MongoEngine index spec.
        """
        if isinstance(spec, basestring):
            spec = {'fields': [spec]}
        if isinstance(spec, (list, tuple)):
            spec = {'fields': spec}

        index_list = []
        use_types = doc_cls._meta.get('allow_inheritance', True)
        for key in spec['fields']:
            # Get ASCENDING direction from +, DESCENDING from -, and GEO2D from *
            direction = pymongo.ASCENDING
            if key.startswith("-"):
                direction = pymongo.DESCENDING
            elif key.startswith("*"):
                direction = pymongo.GEO2D
            if key.startswith(("+", "-", "*")):
                key = key[1:]

            # Use real field name, do it manually because we need field
            # objects for the next part (list field checking)
            parts = key.split('.')
            fields = QuerySet._lookup_field(doc_cls, parts)
            parts = [field.db_field for field in fields]
            key = '.'.join(parts)
            index_list.append((key, direction))

            # Check if a list field is being used, don't use _types if it is
            if use_types and not all(f._index_with_types for f in fields):
                use_types = False

        # If _types is being used, prepend it to every specified index
        index_types = doc_cls._meta.get('index_types', True)
        allow_inheritance = doc_cls._meta.get('allow_inheritance')
        if spec.get('types', index_types) and allow_inheritance and use_types and direction is not pymongo.GEO2D:
            index_list.insert(0, ('_types', 1))

        spec['fields'] = index_list

        if spec.get('sparse', False) and len(spec['fields']) > 1:
            raise ValueError(
                'Sparse indexes can only have one field in them. '
                'See https://jira.mongodb.org/browse/SERVER-2193')

        return spec

    @classmethod
    def _reset_already_indexed(cls, document=None):
        """Helper to reset already indexed, can be useful for testing purposes"""
        if document:
            cls.__already_indexed.discard(document)
        cls.__already_indexed.clear()

    def __call__(self, q_obj=None, class_check=True, slave_okay=False, **query):
        """Filter the selected documents by calling the
        :class:`~mongoengine.queryset.QuerySet` with a query.
@@ -481,13 +426,124 @@ class QuerySet(object):
        """Returns all documents."""
        return self.__call__()

    def _ensure_indexes(self):
        """Checks the document meta data and ensures all the indexes exist.

        .. note:: You can disable automatic index creation by setting
                  `auto_create_index` to False in the documents meta data
        """
        background = self._document._meta.get('index_background', False)
        drop_dups = self._document._meta.get('index_drop_dups', False)
        index_opts = self._document._meta.get('index_opts', {})
        index_types = self._document._meta.get('index_types', True)

        # determine if an index which we are creating includes
        # _type as its first field; if so, we can avoid creating
        # an extra index on _type, as mongodb will use the existing
        # index to service queries against _type
        types_indexed = False

        def includes_types(fields):
            first_field = None
            if len(fields):
                if isinstance(fields[0], basestring):
                    first_field = fields[0]
                elif isinstance(fields[0], (list, tuple)) and len(fields[0]):
                    first_field = fields[0][0]
            return first_field == '_types'

        # Ensure indexes created by uniqueness constraints
        for index in self._document._meta['unique_indexes']:
            types_indexed = types_indexed or includes_types(index)
            self._collection.ensure_index(index, unique=True,
                background=background, drop_dups=drop_dups, **index_opts)

        # Ensure document-defined indexes are created
        if self._document._meta['indexes']:
            for spec in self._document._meta['indexes']:
                types_indexed = types_indexed or includes_types(spec['fields'])
                opts = index_opts.copy()
                opts['unique'] = spec.get('unique', False)
                opts['sparse'] = spec.get('sparse', False)
                self._collection.ensure_index(spec['fields'],
                    background=background, **opts)

        # If _types is being used (for polymorphism), it needs an index,
        # only if another index doesn't begin with _types
        if index_types and '_types' in self._query and not types_indexed:
            self._collection.ensure_index('_types',
                background=background, **index_opts)

        # Add geo indicies
        for field in self._document._geo_indices():
            index_spec = [(field.db_field, pymongo.GEO2D)]
            self._collection.ensure_index(index_spec,
                background=background, **index_opts)

    @classmethod
    def _build_index_spec(cls, doc_cls, spec):
        """Build a PyMongo index spec from a MongoEngine index spec.
        """
        if isinstance(spec, basestring):
            spec = {'fields': [spec]}
        if isinstance(spec, (list, tuple)):
            spec = {'fields': spec}

        index_list = []
        use_types = doc_cls._meta.get('allow_inheritance', True)
        for key in spec['fields']:
            # Get ASCENDING direction from +, DESCENDING from -, and GEO2D from *
            direction = pymongo.ASCENDING
            if key.startswith("-"):
                direction = pymongo.DESCENDING
            elif key.startswith("*"):
                direction = pymongo.GEO2D
            if key.startswith(("+", "-", "*")):
                key = key[1:]

            # Use real field name, do it manually because we need field
            # objects for the next part (list field checking)
            parts = key.split('.')
            if parts in (['pk'], ['id'], ['_id']):
                key = '_id'
            else:
                fields = QuerySet._lookup_field(doc_cls, parts)
                parts = [field if field == '_id' else field.db_field for field in fields]
                key = '.'.join(parts)
            index_list.append((key, direction))

            # Check if a list field is being used, don't use _types if it is
            if use_types and not all(f._index_with_types for f in fields):
                use_types = False

        # If _types is being used, prepend it to every specified index
        index_types = doc_cls._meta.get('index_types', True)
        allow_inheritance = doc_cls._meta.get('allow_inheritance')
        if spec.get('types', index_types) and allow_inheritance and use_types and direction is not pymongo.GEO2D:
            index_list.insert(0, ('_types', 1))

        spec['fields'] = index_list
        if spec.get('sparse', False) and len(spec['fields']) > 1:
            raise ValueError(
                'Sparse indexes can only have one field in them. '
                'See https://jira.mongodb.org/browse/SERVER-2193')

        return spec

    @classmethod
    def _reset_already_indexed(cls, document=None):
        """Helper to reset already indexed, can be useful for testing purposes"""
        if document:
            cls.__already_indexed.discard(document)
        cls.__already_indexed.clear()


    @property
    def _collection(self):
        """Property that returns the collection object. This allows us to
        perform operations only if the collection is accessed.
        """
        if self._document not in QuerySet.__already_indexed:

            # Ensure collection exists
            db = self._document._get_db()
            if self._collection_obj.name not in db.collection_names():
@@ -496,52 +552,8 @@ class QuerySet(object):

            QuerySet.__already_indexed.add(self._document)

            background = self._document._meta.get('index_background', False)
            drop_dups = self._document._meta.get('index_drop_dups', False)
            index_opts = self._document._meta.get('index_options', {})
            index_types = self._document._meta.get('index_types', True)

            # determine if an index which we are creating includes
            # _type as its first field; if so, we can avoid creating
            # an extra index on _type, as mongodb will use the existing
            # index to service queries against _type
            types_indexed = False

            def includes_types(fields):
                first_field = None
                if len(fields):
                    if isinstance(fields[0], basestring):
                        first_field = fields[0]
                    elif isinstance(fields[0], (list, tuple)) and len(fields[0]):
                        first_field = fields[0][0]
                return first_field == '_types'

            # Ensure indexes created by uniqueness constraints
            for index in self._document._meta['unique_indexes']:
                types_indexed = types_indexed or includes_types(index)
                self._collection.ensure_index(index, unique=True,
                    background=background, drop_dups=drop_dups, **index_opts)

            # Ensure document-defined indexes are created
            if self._document._meta['indexes']:
                for spec in self._document._meta['indexes']:
                    types_indexed = types_indexed or includes_types(spec['fields'])
                    opts = index_opts.copy()
                    opts['unique'] = spec.get('unique', False)
                    opts['sparse'] = spec.get('sparse', False)
                    self._collection.ensure_index(spec['fields'],
                        background=background, **opts)

            # If _types is being used (for polymorphism), it needs an index,
            # only if another index doesn't begin with _types
            if index_types and '_types' in self._query and not types_indexed:
                self._collection.ensure_index('_types',
                    background=background, **index_opts)

            # Add geo indicies
            for field in self._document._geo_indices():
                index_spec = [(field.db_field, pymongo.GEO2D)]
                self._collection.ensure_index(index_spec,
                    background=background, **index_opts)
            if self._document._meta.get('auto_create_index', True):
                self._ensure_indexes()

        return self._collection_obj
@@ -824,11 +836,21 @@ class QuerySet(object):
            result = None
        return result

    def insert(self, doc_or_docs, load_bulk=True):
    def insert(self, doc_or_docs, load_bulk=True, safe=False, write_options=None):
        """bulk insert documents

        If ``safe=True`` and the operation is unsuccessful, an
        :class:`~mongoengine.OperationError` will be raised.

        :param docs_or_doc: a document or list of documents to be inserted
        :param load_bulk (optional): If True returns the list of document instances
        :param safe: check if the operation succeeded before returning
        :param write_options: Extra keyword arguments are passed down to
            :meth:`~pymongo.collection.Collection.insert`
            which will be used as options for the resultant ``getLastError`` command.
            For example, ``insert(..., {w: 2, fsync: True})`` will wait until at least two
            servers have recorded the write and will force an fsync on each server being
            written to.

        By default returns document instances, set ``load_bulk`` to False to
        return just ``ObjectIds``
@@ -837,6 +859,10 @@ class QuerySet(object):
        """
        from document import Document

        if not write_options:
            write_options = {}
        write_options.update({'safe': safe})

        docs = doc_or_docs
        return_one = False
        if isinstance(docs, Document) or issubclass(docs.__class__, Document):
@@ -854,7 +880,13 @@ class QuerySet(object):
            raw.append(doc.to_mongo())

        signals.pre_bulk_insert.send(self._document, documents=docs)
        ids = self._collection.insert(raw)
        try:
            ids = self._collection.insert(raw, **write_options)
        except pymongo.errors.OperationFailure, err:
            message = 'Could not save document (%s)'
            if u'duplicate key' in unicode(err):
                message = u'Tried to save duplicate unique keys (%s)'
            raise OperationError(message % unicode(err))

        if not load_bulk:
            signals.post_bulk_insert.send(
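A sketch of the new insert options, mirroring the `test_bulk_insert` changes later in this diff (class and database names are placeholders)::

    from mongoengine import Document, StringField, connect

    class Blog(Document):
        title = StringField(unique=True)

    connect('mydb')
    Blog.objects.insert([Blog(title='foo'), Blog(title='bar')], safe=True)

    # skip server-side duplicate key errors instead of aborting the batch
    Blog.objects.insert([Blog(title='bar'), Blog(title='baz')],
                        write_options={'continue_on_error': True})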
@@ -1371,8 +1403,15 @@ class QuerySet(object):
            write_options = {}

        update = QuerySet._transform_update(self._document, **update)
        query = self._query

        # SERVER-5247 hack
        remove_types = "_types" in query and ".$." in unicode(update)
        if remove_types:
            del query["_types"]

        try:
            ret = self._collection.update(self._query, update, multi=multi,
            ret = self._collection.update(query, update, multi=multi,
                                          upsert=upsert, safe=safe_update,
                                          **write_options)
            if ret is not None and 'n' in ret:
@@ -1400,10 +1439,17 @@ class QuerySet(object):
        if not write_options:
            write_options = {}
        update = QuerySet._transform_update(self._document, **update)
        query = self._query

        # SERVER-5247 hack
        remove_types = "_types" in query and ".$." in unicode(update)
        if remove_types:
            del query["_types"]

        try:
            # Explicitly provide 'multi=False' to newer versions of PyMongo
            # as the default may change to 'True'
            ret = self._collection.update(self._query, update, multi=False,
            ret = self._collection.update(query, update, multi=False,
                                          upsert=upsert, safe=safe_update,
                                          **write_options)
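The hack exists so that positional updates keep working against servers affected by SERVER-5247; a sketch of the kind of query it protects, taken from the queryset tests later in this diff (a placeholder database name is assumed)::

    from mongoengine import (Document, EmbeddedDocument, EmbeddedDocumentField,
                             IntField, ListField, StringField, connect)

    class Comment(EmbeddedDocument):
        by = StringField()
        votes = IntField()

    class BlogPost(Document):
        comments = ListField(EmbeddedDocumentField(Comment))

    connect('mydb')
    # $ positional update; _types is stripped from the query behind the scenes
    BlogPost.objects(comments__by='joe').update(inc__comments__S__votes=1)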
@@ -5,7 +5,7 @@
%define srcname mongoengine

Name:           python-%{srcname}
Version:        0.6
Version:        0.6.7
Release:        1%{?dist}
Summary:        A Python Document-Object Mapper for working with MongoDB

@@ -51,12 +51,4 @@ rm -rf $RPM_BUILD_ROOT
# %{python_sitearch}/*

%changelog
* Mon Mar 05 2012 Ross Lawley <ross.lawley@gmail.com> 0.6
- 0.6 released
* Thu Oct 27 2011 Pau Aliagas <linuxnow@gmail.com> 0.5.3-1
- Update to latest dev version
- Add PIL dependency for ImageField
* Wed Oct 12 2011 Pau Aliagas <linuxnow@gmail.com> 0.5.2-1
- Update version
* Fri Sep 23 2011 Pau Aliagas <linuxnow@gmail.com> 0.5.0-1
- Initial version
* See: http://readthedocs.org/docs/mongoengine-odm/en/latest/changelog.html
requirements.txt (new file, +1)
@@ -0,0 +1 @@
pymongo
@@ -810,3 +810,34 @@ class FieldTest(unittest.TestCase):
        room = Room.objects.first().select_related()
        self.assertEquals(room.staffs_with_position[0]['staff'], sarah)
        self.assertEquals(room.staffs_with_position[1]['staff'], bob)

    def test_document_reload_no_inheritance(self):
        class Foo(Document):
            meta = {'allow_inheritance': False}
            bar = ReferenceField('Bar')
            baz = ReferenceField('Baz')

        class Bar(Document):
            meta = {'allow_inheritance': False}
            msg = StringField(required=True, default='Blammo!')

        class Baz(Document):
            meta = {'allow_inheritance': False}
            msg = StringField(required=True, default='Kaboom!')

        Foo.drop_collection()
        Bar.drop_collection()
        Baz.drop_collection()

        bar = Bar()
        bar.save()
        baz = Baz()
        baz.save()
        foo = Foo()
        foo.bar = bar
        foo.baz = baz
        foo.save()
        foo.reload()

        self.assertEquals(type(foo.bar), Bar)
        self.assertEquals(type(foo.baz), Baz)
@@ -12,6 +12,10 @@ from django.core.paginator import Paginator

settings.configure()

from django.contrib.sessions.tests import SessionTestsMixin
from mongoengine.django.sessions import SessionStore, MongoSession


class QuerySetTest(unittest.TestCase):

    def setUp(self):

@@ -88,3 +92,14 @@ class QuerySetTest(unittest.TestCase):
        end = p * 2
        start = end - 1
        self.assertEqual(t.render(Context(d)), u'%d:%d:' % (start, end))


class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase):
    backend = SessionStore

    def setUp(self):
        connect(db='mongoenginetest')
        MongoSession.drop_collection()
        super(MongoDBSessionTest, self).setUp()
@@ -96,7 +96,7 @@ class DocumentTest(unittest.TestCase):
        # Ensure Document isn't treated like an actual document
        self.assertFalse(hasattr(Document, '_fields'))

    def test_collection_name(self):
    def test_collection_naming(self):
        """Ensure that a collection with a specified name may be used.
        """

@@ -157,11 +157,12 @@ class DocumentTest(unittest.TestCase):
        }

        class BaseDocument(Document, BaseMixin):
            pass
            meta = {'allow_inheritance': True}

        class MyDocument(BaseDocument):
            pass

        self.assertEquals('mydocument', OldMixinNamingConvention._get_collection_name())
        self.assertEquals('basedocument', MyDocument._get_collection_name())

    def test_get_superclasses(self):
        """Ensure that the correct list of superclasses is assembled.
@@ -740,6 +741,28 @@ class DocumentTest(unittest.TestCase):
        self.assertEqual(info.keys(), ['_types_1_user_guid_1', '_id_', '_types_1_name_1'])
        Person.drop_collection()

    def test_disable_index_creation(self):
        """Tests setting auto_create_index to False on the connection will
        disable any index generation.
        """
        class User(Document):
            meta = {
                'indexes': ['user_guid'],
                'auto_create_index': False
            }
            user_guid = StringField(required=True)

        User.drop_collection()

        u = User(user_guid='123')
        u.save()

        self.assertEquals(1, User.objects.count())
        info = User.objects._collection.index_information()
        self.assertEqual(info.keys(), ['_id_'])
        User.drop_collection()

    def test_embedded_document_index(self):
        """Tests settings an index on an embedded document
        """
@@ -841,6 +864,26 @@ class DocumentTest(unittest.TestCase):
        query_plan = Test.objects(a=1).only('a').exclude('id').explain()
        self.assertTrue(query_plan['indexOnly'])

    def test_index_on_id(self):

        class BlogPost(Document):
            meta = {
                'indexes': [
                    ['categories', 'id']
                ],
                'allow_inheritance': False
            }

            title = StringField(required=True)
            description = StringField(required=True)
            categories = ListField()

        BlogPost.drop_collection()

        indexes = BlogPost.objects._collection.index_information()
        self.assertEquals(indexes['categories_1__id_1']['key'],
                          [('categories', 1), ('_id', 1)])

    def test_hint(self):

        class BlogPost(Document):
@@ -2375,6 +2418,22 @@ class DocumentTest(unittest.TestCase):

        self.assertRaises(InvalidDocumentError, throw_invalid_document_error)

    def test_invalid_son(self):
        """Raise an error if loading invalid data"""
        class Occurrence(EmbeddedDocument):
            number = IntField()

        class Word(Document):
            stem = StringField()
            count = IntField(default=1)
            forms = ListField(StringField(), default=list)
            occurs = ListField(EmbeddedDocumentField(Occurrence), default=list)

        def raise_invalid_document():
            Word._from_son({'stem': [1, 2, 3], 'forms': 1, 'count': 'one', 'occurs': {"hello": None}})

        self.assertRaises(InvalidDocumentError, raise_invalid_document)

    def test_reverse_delete_rule_cascade_and_nullify(self):
        """Ensure that a referenced document is also deleted upon deletion.
        """
@@ -2838,5 +2897,62 @@ class DocumentTest(unittest.TestCase):
                }
            )]), "1,2")


class ValidatorErrorTest(unittest.TestCase):

    def test_to_dict(self):
        """Ensure a ValidationError handles error to_dict correctly.
        """
        error = ValidationError('root')
        self.assertEquals(error.to_dict(), {})

        # 1st level error schema
        error.errors = {'1st': ValidationError('bad 1st'), }
        self.assertTrue('1st' in error.to_dict())
        self.assertEquals(error.to_dict()['1st'], 'bad 1st')

        # 2nd level error schema
        error.errors = {'1st': ValidationError('bad 1st', errors={
            '2nd': ValidationError('bad 2nd'),
        })}
        self.assertTrue('1st' in error.to_dict())
        self.assertTrue(isinstance(error.to_dict()['1st'], dict))
        self.assertTrue('2nd' in error.to_dict()['1st'])
        self.assertEquals(error.to_dict()['1st']['2nd'], 'bad 2nd')

        # moar levels
        error.errors = {'1st': ValidationError('bad 1st', errors={
            '2nd': ValidationError('bad 2nd', errors={
                '3rd': ValidationError('bad 3rd', errors={
                    '4th': ValidationError('Inception'),
                }),
            }),
        })}
        self.assertTrue('1st' in error.to_dict())
        self.assertTrue('2nd' in error.to_dict()['1st'])
        self.assertTrue('3rd' in error.to_dict()['1st']['2nd'])
        self.assertTrue('4th' in error.to_dict()['1st']['2nd']['3rd'])
        self.assertEquals(error.to_dict()['1st']['2nd']['3rd']['4th'],
                          'Inception')

        self.assertEquals(error.message, "root:\n1st.2nd.3rd.4th: Inception")

    def test_model_validation(self):

        class User(Document):
            username = StringField(primary_key=True)
            name = StringField(required=True)

        try:
            User().validate()
        except ValidationError, e:
            expected_error_message = """Errors encountered validating document:
username: Field is required ("username")
name: Field is required ("name")"""
            self.assertEquals(e.message, expected_error_message)
            self.assertEquals(e.to_dict(), {
                'username': 'Field is required ("username")',
                'name': u'Field is required ("name")'})


if __name__ == '__main__':
    unittest.main()
@@ -2,6 +2,7 @@ import datetime
import os
import unittest
import uuid
import StringIO

from decimal import Decimal

@@ -1481,6 +1482,21 @@ class FieldTest(unittest.TestCase):
        self.assertEquals(result.file.read(), text)
        self.assertEquals(result.file.content_type, content_type)
        result.file.delete()  # Remove file from GridFS
        PutFile.objects.delete()

        # Ensure file-like objects are stored
        putfile = PutFile()
        putstring = StringIO.StringIO()
        putstring.write(text)
        putstring.seek(0)
        putfile.file.put(putstring, content_type=content_type)
        putfile.save()
        putfile.validate()
        result = PutFile.objects.first()
        self.assertTrue(putfile == result)
        self.assertEquals(result.file.read(), text)
        self.assertEquals(result.file.content_type, content_type)
        result.file.delete()

        streamfile = StreamFile()
        streamfile.file.new_file(content_type=content_type)
@@ -1886,43 +1902,5 @@ class FieldTest(unittest.TestCase):
        post.validate()


class ValidatorErrorTest(unittest.TestCase):

    def test_to_dict(self):
        """Ensure a ValidationError handles error to_dict correctly.
        """
        error = ValidationError('root')
        self.assertEquals(error.to_dict(), {})

        # 1st level error schema
        error.errors = {'1st': ValidationError('bad 1st'), }
        self.assertTrue('1st' in error.to_dict())
        self.assertEquals(error.to_dict()['1st'], 'bad 1st')

        # 2nd level error schema
        error.errors = {'1st': ValidationError('bad 1st', errors={
            '2nd': ValidationError('bad 2nd'),
        })}
        self.assertTrue('1st' in error.to_dict())
        self.assertTrue(isinstance(error.to_dict()['1st'], dict))
        self.assertTrue('2nd' in error.to_dict()['1st'])
        self.assertEquals(error.to_dict()['1st']['2nd'], 'bad 2nd')

        # moar levels
        error.errors = {'1st': ValidationError('bad 1st', errors={
            '2nd': ValidationError('bad 2nd', errors={
                '3rd': ValidationError('bad 3rd', errors={
                    '4th': ValidationError('Inception'),
                }),
            }),
        })}
        self.assertTrue('1st' in error.to_dict())
        self.assertTrue('2nd' in error.to_dict()['1st'])
        self.assertTrue('3rd' in error.to_dict()['1st']['2nd'])
        self.assertTrue('4th' in error.to_dict()['1st']['2nd']['3rd'])
        self.assertEquals(error.to_dict()['1st']['2nd']['3rd']['4th'],
                          'Inception')


if __name__ == '__main__':
    unittest.main()
@@ -329,11 +329,11 @@ class QuerySetTest(unittest.TestCase):

        BlogPost(title="ABC", comments=[c1, c2]).save()

        BlogPost.objects(comments__by="joe").update(inc__comments__S__votes=1)
        BlogPost.objects(comments__by="jane").update(inc__comments__S__votes=1)

        post = BlogPost.objects.first()
        self.assertEquals(post.comments[0].by, 'joe')
        self.assertEquals(post.comments[0].votes, 4)
        self.assertEquals(post.comments[1].by, 'jane')
        self.assertEquals(post.comments[1].votes, 8)

        # Currently the $ operator only applies to the first matched item in
        # the query
@@ -480,7 +480,7 @@ class QuerySetTest(unittest.TestCase):
        self.assertEqual(person.name, "User C")

    def test_bulk_insert(self):
        """Ensure that query by array position works.
        """Ensure that bulk insert works
        """

        class Comment(EmbeddedDocument):

@@ -490,7 +490,7 @@ class QuerySetTest(unittest.TestCase):
        comments = ListField(EmbeddedDocumentField(Comment))

    class Blog(Document):
        title = StringField()
        title = StringField(unique=True)
        tags = ListField(StringField())
        posts = ListField(EmbeddedDocumentField(Post))
@@ -563,6 +563,23 @@ class QuerySetTest(unittest.TestCase):
        obj_id = Blog.objects.insert(blog1, load_bulk=False)
        self.assertEquals(obj_id.__class__.__name__, 'ObjectId')

        Blog.drop_collection()
        post3 = Post(comments=[comment1, comment1])
        blog1 = Blog(title="foo", posts=[post1, post2])
        blog2 = Blog(title="bar", posts=[post2, post3])
        blog3 = Blog(title="baz", posts=[post1, post2])
        Blog.objects.insert([blog1, blog2])

        def throw_operation_error_not_unique():
            Blog.objects.insert([blog2, blog3], safe=True)

        self.assertRaises(OperationError, throw_operation_error_not_unique)
        self.assertEqual(Blog.objects.count(), 2)

        Blog.objects.insert([blog2, blog3], write_options={'continue_on_error': True})
        self.assertEqual(Blog.objects.count(), 3)

    def test_slave_okay(self):
        """Ensures that a query can take slave_okay syntax
        """
@@ -1518,6 +1535,37 @@ class QuerySetTest(unittest.TestCase):

        BlogPost.drop_collection()

    def test_set_list_embedded_documents(self):

        class Author(EmbeddedDocument):
            name = StringField()

        class Message(Document):
            title = StringField()
            authors = ListField(EmbeddedDocumentField('Author'))

        Message.drop_collection()

        message = Message(title="hello", authors=[Author(name="Harry")])
        message.save()

        Message.objects(authors__name="Harry").update_one(
            set__authors__S=Author(name="Ross"))

        message = message.reload()
        self.assertEquals(message.authors[0].name, "Ross")

        Message.objects(authors__name="Ross").update_one(
            set__authors=[Author(name="Harry"),
                          Author(name="Ross"),
                          Author(name="Adam")])

        message = message.reload()
        self.assertEquals(message.authors[0].name, "Harry")
        self.assertEquals(message.authors[1].name, "Ross")
        self.assertEquals(message.authors[2].name, "Adam")

    def test_order_by(self):
        """Ensure that QuerySets may be ordered.
        """
tests/replicaset_connection.py (new file, +32)
@@ -0,0 +1,32 @@
import unittest
import pymongo
from pymongo import ReadPreference, ReplicaSetConnection

import mongoengine
from mongoengine import *
from mongoengine.connection import get_db, get_connection, ConnectionError


class ConnectionTest(unittest.TestCase):

    def tearDown(self):
        mongoengine.connection._connection_settings = {}
        mongoengine.connection._connections = {}
        mongoengine.connection._dbs = {}

    def test_replicaset_uri_passes_read_preference(self):
        """Requires a replica set called "rs" on port 27017
        """

        try:
            conn = connect(db='mongoenginetest', host="mongodb://localhost/mongoenginetest?replicaSet=rs", read_preference=ReadPreference.SECONDARY_ONLY)
        except ConnectionError, e:
            return

        if not isinstance(conn, ReplicaSetConnection):
            return

        self.assertEquals(conn.read_preference, ReadPreference.SECONDARY_ONLY)

if __name__ == '__main__':
    unittest.main()