Compare commits

88 Commits

ede9fcfb00, a3d43b77ca, e2b32b4bb3, 025c16c95d, 000eff73cc, 254efdde79,
f0d4e76418, ba7101ff92, a2457df45e, 305540f0fd, c2928d8a57, 7451244cd2,
d935b5764a, f3af76e38c, a7631223a3, 8aae4f0ed0, 542049f252, 9f3394dc6d,
06f5dc6ad7, dc3b09c218, ad15781d8f, ea53612822, c3a065dd33, 5cb2812231,
f8904a5504, eb1df23e68, e5648a4af9, a246154961, ce44843e27, 1a54dad643,
940dfff625, c2b15183cb, 27e8aa9c68, e1d8c6516a, eba81e368b, 74a3fd7596,
eeb5a83e98, d47134bbf1, ee725354db, 985bfd22de, 0d35e3a3e9, d94a191656,
0eafa4acd8, f27a53653b, 3b60adc8da, 626a3369b5, 4244e7569b, ef4b32aca7,
dcd23a0b4d, 5447c6e947, f1b97fbc8b, 4c8dfc3fc2, ceece5a7e2, 7e6b035ca2,
fbc46a52af, 8d2e7b4372, e7da9144f5, 2128e169f3, 8410d64daa, b2f78fadd9,
3656323f25, 2fe1c20475, 0fb976a80a, 3cf62de753, 06119b306d, 0493bbbc76,
4c9e90732e, 35f084ba76, f28f336026, 122d75f677, 12f6a3f5a3, 5d44e1d6ca,
04592c876b, c0571beec8, 1302316eb0, 18d8008b89, 4670f09a67, 159ef12ed7,
7a760f5640, 2b6c42a56c, ab4ff99105, 774895ec8c, c5ce96c391, b4a98a4000,
5f0d86f509, c96a1b00cf, 1eb6436682, a84e1f17bb
AUTHORS
@@ -77,7 +77,7 @@ that much better:
 * Adam Parrish
 * jpfarias
 * jonrscott
-* Alice Zoë Bevan-McGregor
+* Alice Zoë Bevan-McGregor (https://github.com/amcgregor/)
 * Stephen Young
 * tkloc
 * aid
@@ -161,3 +161,12 @@ that much better:
 * Jin Zhang
 * Daniel Axtens
 * Leo-Naeka
+* Ryan Witt (https://github.com/ryanwitt)
+* Jiequan (https://github.com/Jiequan)
+* hensom (https://github.com/hensom)
+* zhy0216 (https://github.com/zhy0216)
+* istinspring (https://github.com/istinspring)
+* Massimo Santini (https://github.com/mapio)
+* Nigel McNie (https://github.com/nigelmcnie)
+* ygbourhis (https://github.com/ygbourhis)
+* Bob Dickinson (https://github.com/BobDickinson)
@@ -54,6 +54,7 @@ Querying
 Fields
 ======
 
+.. autoclass:: mongoengine.base.fields.BaseField
 .. autoclass:: mongoengine.fields.StringField
 .. autoclass:: mongoengine.fields.URLField
 .. autoclass:: mongoengine.fields.EmailField
@@ -87,3 +88,8 @@ Fields
 .. autoclass:: mongoengine.fields.GridFSProxy
 .. autoclass:: mongoengine.fields.ImageGridFsProxy
 .. autoclass:: mongoengine.fields.ImproperlyConfigured
+
+Misc
+====
+
+.. autofunction:: mongoengine.common._import_class
@@ -2,6 +2,29 @@
 Changelog
 =========
 
+Changes in 0.8.2
+================
+- Added compare_indexes helper (#361)
+- Fixed cascading saves which weren't turned off as planned (#291)
+- Fixed Datastructures so instances are a Document or EmbeddedDocument (#363)
+- Improved cascading saves write performance (#361)
+- Fixed ambiguity and differing behaviour regarding field defaults (#349)
+- ImageFields now include PIL error messages if invalid (#353)
+- Added lock when calling doc.Delete() for when signals have no sender (#350)
+- Reload forces read preference to be PRIMARY (#355)
+- Querysets are now less restrictive when querying duplicate fields (#332, #333)
+- FileField now honouring db_alias (#341)
+- Removed customised __set__ change tracking in ComplexBaseField (#344)
+- Removed unused var in _get_changed_fields (#347)
+- Added pre_save_post_validation signal (#345)
+- DateTimeField now auto converts valid datetime isostrings into dates (#343)
+- DateTimeField now uses dateutil for parsing if available (#343)
+- Fixed Doc.objects(read_preference=X) not setting read preference (#352)
+- Django session ttl index expiry fixed (#329)
+- Fixed pickle.loads (#342)
+- Documentation fixes
+
 Changes in 0.8.1
 ================
 - Fixed Python 2.6 django auth importlib issue (#326)
@@ -27,9 +27,9 @@ MongoEngine includes a Django authentication backend, which uses MongoDB. The
 :class:`~mongoengine.Document`, but implements most of the methods and
 attributes that the standard Django :class:`User` model does - so the two are
 moderately compatible. Using this backend will allow you to store users in
-MongoDB but still use many of the Django authentication infrastucture (such as
+MongoDB but still use many of the Django authentication infrastructure (such as
 the :func:`login_required` decorator and the :func:`authenticate` function). To
-enable the MongoEngine auth backend, add the following to you **settings.py**
+enable the MongoEngine auth backend, add the following to your **settings.py**
 file::
 
     AUTHENTICATION_BACKENDS = (
@@ -46,7 +46,7 @@ Custom User model
 =================
 Django 1.5 introduced `Custom user Models
 <https://docs.djangoproject.com/en/dev/topics/auth/customizing/#auth-custom-user>`
-which can be used as an alternative the Mongoengine authentication backend.
+which can be used as an alternative to the MongoEngine authentication backend.
 
 The main advantage of this option is that other components relying on
 :mod:`django.contrib.auth` and supporting the new swappable user model are more
@@ -82,16 +82,16 @@ Sessions
 ========
 Django allows the use of different backend stores for its sessions. MongoEngine
 provides a MongoDB-based session backend for Django, which allows you to use
-sessions in you Django application with just MongoDB. To enable the MongoEngine
+sessions in your Django application with just MongoDB. To enable the MongoEngine
 session backend, ensure that your settings module has
 ``'django.contrib.sessions.middleware.SessionMiddleware'`` in the
 ``MIDDLEWARE_CLASSES`` field and ``'django.contrib.sessions'`` in your
 ``INSTALLED_APPS``. From there, all you need to do is add the following line
-into you settings module::
+into your settings module::
 
     SESSION_ENGINE = 'mongoengine.django.sessions'
 
-Django provides session cookie, which expires after ```SESSION_COOKIE_AGE``` seconds, but doesnt delete cookie at sessions backend, so ``'mongoengine.django.sessions'`` supports `mongodb TTL
+Django provides session cookie, which expires after ```SESSION_COOKIE_AGE``` seconds, but doesn't delete cookie at sessions backend, so ``'mongoengine.django.sessions'`` supports `mongodb TTL
 <http://docs.mongodb.org/manual/tutorial/expire-data/>`_.
 
 .. versionadded:: 0.2.1
@@ -36,7 +36,7 @@ MongoEngine supports :class:`~pymongo.mongo_replica_set_client.MongoReplicaSetClient`
 to use them please use a URI style connection and provide the `replicaSet` name in the
 connection kwargs.
 
-Read preferences are supported throught the connection or via individual
+Read preferences are supported through the connection or via individual
 queries by passing the read_preference ::
 
     Bar.objects().read_preference(ReadPreference.PRIMARY)
@@ -83,7 +83,7 @@ reasons.
 
 The :class:`~mongoengine.context_managers.switch_db` context manager allows
 you to change the database alias for a given class allowing quick and easy
-access to the same User document across databases.eg ::
+access to the same User document across databases::
 
     from mongoengine.context_managers import switch_db
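As a brief editor-added sketch of how the context manager above is typically used (the alias name is hypothetical and must already have been registered with ``register_connection``)::

    from mongoengine.context_managers import switch_db

    class User(Document):
        name = StringField()

    with switch_db(User, 'user-db-archive') as User:
        User(name='Ross').save()   # saved against the 'user-db-archive' alias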
@@ -100,9 +100,6 @@ arguments can be set on all fields:
 :attr:`db_field` (Default: None)
     The MongoDB field name.
 
-:attr:`name` (Default: None)
-    The mongoengine field name.
-
 :attr:`required` (Default: False)
     If set to True and the field is not set on the document instance, a
     :class:`~mongoengine.ValidationError` will be raised when the document is
@@ -129,6 +126,7 @@ arguments can be set on all fields:
         # instead to just an object
         values = ListField(IntField(), default=[1,2,3])
 
+.. note:: Unsetting a field with a default value will revert back to the default.
 
 :attr:`unique` (Default: False)
     When True, no documents in the collection will have the same value for this
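A tiny editor-added sketch of the behaviour the new note describes (class and field names are illustrative; this matches the ``__set__`` change in base/fields.py later in this diff)::

    class Page(Document):
        tags = ListField(StringField(), default=list)

    page = Page(tags=['mongoengine'])
    page.tags = None       # a field with a default reverts to it when unset
    assert page.tags == []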
@@ -403,7 +401,7 @@ either a single field name, or a list or tuple of field names::
 Skipping Document validation on save
 ------------------------------------
 You can also skip the whole document validation process by setting
-``validate=False`` when caling the :meth:`~mongoengine.document.Document.save`
+``validate=False`` when calling the :meth:`~mongoengine.document.Document.save`
 method::
 
     class Recipient(Document):
@@ -499,7 +497,6 @@ in this case use 'dot' notation to identify the value to index eg: `rank.title`
 Geospatial indexes
 ------------------
 
-
 The best geo index for mongodb is the new "2dsphere", which has an improved
 spherical model and provides better performance and more options when querying.
 The following fields will explicitly add a "2dsphere" index:
@@ -561,6 +558,14 @@ documentation for more information. A common usecase might be session data::
         ]
     }
 
+Comparing Indexes
+-----------------
+
+Use :func:`mongoengine.Document.compare_indexes` to compare actual indexes in
+the database to those that your document definitions define. This is useful
+for maintenance purposes and ensuring you have the correct indexes for your
+schema.
+
 Ordering
 ========
 A default ordering can be specified for your
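As an editor-added illustration of the helper documented above (document and index names are hypothetical; the return shape follows the implementation added later in this diff)::

    class Post(Document):
        title = StringField()
        meta = {'indexes': ['title']}

    # Compares the defined indexes with those that actually exist and
    # returns the difference, e.g. {'missing': [[('title', 1)]], 'extra': []}
    Post.compare_indexes()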
@@ -15,11 +15,8 @@ fetch documents from the database::
 
 .. note::
 
-    Once the iteration finishes (when :class:`StopIteration` is raised),
-    :meth:`~mongoengine.queryset.QuerySet.rewind` will be called so that the
-    :class:`~mongoengine.queryset.QuerySet` may be iterated over again. The
-    results of the first iteration are *not* cached, so the database will be hit
-    each time the :class:`~mongoengine.queryset.QuerySet` is iterated over.
+    As of MongoEngine 0.8 the querysets utilise a local cache. So iterating
+    it multiple times will only cause a single query.
 
 Filtering queries
 =================
@@ -1,5 +1,6 @@
 .. _signals:
 
+=======
 Signals
 =======
 
@@ -7,32 +8,95 @@ Signals
 
 .. note::
 
-  Signal support is provided by the excellent `blinker`_ library and
-  will gracefully fall back if it is not available.
+  Signal support is provided by the excellent `blinker`_ library. If you wish
+  to enable signal support this library must be installed, though it is not
+  required for MongoEngine to function.
 
-The following document signals exist in MongoEngine and are pretty self-explanatory:
+Overview
+--------
 
-* `mongoengine.signals.pre_init`
-* `mongoengine.signals.post_init`
-* `mongoengine.signals.pre_save`
-* `mongoengine.signals.post_save`
-* `mongoengine.signals.pre_delete`
-* `mongoengine.signals.post_delete`
-* `mongoengine.signals.pre_bulk_insert`
-* `mongoengine.signals.post_bulk_insert`
+Signals are found within the `mongoengine.signals` module. Unless
+specified signals receive no additional arguments beyond the `sender` class and
+`document` instance. Post-signals are only called if there were no exceptions
+raised during the processing of their related function.
 
-Example usage::
+Available signals include:
+
+`pre_init`
+  Called during the creation of a new :class:`~mongoengine.Document` or
+  :class:`~mongoengine.EmbeddedDocument` instance, after the constructor
+  arguments have been collected but before any additional processing has been
+  done to them. (I.e. assignment of default values.) Handlers for this signal
+  are passed the dictionary of arguments using the `values` keyword argument
+  and may modify this dictionary prior to returning.
+
+`post_init`
+  Called after all processing of a new :class:`~mongoengine.Document` or
+  :class:`~mongoengine.EmbeddedDocument` instance has been completed.
+
+`pre_save`
+  Called within :meth:`~mongoengine.document.Document.save` prior to performing
+  any actions.
+
+`pre_save_post_validation`
+  Called within :meth:`~mongoengine.document.Document.save` after validation
+  has taken place but before saving.
+
+`post_save`
+  Called within :meth:`~mongoengine.document.Document.save` after all actions
+  (validation, insert/update, cascades, clearing dirty flags) have completed
+  successfully. Passed the additional boolean keyword argument `created` to
+  indicate if the save was an insert or an update.
+
+`pre_delete`
+  Called within :meth:`~mongoengine.document.Document.delete` prior to
+  attempting the delete operation.
+
+`post_delete`
+  Called within :meth:`~mongoengine.document.Document.delete` upon successful
+  deletion of the record.
+
+`pre_bulk_insert`
+  Called after validation of the documents to insert, but prior to any data
+  being written. In this case, the `document` argument is replaced by a
+  `documents` argument representing the list of documents being inserted.
+
+`post_bulk_insert`
+  Called after a successful bulk insert operation. As per `pre_bulk_insert`,
+  the `document` argument is omitted and replaced with a `documents` argument.
+  An additional boolean argument, `loaded`, identifies the contents of
+  `documents` as either :class:`~mongoengine.Document` instances when `True` or
+  simply a list of primary key values for the inserted records if `False`.
+
+Attaching Events
+----------------
+
+After writing a handler function like the following::
+
+    import logging
+    from datetime import datetime
 
     from mongoengine import *
     from mongoengine import signals
 
+    def update_modified(sender, document):
+        document.modified = datetime.utcnow()
+
+You attach the event handler to your :class:`~mongoengine.Document` or
+:class:`~mongoengine.EmbeddedDocument` subclass::
+
+    class Record(Document):
+        modified = DateTimeField()
+
+    signals.pre_save.connect(update_modified)
+
+While this is not the most elaborate document model, it does demonstrate the
+concepts involved. As a more complete demonstration you can also define your
+handlers within your subclass::
+
     class Author(Document):
         name = StringField()
 
-        def __unicode__(self):
-            return self.name
-
         @classmethod
         def pre_save(cls, sender, document, **kwargs):
             logging.debug("Pre Save: %s" % document.name)
@@ -49,12 +113,40 @@ Example usage::
     signals.pre_save.connect(Author.pre_save, sender=Author)
     signals.post_save.connect(Author.post_save, sender=Author)
 
-ReferenceFields and signals
+Finally, you can also use this small decorator to quickly create a number of
+signals and attach them to your :class:`~mongoengine.Document` or
+:class:`~mongoengine.EmbeddedDocument` subclasses as class decorators::
+
+    def handler(event):
+        """Signal decorator to allow use of callback functions as class decorators."""
+
+        def decorator(fn):
+            def apply(cls):
+                event.connect(fn, sender=cls)
+                return cls
+
+            fn.apply = apply
+            return fn
+
+        return decorator
+
+Using the first example of updating a modification time the code is now much
+cleaner looking while still allowing manual execution of the callback::
+
+    @handler(signals.pre_save)
+    def update_modified(sender, document):
+        document.modified = datetime.utcnow()
+
+    @update_modified.apply
+    class Record(Document):
+        modified = DateTimeField()
+
+
+ReferenceFields and Signals
 ---------------------------
 
 Currently `reverse_delete_rules` do not trigger signals on the other part of
-the relationship. If this is required you must manually handled the
+the relationship. If this is required you must manually handle the
 reverse deletion.
 
 .. _blinker: http://pypi.python.org/pypi/blinker
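Since this changeset also adds a `pre_save_post_validation` signal (see the changelog hunk above), here is a minimal editor-added sketch of connecting it, assuming `blinker` is installed and reusing the `Author` document from the example above::

    from mongoengine import signals

    def log_validated(sender, document, **kwargs):
        # fires after validation but before the write; the 'created' kwarg
        # says whether the save will be an insert or an update
        print 'Saving %s (insert=%s)' % (document.name, kwargs.get('created'))

    signals.pre_save_post_validation.connect(log_validated, sender=Author)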
@@ -298,5 +298,5 @@ Learning more about mongoengine
 -------------------------------
 
 If you got this far you've made a great start, so well done! The next step on
-your mongoengine journey is the `full user guide <guide/index>`_, where you
+your mongoengine journey is the `full user guide <guide/index.html>`_, where you
 can learn in depth about how to use mongoengine and mongodb.
@@ -91,6 +91,13 @@ the case and the data is set only in the ``document._data`` dictionary: ::
     File "<stdin>", line 1, in <module>
     AttributeError: 'Animal' object has no attribute 'size'
 
+The Document class has introduced a reserved function `clean()`, which will be
+called before saving the document. If your document class happens to have a method
+with the same name, please rename it.
+
+    def clean(self):
+        pass
+
 ReferenceField
 --------------
 
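For readers hitting the rename advice above, a hedged editor sketch of a typical `clean()` (field names are illustrative; `clean` may normalise data or raise `ValidationError` before the save proceeds)::

    class Essay(Document):
        status = StringField(choices=('Published', 'Draft'), required=True)
        pub_date = DateTimeField()

        def clean(self):
            # called automatically from save() before validation completes
            if self.status == 'Draft' and self.pub_date is not None:
                raise ValidationError('Draft entries should not have a publication date.')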
@@ -116,13 +123,17 @@ eg::
 
     # Mark all ReferenceFields as dirty and save
     for p in Person.objects:
-        p._mark_as_dirty('parent')
-        p._mark_as_dirty('friends')
+        p._mark_as_changed('parent')
+        p._mark_as_changed('friends')
         p.save()
 
 `An example test migration for ReferenceFields is available on github
 <https://github.com/MongoEngine/mongoengine/blob/master/tests/migration/refrencefield_dbref_to_object_id.py>`_.
 
+.. Note:: Internally mongoengine handles ReferenceFields the same, so they are
+   converted to DBRef on loading and ObjectIds or DBRefs depending on settings
+   on storage.
+
 UUIDField
 ---------
 
@@ -143,9 +154,9 @@ eg::
     class Animal(Document):
        uuid = UUIDField()
 
-    # Mark all ReferenceFields as dirty and save
+    # Mark all UUIDFields as dirty and save
     for a in Animal.objects:
-        a._mark_as_dirty('uuid')
+        a._mark_as_changed('uuid')
         a.save()
 
 `An example test migration for UUIDFields is available on github
@@ -172,9 +183,9 @@ eg::
     class Person(Document):
         balance = DecimalField()
 
-    # Mark all ReferenceFields as dirty and save
+    # Mark all DecimalField's as dirty and save
     for p in Person.objects:
-        p._mark_as_dirty('balance')
+        p._mark_as_changed('balance')
         p.save()
 
 .. note:: DecimalField's have also been improved with the addition of precision
@@ -15,7 +15,7 @@ import django
 __all__ = (list(document.__all__) + fields.__all__ + connection.__all__ +
            list(queryset.__all__) + signals.__all__ + list(errors.__all__))
 
-VERSION = (0, 8, 1)
+VERSION = (0, 8, 2)
 
 
 def get_version():
@@ -13,7 +13,11 @@ class BaseDict(dict):
     _name = None
 
     def __init__(self, dict_items, instance, name):
-        self._instance = weakref.proxy(instance)
+        Document = _import_class('Document')
+        EmbeddedDocument = _import_class('EmbeddedDocument')
+
+        if isinstance(instance, (Document, EmbeddedDocument)):
+            self._instance = weakref.proxy(instance)
         self._name = name
         return super(BaseDict, self).__init__(dict_items)
 
@@ -80,7 +84,11 @@ class BaseList(list):
     _name = None
 
     def __init__(self, list_items, instance, name):
-        self._instance = weakref.proxy(instance)
+        Document = _import_class('Document')
+        EmbeddedDocument = _import_class('EmbeddedDocument')
+
+        if isinstance(instance, (Document, EmbeddedDocument)):
+            self._instance = weakref.proxy(instance)
         self._name = name
         return super(BaseList, self).__init__(list_items)
 
@@ -152,7 +152,8 @@ class BaseDocument(object):
         if isinstance(data["_data"], SON):
             data["_data"] = self.__class__._from_son(data["_data"])._data
         for k in ('_changed_fields', '_initialised', '_created', '_data'):
-            setattr(self, k, data[k])
+            if k in data:
+                setattr(self, k, data[k])
 
     def __iter__(self):
         if 'id' in self._fields and 'id' not in self._fields_ordered:
@@ -214,7 +215,7 @@ class BaseDocument(object):
         return not self.__eq__(other)
 
     def __hash__(self):
-        if self.pk is None:
+        if getattr(self, 'pk', None) is None:
             # For new object
             return super(BaseDocument, self).__hash__()
         else:
@@ -391,7 +392,7 @@ class BaseDocument(object):
             if field_value:
                 field_value._clear_changed_fields()
 
-    def _get_changed_fields(self, key='', inspected=None):
+    def _get_changed_fields(self, inspected=None):
         """Returns a list of all fields that have explicitly been changed.
         """
         EmbeddedDocument = _import_class("EmbeddedDocument")
@@ -422,7 +423,7 @@ class BaseDocument(object):
             if (isinstance(field, (EmbeddedDocument, DynamicEmbeddedDocument))
                and db_field_name not in _changed_fields):
                 # Find all embedded fields that have been changed
-                changed = field._get_changed_fields(key, inspected)
+                changed = field._get_changed_fields(inspected)
                 _changed_fields += ["%s%s" % (key, k) for k in changed if k]
             elif (isinstance(field, (list, tuple, dict)) and
                     db_field_name not in _changed_fields):
@@ -436,7 +437,7 @@ class BaseDocument(object):
                     if not hasattr(value, '_get_changed_fields'):
                         continue
                     list_key = "%s%s." % (key, index)
-                    changed = value._get_changed_fields(list_key, inspected)
+                    changed = value._get_changed_fields(inspected)
                     _changed_fields += ["%s%s" % (list_key, k)
                                         for k in changed if k]
         return _changed_fields
@@ -36,6 +36,29 @@ class BaseField(object):
                  unique=False, unique_with=None, primary_key=False,
                  validation=None, choices=None, verbose_name=None,
                  help_text=None):
+        """
+        :param db_field: The database field to store this field in
+            (defaults to the name of the field)
+        :param name: Deprecated - use db_field
+        :param required: If the field is required. Whether it has to have a
+            value or not. Defaults to False.
+        :param default: (optional) The default value for this field if no value
+            has been set (or if the value has been unset). It can be a
+            callable.
+        :param unique: Is the field value unique or not. Defaults to False.
+        :param unique_with: (optional) The other field this field should be
+            unique with.
+        :param primary_key: Mark this field as the primary key. Defaults to False.
+        :param validation: (optional) A callable to validate the value of the
+            field. Generally this is deprecated in favour of the
+            `FIELD.validate` method
+        :param choices: (optional) The valid choices
+        :param verbose_name: (optional) The verbose name for the field.
+            Designed to be human readable and is often used when generating
+            model forms from the document model.
+        :param help_text: (optional) The help text for this field and is often
+            used when generating model forms from the document model.
+        """
         self.db_field = (db_field or name) if not primary_key else '_id'
         if name:
             msg = "Fields' 'name' attribute deprecated in favour of 'db_field'"
@@ -59,20 +82,14 @@ class BaseField(object):
         BaseField.creation_counter += 1
 
     def __get__(self, instance, owner):
-        """Descriptor for retrieving a value from a field in a document. Do
-        any necessary conversion between Python and MongoDB types.
+        """Descriptor for retrieving a value from a field in a document.
         """
         if instance is None:
             # Document class being used rather than a document object
             return self
-        # Get value from document instance if available, if not use default
-        value = instance._data.get(self.name)
 
-        if value is None:
-            value = self.default
-            # Allow callable default values
-            if callable(value):
-                value = value()
+        # Get value from document instance if available
+        value = instance._data.get(self.name)
 
         EmbeddedDocument = _import_class('EmbeddedDocument')
         if isinstance(value, EmbeddedDocument) and value._instance is None:
@@ -82,6 +99,14 @@ class BaseField(object):
     def __set__(self, instance, value):
         """Descriptor for assigning a value to a field in a document.
         """
+
+        # If setting to None and there is a default
+        # then set the value to the default value
+        if value is None and self.default is not None:
+            value = self.default
+            if callable(value):
+                value = value()
+
         if instance._initialised:
             try:
                 if (self.name not in instance._data or
@@ -205,12 +230,6 @@ class ComplexBaseField(BaseField):
 
         return value
 
-    def __set__(self, instance, value):
-        """Descriptor for assigning a value to a field in a document.
-        """
-        instance._data[self.name] = value
-        instance._mark_as_changed(self.name)
-
     def to_python(self, value):
         """Convert a MongoDB-compatible type to a Python type.
         """
@@ -2,7 +2,19 @@ _class_registry_cache = {}
 
 
 def _import_class(cls_name):
-    """Cached mechanism for imports"""
+    """Cache mechanism for imports.
+
+    Due to complications of circular imports mongoengine needs to do lots of
+    inline imports in functions. This is inefficient as classes are
+    imported repeatedly throughout the mongoengine code. This is
+    compounded by some recursive functions requiring inline imports.
+
+    :mod:`mongoengine.common` provides a single point to import all these
+    classes. Circular imports aren't an issue as it dynamically imports the
+    class when first needed. Subsequent calls to the
+    :func:`~mongoengine.common._import_class` can then directly retrieve the
+    class from the :data:`mongoengine.common._class_registry_cache`.
+    """
    if cls_name in _class_registry_cache:
        return _class_registry_cache.get(cls_name)
 
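An editor-added sketch of the call pattern this docstring describes, exactly as it appears elsewhere in this changeset::

    from mongoengine.common import _import_class

    Document = _import_class('Document')                   # imported and cached
    EmbeddedDocument = _import_class('EmbeddedDocument')   # later calls hit the cache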
@@ -189,7 +189,8 @@ class query_counter(object):
 
     def __eq__(self, value):
         """ == Compare querycounter. """
-        return value == self._get_count()
+        counter = self._get_count()
+        return value == counter
 
     def __ne__(self, value):
         """ != Compare querycounter. """
@@ -221,6 +222,7 @@ class query_counter(object):
 
     def _get_count(self):
         """ Get the number of queries. """
-        count = self.db.system.profile.find().count() - self.counter
+        ignore_query = {"ns": {"$ne": "%s.system.indexes" % self.db.name}}
+        count = self.db.system.profile.find(ignore_query).count() - self.counter
         self.counter += 1
         return count
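For context, a minimal editor sketch of how `query_counter` is used (it is a context manager from `mongoengine.context_managers`; an active connection is assumed and the `Person` query is hypothetical)::

    from mongoengine.context_managers import query_counter

    with query_counter() as q:
        assert q == 0            # exercises the __eq__ shown above
        Person.objects.first()   # one profiled query
        assert q == 1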
@@ -39,7 +39,7 @@ class MongoSession(Document):
         'indexes': [
             {
                 'fields': ['expire_date'],
-                'expireAfterSeconds': settings.SESSION_COOKIE_AGE
+                'expireAfterSeconds': 0
             }
         ]
     }
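The new value leans on MongoDB TTL semantics: with ``expireAfterSeconds: 0`` a document is removed once the clock passes the datetime stored in the indexed field (here ``expire_date``), instead of a fixed interval after insertion. An editor-added sketch of the same pattern on an ordinary document (names illustrative)::

    import datetime

    class UserSession(Document):
        expire_date = DateTimeField()
        meta = {
            'indexes': [
                {'fields': ['expire_date'], 'expireAfterSeconds': 0}
            ]
        }

    # reaped by MongoDB's TTL monitor roughly an hour from now
    UserSession(expire_date=datetime.datetime.utcnow() +
                datetime.timedelta(hours=1)).save()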
@@ -1,10 +1,14 @@
 import warnings
 
+import hashlib
 import pymongo
 import re
 
+from pymongo.read_preferences import ReadPreference
+from bson import ObjectId
 from bson.dbref import DBRef
 from mongoengine import signals
+from mongoengine.common import _import_class
 from mongoengine.base import (DocumentMetaclass, TopLevelDocumentMetaclass,
                               BaseDocument, BaseDict, BaseList,
                               ALLOW_INHERITANCE, get_document)
@@ -17,6 +21,19 @@ __all__ = ('Document', 'EmbeddedDocument', 'DynamicDocument',
            'InvalidCollectionError', 'NotUniqueError', 'MapReduceDocument')
 
 
+def includes_cls(fields):
+    """ Helper function used for ensuring and comparing indexes
+    """
+
+    first_field = None
+    if len(fields):
+        if isinstance(fields[0], basestring):
+            first_field = fields[0]
+        elif isinstance(fields[0], (list, tuple)) and len(fields[0]):
+            first_field = fields[0][0]
+    return first_field == '_cls'
+
+
 class InvalidCollectionError(Exception):
     pass
 
@@ -52,6 +69,9 @@ class EmbeddedDocument(BaseDocument):
             return self._data == other._data
         return False
 
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
 
 class Document(BaseDocument):
     """The base class used for defining the structure and properties of
@@ -179,8 +199,8 @@ class Document(BaseDocument):
             will force an fsync on the primary server.
         :param cascade: Sets the flag for cascading saves. You can set a
             default by setting "cascade" in the document __meta__
-        :param cascade_kwargs: optional kwargs dictionary to be passed throw
-            to cascading saves
+        :param cascade_kwargs: (optional) kwargs dictionary to be passed through
+            to cascading saves. Implies ``cascade=True``.
         :param _refs: A list of processed references used in cascading saves
 
         .. versionchanged:: 0.5
@@ -189,24 +209,28 @@ class Document(BaseDocument):
             :class:`~bson.dbref.DBRef` objects that have changes are
             saved as well.
         .. versionchanged:: 0.6
-            Cascade saves are optional = defaults to True, if you want
+            Added cascading saves
+        .. versionchanged:: 0.8
+            Cascade saves are optional and default to False. If you want
             fine grain control then you can turn off using document
-            meta['cascade'] = False  Also you can pass different kwargs to
+            meta['cascade'] = True. Also you can pass different kwargs to
             the cascade save using cascade_kwargs which overwrites the
-            existing kwargs with custom values
+            existing kwargs with custom values.
         """
         signals.pre_save.send(self.__class__, document=self)
 
         if validate:
             self.validate(clean=clean)
 
-        if not write_concern:
-            write_concern = {}
+        if write_concern is None:
+            write_concern = {"w": 1}
 
         doc = self.to_mongo()
 
         created = ('_id' not in doc or self._created or force_insert)
 
+        signals.pre_save_post_validation.send(self.__class__, document=self, created=created)
+
         try:
             collection = self._get_collection()
             if created:
@@ -242,8 +266,10 @@ class Document(BaseDocument):
                                                 upsert=True, **write_concern)
                 created = is_new_object(last_error)
 
-            cascade = (self._meta.get('cascade', True)
-                       if cascade is None else cascade)
+            if cascade is None:
+                cascade = self._meta.get('cascade', False) or cascade_kwargs is not None
+
             if cascade:
                 kwargs = {
                     "force_insert": force_insert,
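Taken together, the hunks above change ``save()`` semantics in 0.8.2: the default write concern is now an explicit ``{"w": 1}``, `pre_save_post_validation` fires between validation and the write, and cascading is opt-in (passing ``cascade_kwargs`` implies it). A small editor sketch with a hypothetical `Page` document::

    page = Page(title='Hello')
    page.save()                                        # write_concern defaults to {'w': 1}
    page.save(write_concern={'w': 2, 'fsync': True})   # stricter durability
    page.save(cascade=True)                            # opt in to cascading reference saves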
@@ -276,15 +302,17 @@ class Document(BaseDocument):
     def cascade_save(self, *args, **kwargs):
         """Recursively saves any references /
            generic references on an objects"""
-        import fields
         _refs = kwargs.get('_refs', []) or []
 
+        ReferenceField = _import_class('ReferenceField')
+        GenericReferenceField = _import_class('GenericReferenceField')
+
         for name, cls in self._fields.items():
-            if not isinstance(cls, (fields.ReferenceField,
-                                    fields.GenericReferenceField)):
+            if not isinstance(cls, (ReferenceField,
+                                    GenericReferenceField)):
                 continue
 
-            ref = getattr(self, name)
+            ref = self._data.get(name)
             if not ref or isinstance(ref, DBRef):
                 continue
 
@@ -344,11 +372,10 @@ class Document(BaseDocument):
         signals.pre_delete.send(self.__class__, document=self)
 
         try:
-            self._qs.filter(**self._object_key).delete(write_concern=write_concern)
+            self._qs.filter(**self._object_key).delete(write_concern=write_concern, _from_doc_delete=True)
         except pymongo.errors.OperationFailure, err:
             message = u'Could not delete document (%s)' % err.message
             raise OperationError(message)
-
         signals.post_delete.send(self.__class__, document=self)
 
     def switch_db(self, db_alias):
@@ -419,8 +446,9 @@
         .. versionchanged:: 0.6  Now chainable
         """
         id_field = self._meta['id_field']
-        obj = self._qs.filter(**{id_field: self[id_field]}
-                              ).limit(1).select_related(max_depth=max_depth)
+        obj = self._qs.read_preference(ReadPreference.PRIMARY).filter(
+            **{id_field: self[id_field]}).limit(1).select_related(max_depth=max_depth)
+
         if obj:
             obj = obj[0]
         else:
@@ -521,14 +549,6 @@ class Document(BaseDocument):
         # an extra index on _cls, as mongodb will use the existing
         # index to service queries against _cls
         cls_indexed = False
-        def includes_cls(fields):
-            first_field = None
-            if len(fields):
-                if isinstance(fields[0], basestring):
-                    first_field = fields[0]
-                elif isinstance(fields[0], (list, tuple)) and len(fields[0]):
-                    first_field = fields[0][0]
-            return first_field == '_cls'
 
         # Ensure document-defined indexes are created
         if cls._meta['index_specs']:
@@ -549,6 +569,90 @@ class Document(BaseDocument):
             collection.ensure_index('_cls', background=background,
                                     **index_opts)
 
+    @classmethod
+    def list_indexes(cls, go_up=True, go_down=True):
+        """ Lists all of the indexes that should be created for given
+        collection. It includes all the indexes from super- and sub-classes.
+        """
+
+        if cls._meta.get('abstract'):
+            return []
+
+        # get all the base classes, subclasses and siblings
+        classes = []
+
+        def get_classes(cls):
+
+            if (cls not in classes and
+                    isinstance(cls, TopLevelDocumentMetaclass)):
+                classes.append(cls)
+
+            for base_cls in cls.__bases__:
+                if (isinstance(base_cls, TopLevelDocumentMetaclass) and
+                        base_cls != Document and
+                        not base_cls._meta.get('abstract') and
+                        base_cls._get_collection().full_name == cls._get_collection().full_name and
+                        base_cls not in classes):
+                    classes.append(base_cls)
+                    get_classes(base_cls)
+            for subclass in cls.__subclasses__():
+                if (isinstance(base_cls, TopLevelDocumentMetaclass) and
+                        subclass._get_collection().full_name == cls._get_collection().full_name and
+                        subclass not in classes):
+                    classes.append(subclass)
+                    get_classes(subclass)
+
+        get_classes(cls)
+
+        # get the indexes spec for all of the gathered classes
+        def get_indexes_spec(cls):
+            indexes = []
+
+            if cls._meta['index_specs']:
+                index_spec = cls._meta['index_specs']
+                for spec in index_spec:
+                    spec = spec.copy()
+                    fields = spec.pop('fields')
+                    indexes.append(fields)
+            return indexes
+
+        indexes = []
+        for cls in classes:
+            for index in get_indexes_spec(cls):
+                if index not in indexes:
+                    indexes.append(index)
+
+        # finish up by appending { '_id': 1 } and { '_cls': 1 }, if needed
+        if [(u'_id', 1)] not in indexes:
+            indexes.append([(u'_id', 1)])
+        if (cls._meta.get('index_cls', True) and
+                cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) is True):
+            indexes.append([(u'_cls', 1)])
+
+        return indexes
+
+    @classmethod
+    def compare_indexes(cls):
+        """ Compares the indexes defined in MongoEngine with the ones existing
+        in the database. Returns any missing/extra indexes.
+        """
+
+        required = cls.list_indexes()
+        existing = [info['key'] for info in cls._get_collection().index_information().values()]
+        missing = [index for index in required if index not in existing]
+        extra = [index for index in existing if index not in required]
+
+        # if { _cls: 1 } is missing, make sure it's *really* necessary
+        if [(u'_cls', 1)] in missing:
+            cls_obsolete = False
+            for index in existing:
+                if includes_cls(index) and index not in extra:
+                    cls_obsolete = True
+                    break
+            if cls_obsolete:
+                missing.remove([(u'_cls', 1)])
+
+        return {'missing': missing, 'extra': extra}
+
+
 class DynamicDocument(Document):
     """A Dynamic Document class allowing flexible, expandable and uncontrolled
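To make the two new classmethods concrete, an editor-added sketch under assumed definitions (inheriting documents share one collection, so ``_cls`` is appended when inheritance is allowed)::

    class Animal(Document):
        name = StringField()
        meta = {'allow_inheritance': True, 'indexes': ['name']}

    class Cat(Animal):
        whiskers = IntField()

    # Index specs gathered across the hierarchy, plus _id and _cls, e.g.
    # [[('name', 1)], [(u'_id', 1)], [(u'_cls', 1)]]
    Animal.list_indexes()

    # Diff against the indexes actually present in MongoDB
    Animal.compare_indexes()   # {'missing': [...], 'extra': [...]}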
@@ -8,6 +8,13 @@ import uuid
 import warnings
 from operator import itemgetter
 
+try:
+    import dateutil
+except ImportError:
+    dateutil = None
+else:
+    import dateutil.parser
+
 import pymongo
 import gridfs
 from bson import Binary, DBRef, SON, ObjectId
@@ -347,6 +354,11 @@ class BooleanField(BaseField):
 class DateTimeField(BaseField):
     """A datetime field.
 
+    Uses the python-dateutil library if available, otherwise falls back to
+    time.strptime to parse the dates. Note: python-dateutil's parser is fully
+    featured and when installed you can utilise it to convert varying types of
+    date formats into valid python datetime objects.
+
     Note: Microseconds are rounded to the nearest millisecond.
         Pre UTC microsecond support is effectively broken.
         Use :class:`~mongoengine.fields.ComplexDateTimeField` if you
@@ -354,13 +366,11 @@ class DateTimeField(BaseField):
     """
 
     def validate(self, value):
-        if not isinstance(value, (datetime.datetime, datetime.date)):
+        new_value = self.to_mongo(value)
+        if not isinstance(new_value, (datetime.datetime, datetime.date)):
             self.error(u'cannot parse date "%s"' % value)
 
     def to_mongo(self, value):
-        return self.prepare_query_value(None, value)
-
-    def prepare_query_value(self, op, value):
         if value is None:
             return value
         if isinstance(value, datetime.datetime):
@@ -370,8 +380,16 @@ class DateTimeField(BaseField):
         if callable(value):
             return value()
 
+        if not isinstance(value, basestring):
+            return None
+
         # Attempt to parse a datetime:
-        # value = smart_str(value)
+        if dateutil:
+            try:
+                return dateutil.parser.parse(value)
+            except ValueError:
+                return None
+
         # split usecs, because they are not recognized by strptime.
         if '.' in value:
             try:
@@ -396,6 +414,9 @@ class DateTimeField(BaseField):
             except ValueError:
                 return None
 
+    def prepare_query_value(self, op, value):
+        return self.to_mongo(value)
+
 
 class ComplexDateTimeField(StringField):
     """
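A short editor sketch of the parsing behaviour added above: string values are now accepted and converted, via `dateutil.parser` when python-dateutil is installed, otherwise via the `strptime` fallback (document name hypothetical)::

    class LogEntry(Document):
        timestamp = DateTimeField()

    entry = LogEntry(timestamp='2013-11-05 14:30:05')
    entry.validate()   # the string parses cleanly to a datetime
    entry.save()       # stored as a proper datetime in MongoDB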
@@ -1194,6 +1215,7 @@ class FileField(BaseField):
 
             # Create a new proxy object as we don't already have one
             instance._data[key] = self.proxy_class(key=key, instance=instance,
+                                                   db_alias=self.db_alias,
                                                    collection_name=self.collection_name)
             instance._data[key].put(value)
         else:
@@ -1237,8 +1259,8 @@ class ImageGridFsProxy(GridFSProxy):
         try:
             img = Image.open(file_obj)
             img_format = img.format
-        except:
-            raise ValidationError('Invalid image')
+        except Exception, e:
+            raise ValidationError('Invalid image: %s' % e)
 
         if (field.size and (img.size[0] > field.size['width'] or
                             img.size[1] > field.size['height'])):
@@ -71,7 +71,10 @@ class QuerySet(object):
         # If inheritance is allowed, only return instances and instances of
         # subclasses of the class being used
         if document._meta.get('allow_inheritance') is True:
-            self._initial_query = {"_cls": {"$in": self._document._subclasses}}
+            if len(self._document._subclasses) == 1:
+                self._initial_query = {"_cls": self._document._subclasses[0]}
+            else:
+                self._initial_query = {"_cls": {"$in": self._document._subclasses}}
             self._loaded_fields = QueryFieldList(always_include=['_cls'])
         self._cursor_obj = None
         self._limit = None
@@ -104,13 +107,17 @@ class QuerySet(object):
                 raise InvalidQueryError(msg)
             query &= q_obj
 
-        queryset = self.clone()
+        if read_preference is None:
+            queryset = self.clone()
+        else:
+            # Use the clone provided when setting read_preference
+            queryset = self.read_preference(read_preference)
+
         queryset._query_obj &= query
         queryset._mongo_query = None
         queryset._cursor_obj = None
-        if read_preference is not None:
-            queryset.read_preference(read_preference)
         queryset._class_check = class_check
 
         return queryset
 
     def __len__(self):
@@ -178,7 +185,6 @@ class QuerySet(object):
            try:
                queryset._cursor_obj = queryset._cursor[key]
                queryset._skip, queryset._limit = key.start, key.stop
-                queryset._limit
                if key.start and key.stop:
                    queryset._limit = key.stop - key.start
            except IndexError, err:
@@ -342,7 +348,7 @@ class QuerySet(object):
        """
        Document = _import_class('Document')

-        if not write_concern:
+        if write_concern is None:
            write_concern = {}

        docs = doc_or_docs
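
Note: testing `write_concern is None` rather than falsiness is the standard sentinel-default idiom: only an omitted argument is replaced with the default, so an explicitly passed dict is always used as given. A minimal sketch of the idiom:

    def insert(self, doc_or_docs, write_concern=None):
        if write_concern is None:
            write_concern = {}  # default only when the caller passed nothing
        # ... proceed with the caller's write concern otherwise
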
@@ -403,7 +409,7 @@ class QuerySet(object):
        self._len = count
        return count

-    def delete(self, write_concern=None):
+    def delete(self, write_concern=None, _from_doc_delete=False):
        """Delete the documents matched by the query.

        :param write_concern: Extra keyword arguments are passed down which
@@ -412,20 +418,25 @@ class QuerySet(object):
            ``save(..., write_concern={w: 2, fsync: True}, ...)`` will
            wait until at least two servers have recorded the write and
            will force an fsync on the primary server.
+        :param _from_doc_delete: True when called from document delete therefore
+            signals will have been triggered so don't loop.
        """
        queryset = self.clone()
        doc = queryset._document

+        if write_concern is None:
+            write_concern = {}
+
+        # Handle deletes where skips or limits have been applied or
+        # there is an untriggered delete signal
        has_delete_signal = signals.signals_available and (
            signals.pre_delete.has_receivers_for(self._document) or
            signals.post_delete.has_receivers_for(self._document))

-        if not write_concern:
-            write_concern = {}
-
-        # Handle deletes where skips or limits have been applied or has a
-        # delete signal
-        if queryset._skip or queryset._limit or has_delete_signal:
+        call_document_delete = (queryset._skip or queryset._limit or
+                                has_delete_signal) and not _from_doc_delete
+
+        if call_document_delete:
            for doc in queryset:
                doc.delete(write_concern=write_concern)
            return
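
Note: `_from_doc_delete` breaks what would otherwise be mutual recursion. When skip/limit or connected delete-signal receivers force the per-document path, each `doc.delete()` ultimately issues another queryset delete; that inner call is flagged so it goes straight to the collection. A hypothetical sketch of the cycle being broken (the exact `Document.delete` internals are not part of this hunk):

    # QuerySet.delete() with a connected receiver loops over documents:
    #     for doc in queryset:
    #         doc.delete()          # fires pre_delete / post_delete
    # Document.delete() removes itself through a queryset, but marks it:
    #     self._qs.filter(pk=self.pk).delete(_from_doc_delete=True)
    # The flagged inner call skips the per-document loop and deletes directly.
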
@@ -479,7 +490,7 @@ class QuerySet(object):
        if not update and not upsert:
            raise OperationError("No update parameters, would remove data")

-        if not write_concern:
+        if write_concern is None:
            write_concern = {}

        queryset = self.clone()
@@ -1479,7 +1490,7 @@ class QuerySet(object):

    # Deprecated
    def ensure_index(self, **kwargs):
-        """Deprecated use :func:`~Document.ensure_index`"""
+        """Deprecated use :func:`Document.ensure_index`"""
        msg = ("Doc.objects()._ensure_index() is deprecated. "
               "Use Doc.ensure_index() instead.")
        warnings.warn(msg, DeprecationWarning)
@@ -23,6 +23,10 @@ class QNodeVisitor(object):
        return query


+class DuplicateQueryConditionsError(InvalidQueryError):
+    pass
+
+
class SimplificationVisitor(QNodeVisitor):
    """Simplifies query trees by combinging unnecessary 'and' connection nodes
    into a single Q-object.
@@ -33,7 +37,11 @@ class SimplificationVisitor(QNodeVisitor):
        # The simplification only applies to 'simple' queries
        if all(isinstance(node, Q) for node in combination.children):
            queries = [n.query for n in combination.children]
-            return Q(**self._query_conjunction(queries))
+            try:
+                return Q(**self._query_conjunction(queries))
+            except DuplicateQueryConditionsError:
+                # Cannot be simplified
+                pass
        return combination

    def _query_conjunction(self, queries):
@@ -47,8 +55,7 @@ class SimplificationVisitor(QNodeVisitor):
            # to a single field
            intersection = ops.intersection(query_ops)
            if intersection:
-                msg = 'Duplicate query conditions: '
-                raise InvalidQueryError(msg + ', '.join(intersection))
+                raise DuplicateQueryConditionsError()

            query_ops.update(ops)
            combined_query.update(copy.deepcopy(query))
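
Note: the net behaviour change of these two hunks is that ANDing two conditions on the same field no longer raises `InvalidQueryError`; the simplifier catches `DuplicateQueryConditionsError` internally and the combination is emitted as an explicit `$and`. As the updated Q tests later in this changeset assert:

    query = (Q(x__lt=7) & Q(x__lt=3)).to_query(TestDoc)
    # {'$and': [{'x': {'$lt': 7}}, {'x': {'$lt': 3}}]}
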
@@ -122,8 +129,7 @@ class QCombination(QNode):
            # If the child is a combination of the same type, we can merge its
            # children directly into this combinations children
            if isinstance(node, QCombination) and node.operation == operation:
-                # self.children += node.children
-                self.children.append(node)
+                self.children += node.children
            else:
                self.children.append(node)
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-

-__all__ = ['pre_init', 'post_init', 'pre_save', 'post_save',
-           'pre_delete', 'post_delete']
+__all__ = ['pre_init', 'post_init', 'pre_save', 'pre_save_post_validation',
+           'post_save', 'pre_delete', 'post_delete']

signals_available = False
try:
@@ -39,6 +39,7 @@ _signals = Namespace()
pre_init = _signals.signal('pre_init')
post_init = _signals.signal('post_init')
pre_save = _signals.signal('pre_save')
+pre_save_post_validation = _signals.signal('pre_save_post_validation')
post_save = _signals.signal('post_save')
pre_delete = _signals.signal('pre_delete')
post_delete = _signals.signal('post_delete')
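
Note: the new signal fires between validation and the actual write, and its receivers are passed the `created` flag. Connecting a receiver works like the other signals; a short sketch (receiver body illustrative, `Author` is the document class from the signal tests below):

    from mongoengine import signals

    def audit(sender, document, **kwargs):
        # kwargs.get('created') is True for inserts, False for updates
        print 'writing %s (created=%s)' % (document, kwargs.get('created'))

    signals.pre_save_post_validation.connect(audit, sender=Author)
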
@@ -5,7 +5,7 @@
%define srcname mongoengine

Name:           python-%{srcname}
-Version:        0.8.1
+Version:        0.8.2
Release:        1%{?dist}
Summary:        A Python Document-Object Mapper for working with MongoDB

setup.py
@@ -57,7 +57,7 @@ if sys.version_info[0] == 3:
    extra_opts['packages'].append("tests")
    extra_opts['package_data'] = {"tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]}
else:
-    extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.4.2', 'PIL', 'jinja2==2.6']
+    extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.4.2', 'PIL', 'jinja2==2.6', 'python-dateutil']
    extra_opts['packages'] = find_packages(exclude=('tests',))

setup(name='mongoengine',
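
Note: `python-dateutil` joins `tests_require` because the DateTimeField changes tested later in this changeset fall back to dateutil for parsing 'T'-separated ISO strings when it is installed. Roughly how the optional dependency is used (a sketch, not the exact library code):

    try:
        import dateutil.parser
        value = dateutil.parser.parse('2013-06-01T10:00:00')
    except ImportError:
        pass  # only space-separated datetime strings parse without dateutil
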
@@ -85,6 +85,153 @@ class ClassMethodsTest(unittest.TestCase):
        self.assertEqual(self.Person._meta['delete_rules'],
                         {(Job, 'employee'): NULLIFY})

+    def test_compare_indexes(self):
+        """ Ensure that the indexes are properly created and that
+        compare_indexes identifies the missing/extra indexes
+        """
+
+        class BlogPost(Document):
+            author = StringField()
+            title = StringField()
+            description = StringField()
+            tags = StringField()
+
+            meta = {
+                'indexes': [('author', 'title')]
+            }
+
+        BlogPost.drop_collection()
+
+        BlogPost.ensure_indexes()
+        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })
+
+        BlogPost.ensure_index(['author', 'description'])
+        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [[('author', 1), ('description', 1)]] })
+
+        BlogPost._get_collection().drop_index('author_1_description_1')
+        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })
+
+        BlogPost._get_collection().drop_index('author_1_title_1')
+        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [[('author', 1), ('title', 1)]], 'extra': [] })
+
+    def test_compare_indexes_inheritance(self):
+        """ Ensure that the indexes are properly created and that
+        compare_indexes identifies the missing/extra indexes for subclassed
+        documents (_cls included)
+        """
+
+        class BlogPost(Document):
+            author = StringField()
+            title = StringField()
+            description = StringField()
+
+            meta = {
+                'allow_inheritance': True
+            }
+
+        class BlogPostWithTags(BlogPost):
+            tags = StringField()
+            tag_list = ListField(StringField())
+
+            meta = {
+                'indexes': [('author', 'tags')]
+            }
+
+        BlogPost.drop_collection()
+
+        BlogPost.ensure_indexes()
+        BlogPostWithTags.ensure_indexes()
+        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })
+
+        BlogPostWithTags.ensure_index(['author', 'tag_list'])
+        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [[('_cls', 1), ('author', 1), ('tag_list', 1)]] })
+
+        BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tag_list_1')
+        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })
+
+        BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tags_1')
+        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [[('_cls', 1), ('author', 1), ('tags', 1)]], 'extra': [] })
+
+    def test_compare_indexes_multiple_subclasses(self):
+        """ Ensure that compare_indexes behaves correctly if called from a
+        class, which base class has multiple subclasses
+        """
+
+        class BlogPost(Document):
+            author = StringField()
+            title = StringField()
+            description = StringField()
+
+            meta = {
+                'allow_inheritance': True
+            }
+
+        class BlogPostWithTags(BlogPost):
+            tags = StringField()
+            tag_list = ListField(StringField())
+
+            meta = {
+                'indexes': [('author', 'tags')]
+            }
+
+        class BlogPostWithCustomField(BlogPost):
+            custom = DictField()
+
+            meta = {
+                'indexes': [('author', 'custom')]
+            }
+
+        BlogPost.ensure_indexes()
+        BlogPostWithTags.ensure_indexes()
+        BlogPostWithCustomField.ensure_indexes()
+
+        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })
+        self.assertEqual(BlogPostWithTags.compare_indexes(), { 'missing': [], 'extra': [] })
+        self.assertEqual(BlogPostWithCustomField.compare_indexes(), { 'missing': [], 'extra': [] })
+
+    def test_list_indexes_inheritance(self):
+        """ ensure that all of the indexes are listed regardless of the super-
+        or sub-class that we call it from
+        """
+
+        class BlogPost(Document):
+            author = StringField()
+            title = StringField()
+            description = StringField()
+
+            meta = {
+                'allow_inheritance': True
+            }
+
+        class BlogPostWithTags(BlogPost):
+            tags = StringField()
+
+            meta = {
+                'indexes': [('author', 'tags')]
+            }
+
+        class BlogPostWithTagsAndExtraText(BlogPostWithTags):
+            extra_text = StringField()
+
+            meta = {
+                'indexes': [('author', 'tags', 'extra_text')]
+            }
+
+        BlogPost.drop_collection()
+
+        BlogPost.ensure_indexes()
+        BlogPostWithTags.ensure_indexes()
+        BlogPostWithTagsAndExtraText.ensure_indexes()
+
+        self.assertEqual(BlogPost.list_indexes(),
+                         BlogPostWithTags.list_indexes())
+        self.assertEqual(BlogPost.list_indexes(),
+                         BlogPostWithTagsAndExtraText.list_indexes())
+        self.assertEqual(BlogPost.list_indexes(),
+                         [[('_cls', 1), ('author', 1), ('tags', 1)],
+                          [('_cls', 1), ('author', 1), ('tags', 1), ('extra_text', 1)],
+                          [(u'_id', 1)], [('_cls', 1)]])
+
    def test_register_delete_rule_inherited(self):

        class Vaccine(Document):
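
Note: taken together these tests pin down the `compare_indexes` contract: it returns a dict with `missing` (declared in `meta` but absent from MongoDB) and `extra` (present in MongoDB but not declared) lists of index specs, with `_cls` prepended for inheritance-enabled documents. In short:

    BlogPost.ensure_indexes()
    BlogPost.compare_indexes()
    # {'missing': [], 'extra': []}

    BlogPost._get_collection().drop_index('author_1_title_1')
    BlogPost.compare_indexes()
    # {'missing': [[('author', 1), ('title', 1)]], 'extra': []}
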
@@ -189,6 +189,41 @@ class InheritanceTest(unittest.TestCase):
        self.assertEqual(Employee._get_collection_name(),
                         Person._get_collection_name())

+    def test_indexes_and_multiple_inheritance(self):
+        """ Ensure that all of the indexes are created for a document with
+        multiple inheritance.
+        """
+
+        class A(Document):
+            a = StringField()
+
+            meta = {
+                'allow_inheritance': True,
+                'indexes': ['a']
+            }
+
+        class B(Document):
+            b = StringField()
+
+            meta = {
+                'allow_inheritance': True,
+                'indexes': ['b']
+            }
+
+        class C(A, B):
+            pass
+
+        A.drop_collection()
+        B.drop_collection()
+        C.drop_collection()
+
+        C.ensure_indexes()
+
+        self.assertEqual(
+            sorted([idx['key'] for idx in C._get_collection().index_information().values()]),
+            sorted([[(u'_cls', 1), (u'b', 1)], [(u'_id', 1)], [(u'_cls', 1), (u'a', 1)]])
+        )
+
    def test_polymorphic_queries(self):
        """Ensure that the correct subclasses are returned from a query
        """
@@ -9,6 +9,7 @@ import unittest
import uuid

from datetime import datetime
+from bson import DBRef
from tests.fixtures import PickleEmbedded, PickleTest, PickleSignalsTest

from mongoengine import *
@@ -664,7 +665,7 @@ class InstanceTest(unittest.TestCase):

        p = Person.objects(name="Wilson Jr").get()
        p.parent.name = "Daddy Wilson"
-        p.save()
+        p.save(cascade=True)

        p1.reload()
        self.assertEqual(p1.name, p.parent.name)
@@ -683,14 +684,12 @@ class InstanceTest(unittest.TestCase):

        p2 = Person(name="Wilson Jr")
        p2.parent = p1
+        p1.name = "Daddy Wilson"
        p2.save(force_insert=True, cascade_kwargs={"force_insert": False})

-        p = Person.objects(name="Wilson Jr").get()
-        p.parent.name = "Daddy Wilson"
-        p.save()
-
        p1.reload()
-        self.assertEqual(p1.name, p.parent.name)
+        p2.reload()
+        self.assertEqual(p1.name, p2.parent.name)

    def test_save_cascade_meta_false(self):
@@ -765,6 +764,10 @@ class InstanceTest(unittest.TestCase):
        p.parent.name = "Daddy Wilson"
        p.save()

+        p1.reload()
+        self.assertNotEqual(p1.name, p.parent.name)
+
+        p.save(cascade=True)
        p1.reload()
        self.assertEqual(p1.name, p.parent.name)

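
Note: these test edits track the behavioural change that `save()` no longer cascades to referenced documents by default; persisting changes made through a reference is now opt-in per call:

    p.parent.name = "Daddy Wilson"
    p.save()              # the parent's change is not persisted
    p.save(cascade=True)  # now the referenced parent is saved too
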
@@ -1018,6 +1021,99 @@ class InstanceTest(unittest.TestCase):
        self.assertEqual(person.age, 21)
        self.assertEqual(person.active, False)

+    def test_query_count_when_saving(self):
+        """Ensure references don't cause extra fetches when saving"""
+        class Organization(Document):
+            name = StringField()
+
+        class User(Document):
+            name = StringField()
+            orgs = ListField(ReferenceField('Organization'))
+
+        class Feed(Document):
+            name = StringField()
+
+        class UserSubscription(Document):
+            name = StringField()
+            user = ReferenceField(User)
+            feed = ReferenceField(Feed)
+
+        Organization.drop_collection()
+        User.drop_collection()
+        Feed.drop_collection()
+        UserSubscription.drop_collection()
+
+        o1 = Organization(name="o1").save()
+        o2 = Organization(name="o2").save()
+
+        u1 = User(name="Ross", orgs=[o1, o2]).save()
+        f1 = Feed(name="MongoEngine").save()
+
+        sub = UserSubscription(user=u1, feed=f1).save()
+
+        user = User.objects.first()
+        # Even if stored as ObjectId's internally mongoengine uses DBRefs
+        # As ObjectId's aren't automatically derefenced
+        self.assertTrue(isinstance(user._data['orgs'][0], DBRef))
+        self.assertTrue(isinstance(user.orgs[0], Organization))
+        self.assertTrue(isinstance(user._data['orgs'][0], Organization))
+
+        # Changing a value
+        with query_counter() as q:
+            self.assertEqual(q, 0)
+            sub = UserSubscription.objects.first()
+            self.assertEqual(q, 1)
+            sub.name = "Test Sub"
+            sub.save()
+            self.assertEqual(q, 2)
+
+        # Changing a value that will cascade
+        with query_counter() as q:
+            self.assertEqual(q, 0)
+            sub = UserSubscription.objects.first()
+            self.assertEqual(q, 1)
+            sub.user.name = "Test"
+            self.assertEqual(q, 2)
+            sub.save(cascade=True)
+            self.assertEqual(q, 3)
+
+        # Changing a value and one that will cascade
+        with query_counter() as q:
+            self.assertEqual(q, 0)
+            sub = UserSubscription.objects.first()
+            sub.name = "Test Sub 2"
+            self.assertEqual(q, 1)
+            sub.user.name = "Test 2"
+            self.assertEqual(q, 2)
+            sub.save(cascade=True)
+            self.assertEqual(q, 4)  # One for the UserSub and one for the User
+
+        # Saving with just the refs
+        with query_counter() as q:
+            self.assertEqual(q, 0)
+            sub = UserSubscription(user=u1.pk, feed=f1.pk)
+            self.assertEqual(q, 0)
+            sub.save()
+            self.assertEqual(q, 1)
+
+        # Saving with just the refs on a ListField
+        with query_counter() as q:
+            self.assertEqual(q, 0)
+            User(name="Bob", orgs=[o1.pk, o2.pk]).save()
+            self.assertEqual(q, 1)
+
+        # Saving new objects
+        with query_counter() as q:
+            self.assertEqual(q, 0)
+            user = User.objects.first()
+            self.assertEqual(q, 1)
+            feed = Feed.objects.first()
+            self.assertEqual(q, 2)
+            sub = UserSubscription(user=user, feed=feed)
+            self.assertEqual(q, 2)  # Check no change
+            sub.save()
+            self.assertEqual(q, 3)
+
    def test_set_unset_one_operation(self):
        """Ensure that $set and $unset actions are performed in the same
        operation.
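
Note: `query_counter` (from `mongoengine.context_managers`) is what makes these counts assertable: inside the block, `q` compares equal to the number of operations issued so far, so every fetch and write can be accounted for. A minimal usage sketch:

    from mongoengine.context_managers import query_counter

    with query_counter() as q:
        assert q == 0
        User.objects.first()  # one fetch
        assert q == 1
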
|
@@ -6,6 +6,11 @@ import datetime
|
|||||||
import unittest
|
import unittest
|
||||||
import uuid
|
import uuid
|
||||||
|
|
||||||
|
try:
|
||||||
|
import dateutil
|
||||||
|
except ImportError:
|
||||||
|
dateutil = None
|
||||||
|
|
||||||
from decimal import Decimal
|
from decimal import Decimal
|
||||||
|
|
||||||
from bson import Binary, DBRef, ObjectId
|
from bson import Binary, DBRef, ObjectId
|
||||||
@@ -29,20 +34,137 @@ class FieldTest(unittest.TestCase):
        self.db.drop_collection('fs.files')
        self.db.drop_collection('fs.chunks')

-    def test_default_values(self):
+    def test_default_values_nothing_set(self):
        """Ensure that default field values are used when creating a document.
        """
        class Person(Document):
            name = StringField()
-            age = IntField(default=30, help_text="Your real age")
-            userid = StringField(default=lambda: 'test', verbose_name="User Identity")
+            age = IntField(default=30, required=False)
+            userid = StringField(default=lambda: 'test', required=True)
+            created = DateTimeField(default=datetime.datetime.utcnow)

-        person = Person(name='Test Person')
-        self.assertEqual(person._data['age'], 30)
-        self.assertEqual(person._data['userid'], 'test')
-        self.assertEqual(person._fields['name'].help_text, None)
-        self.assertEqual(person._fields['age'].help_text, "Your real age")
-        self.assertEqual(person._fields['userid'].verbose_name, "User Identity")
+        person = Person(name="Ross")
+
+        # Confirm saving now would store values
+        data_to_be_saved = sorted(person.to_mongo().keys())
+        self.assertEqual(data_to_be_saved, ['age', 'created', 'name', 'userid'])
+
+        self.assertTrue(person.validate() is None)
+
+        self.assertEqual(person.name, person.name)
+        self.assertEqual(person.age, person.age)
+        self.assertEqual(person.userid, person.userid)
+        self.assertEqual(person.created, person.created)
+
+        self.assertEqual(person._data['name'], person.name)
+        self.assertEqual(person._data['age'], person.age)
+        self.assertEqual(person._data['userid'], person.userid)
+        self.assertEqual(person._data['created'], person.created)
+
+        # Confirm introspection changes nothing
+        data_to_be_saved = sorted(person.to_mongo().keys())
+        self.assertEqual(data_to_be_saved, ['age', 'created', 'name', 'userid'])
+
+    def test_default_values_set_to_None(self):
+        """Ensure that default field values are used when creating a document.
+        """
+        class Person(Document):
+            name = StringField()
+            age = IntField(default=30, required=False)
+            userid = StringField(default=lambda: 'test', required=True)
+            created = DateTimeField(default=datetime.datetime.utcnow)
+
+        # Trying setting values to None
+        person = Person(name=None, age=None, userid=None, created=None)
+
+        # Confirm saving now would store values
+        data_to_be_saved = sorted(person.to_mongo().keys())
+        self.assertEqual(data_to_be_saved, ['age', 'created', 'userid'])
+
+        self.assertTrue(person.validate() is None)
+
+        self.assertEqual(person.name, person.name)
+        self.assertEqual(person.age, person.age)
+        self.assertEqual(person.userid, person.userid)
+        self.assertEqual(person.created, person.created)
+
+        self.assertEqual(person._data['name'], person.name)
+        self.assertEqual(person._data['age'], person.age)
+        self.assertEqual(person._data['userid'], person.userid)
+        self.assertEqual(person._data['created'], person.created)
+
+        # Confirm introspection changes nothing
+        data_to_be_saved = sorted(person.to_mongo().keys())
+        self.assertEqual(data_to_be_saved, ['age', 'created', 'userid'])
+
+    def test_default_values_when_setting_to_None(self):
+        """Ensure that default field values are used when creating a document.
+        """
+        class Person(Document):
+            name = StringField()
+            age = IntField(default=30, required=False)
+            userid = StringField(default=lambda: 'test', required=True)
+            created = DateTimeField(default=datetime.datetime.utcnow)
+
+        person = Person()
+        person.name = None
+        person.age = None
+        person.userid = None
+        person.created = None
+
+        # Confirm saving now would store values
+        data_to_be_saved = sorted(person.to_mongo().keys())
+        self.assertEqual(data_to_be_saved, ['age', 'created', 'userid'])
+
+        self.assertTrue(person.validate() is None)
+
+        self.assertEqual(person.name, person.name)
+        self.assertEqual(person.age, person.age)
+        self.assertEqual(person.userid, person.userid)
+        self.assertEqual(person.created, person.created)
+
+        self.assertEqual(person._data['name'], person.name)
+        self.assertEqual(person._data['age'], person.age)
+        self.assertEqual(person._data['userid'], person.userid)
+        self.assertEqual(person._data['created'], person.created)
+
+        # Confirm introspection changes nothing
+        data_to_be_saved = sorted(person.to_mongo().keys())
+        self.assertEqual(data_to_be_saved, ['age', 'created', 'userid'])
+
+    def test_default_values_when_deleting_value(self):
+        """Ensure that default field values are used when creating a document.
+        """
+        class Person(Document):
+            name = StringField()
+            age = IntField(default=30, required=False)
+            userid = StringField(default=lambda: 'test', required=True)
+            created = DateTimeField(default=datetime.datetime.utcnow)
+
+        person = Person(name="Ross")
+        del person.name
+        del person.age
+        del person.userid
+        del person.created
+
+        data_to_be_saved = sorted(person.to_mongo().keys())
+        self.assertEqual(data_to_be_saved, ['age', 'created', 'userid'])
+
+        self.assertTrue(person.validate() is None)
+
+        self.assertEqual(person.name, person.name)
+        self.assertEqual(person.age, person.age)
+        self.assertEqual(person.userid, person.userid)
+        self.assertEqual(person.created, person.created)
+
+        self.assertEqual(person._data['name'], person.name)
+        self.assertEqual(person._data['age'], person.age)
+        self.assertEqual(person._data['userid'], person.userid)
+        self.assertEqual(person._data['created'], person.created)
+
+        # Confirm introspection changes nothing
+        data_to_be_saved = sorted(person.to_mongo().keys())
+        self.assertEqual(data_to_be_saved, ['age', 'created', 'userid'])
+
    def test_required_values(self):
        """Ensure that required field constraints are enforced.
@@ -403,9 +525,16 @@ class FieldTest(unittest.TestCase):
        log.time = datetime.date.today()
        log.validate()

+        log.time = datetime.datetime.now().isoformat(' ')
+        log.validate()
+
+        if dateutil:
+            log.time = datetime.datetime.now().isoformat('T')
+            log.validate()
+
        log.time = -1
        self.assertRaises(ValidationError, log.validate)
-        log.time = '1pm'
+        log.time = 'ABC'
        self.assertRaises(ValidationError, log.validate)

    def test_datetime_tz_aware_mark_as_changed(self):
@@ -482,6 +611,66 @@ class FieldTest(unittest.TestCase):

        LogEntry.drop_collection()

+    def test_datetime_usage(self):
+        """Tests for regular datetime fields"""
+        class LogEntry(Document):
+            date = DateTimeField()
+
+        LogEntry.drop_collection()
+
+        d1 = datetime.datetime(1970, 01, 01, 00, 00, 01)
+        log = LogEntry()
+        log.date = d1
+        log.validate()
+        log.save()
+
+        for query in (d1, d1.isoformat(' ')):
+            log1 = LogEntry.objects.get(date=query)
+            self.assertEqual(log, log1)
+
+        if dateutil:
+            log1 = LogEntry.objects.get(date=d1.isoformat('T'))
+            self.assertEqual(log, log1)
+
+        LogEntry.drop_collection()
+
+        # create 60 log entries
+        for i in xrange(1950, 2010):
+            d = datetime.datetime(i, 01, 01, 00, 00, 01)
+            LogEntry(date=d).save()
+
+        self.assertEqual(LogEntry.objects.count(), 60)
+
+        # Test ordering
+        logs = LogEntry.objects.order_by("date")
+        count = logs.count()
+        i = 0
+        while i == count - 1:
+            self.assertTrue(logs[i].date <= logs[i + 1].date)
+            i += 1
+
+        logs = LogEntry.objects.order_by("-date")
+        count = logs.count()
+        i = 0
+        while i == count - 1:
+            self.assertTrue(logs[i].date >= logs[i + 1].date)
+            i += 1
+
+        # Test searching
+        logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1))
+        self.assertEqual(logs.count(), 30)
+
+        logs = LogEntry.objects.filter(date__lte=datetime.datetime(1980, 1, 1))
+        self.assertEqual(logs.count(), 30)
+
+        logs = LogEntry.objects.filter(
+            date__lte=datetime.datetime(2011, 1, 1),
+            date__gte=datetime.datetime(2000, 1, 1),
+        )
+        self.assertEqual(logs.count(), 10)
+
+        LogEntry.drop_collection()
+
    def test_complexdatetime_storage(self):
        """Tests for complex datetime fields - which can handle microseconds
        without rounding.
@@ -808,6 +997,27 @@ class FieldTest(unittest.TestCase):

        self.assertRaises(ValidationError, e.save)

+    def test_complex_field_same_value_not_changed(self):
+        """
+        If a complex field is set to the same value, it should not be marked as
+        changed.
+        """
+        class Simple(Document):
+            mapping = ListField()
+
+        Simple.drop_collection()
+        e = Simple().save()
+        e.mapping = []
+        self.assertEqual([], e._changed_fields)
+
+        class Simple(Document):
+            mapping = DictField()
+
+        Simple.drop_collection()
+        e = Simple().save()
+        e.mapping = {}
+        self.assertEqual([], e._changed_fields)
+
    def test_list_field_complex(self):
        """Ensure that the list fields can handle the complex types."""

@@ -1929,7 +2139,7 @@ class FieldTest(unittest.TestCase):
        self.db['mongoengine.counters'].drop()

        self.assertEqual(Person.id.get_next_value(), '1')

    def test_sequence_field_sequence_name(self):
        class Person(Document):
            id = SequenceField(primary_key=True, sequence_name='jelly')
@@ -14,6 +14,12 @@ from mongoengine import *
from mongoengine.connection import get_db
from mongoengine.python_support import PY3, b, StringIO

+try:
+    from PIL import Image
+    HAS_PIL = True
+except ImportError:
+    HAS_PIL = False
+
TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png')
TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), 'mongodb_leaf.png')

@@ -255,14 +261,25 @@ class FileTest(unittest.TestCase):
        self.assertFalse(test_file.the_file in [{"test": 1}])

    def test_image_field(self):
-        if PY3:
-            raise SkipTest('PIL does not have Python 3 support')
+        if not HAS_PIL:
+            raise SkipTest('PIL not installed')

        class TestImage(Document):
            image = ImageField()

        TestImage.drop_collection()

+        with tempfile.TemporaryFile() as f:
+            f.write(b("Hello World!"))
+            f.flush()
+
+            t = TestImage()
+            try:
+                t.image.put(f)
+                self.fail("Should have raised an invalidation error")
+            except ValidationError, e:
+                self.assertEquals("%s" % e, "Invalid image: cannot identify image file")
+
        t = TestImage()
        t.image.put(open(TEST_IMAGE_PATH, 'rb'))
        t.save()
@@ -278,8 +295,8 @@ class FileTest(unittest.TestCase):
        t.image.delete()

    def test_image_field_reassigning(self):
-        if PY3:
-            raise SkipTest('PIL does not have Python 3 support')
+        if not HAS_PIL:
+            raise SkipTest('PIL not installed')

        class TestFile(Document):
            the_file = ImageField()
@@ -294,8 +311,8 @@ class FileTest(unittest.TestCase):
        self.assertEqual(test_file.the_file.size, (45, 101))

    def test_image_field_resize(self):
-        if PY3:
-            raise SkipTest('PIL does not have Python 3 support')
+        if not HAS_PIL:
+            raise SkipTest('PIL not installed')

        class TestImage(Document):
            image = ImageField(size=(185, 37))
@@ -317,8 +334,8 @@ class FileTest(unittest.TestCase):
        t.image.delete()

    def test_image_field_resize_force(self):
-        if PY3:
-            raise SkipTest('PIL does not have Python 3 support')
+        if not HAS_PIL:
+            raise SkipTest('PIL not installed')

        class TestImage(Document):
            image = ImageField(size=(185, 37, True))
@@ -340,8 +357,8 @@ class FileTest(unittest.TestCase):
        t.image.delete()

    def test_image_field_thumbnail(self):
-        if PY3:
-            raise SkipTest('PIL does not have Python 3 support')
+        if not HAS_PIL:
+            raise SkipTest('PIL not installed')

        class TestImage(Document):
            image = ImageField(thumbnail_size=(92, 18))
@@ -388,6 +405,14 @@ class FileTest(unittest.TestCase):
        self.assertEqual(test_file.the_file.read(),
                         b('Hello, World!'))

+        test_file = TestFile.objects.first()
+        test_file.the_file = b('HELLO, WORLD!')
+        test_file.save()
+
+        test_file = TestFile.objects.first()
+        self.assertEqual(test_file.the_file.read(),
+                         b('HELLO, WORLD!'))
+
    def test_copyable(self):
        class PutFile(Document):
            the_file = FileField()
@@ -407,6 +432,28 @@ class FileTest(unittest.TestCase):
        self.assertEqual(putfile, copy.copy(putfile))
        self.assertEqual(putfile, copy.deepcopy(putfile))

+    def test_get_image_by_grid_id(self):
+
+        if not HAS_PIL:
+            raise SkipTest('PIL not installed')
+
+        class TestImage(Document):
+
+            image1 = ImageField()
+            image2 = ImageField()
+
+        TestImage.drop_collection()
+
+        t = TestImage()
+        t.image1.put(open(TEST_IMAGE_PATH, 'rb'))
+        t.image2.put(open(TEST_IMAGE2_PATH, 'rb'))
+        t.save()
+
+        test = TestImage.objects.first()
+        grid_id = test.image1.grid_id
+
+        self.assertEqual(1, TestImage.objects(Q(image1=grid_id)
+                                              or Q(image2=grid_id)).count())
+
if __name__ == '__main__':
    unittest.main()
@@ -545,6 +545,15 @@ class QuerySetTest(unittest.TestCase):
        self.assertEqual("Bob", bob.name)
        self.assertEqual(30, bob.age)

+    def test_upsert_one(self):
+        self.Person.drop_collection()
+
+        self.Person.objects(name="Bob", age=30).update_one(upsert=True)
+
+        bob = self.Person.objects.first()
+        self.assertEqual("Bob", bob.name)
+        self.assertEqual(30, bob.age)
+
    def test_set_on_insert(self):
        self.Person.drop_collection()

@@ -622,14 +631,13 @@ class QuerySetTest(unittest.TestCase):
            self.assertEqual(q, 1)  # 1 for the insert

        Blog.drop_collection()
+        Blog.ensure_indexes()

        with query_counter() as q:
            self.assertEqual(q, 0)

-            Blog.ensure_indexes()
-            self.assertEqual(q, 1)
-
            Blog.objects.insert(blogs)
-            self.assertEqual(q, 3)  # 1 for insert, and 1 for in bulk fetch (3 in total)
+            self.assertEqual(q, 2)  # 1 for insert, and 1 for in bulk fetch

        Blog.drop_collection()

@@ -3089,7 +3097,10 @@ class QuerySetTest(unittest.TestCase):
        self.assertEqual([], bars)

        self.assertRaises(ConfigurationError, Bar.objects,
                          read_preference='Primary')

+        bars = Bar.objects(read_preference=ReadPreference.SECONDARY_PREFERRED)
+        self.assertEqual(bars._read_preference, ReadPreference.SECONDARY_PREFERRED)
+
    def test_json_simple(self):

@@ -3380,6 +3391,34 @@ class QuerySetTest(unittest.TestCase):
        self.assertEqual(B.objects.get(a=a).a, a)
        self.assertEqual(B.objects.get(a=a.id).a, a)

+    def test_cls_query_in_subclassed_docs(self):
+
+        class Animal(Document):
+            name = StringField()
+
+            meta = {
+                'allow_inheritance': True
+            }
+
+        class Dog(Animal):
+            pass
+
+        class Cat(Animal):
+            pass
+
+        self.assertEqual(Animal.objects(name='Charlie')._query, {
+            'name': 'Charlie',
+            '_cls': { '$in': ('Animal', 'Animal.Dog', 'Animal.Cat') }
+        })
+        self.assertEqual(Dog.objects(name='Charlie')._query, {
+            'name': 'Charlie',
+            '_cls': 'Animal.Dog'
+        })
+        self.assertEqual(Cat.objects(name='Charlie')._query, {
+            'name': 'Charlie',
+            '_cls': 'Animal.Cat'
+        })
+
+
if __name__ == '__main__':
    unittest.main()
@@ -68,11 +68,11 @@ class QTest(unittest.TestCase):
            x = IntField()
            y = StringField()

-        # Check than an error is raised when conflicting queries are anded
-        def invalid_combination():
-            query = Q(x__lt=7) & Q(x__lt=3)
-            query.to_query(TestDoc)
-        self.assertRaises(InvalidQueryError, invalid_combination)
+        query = (Q(x__lt=7) & Q(x__lt=3)).to_query(TestDoc)
+        self.assertEqual(query, {'$and': [{'x': {'$lt': 7}}, {'x': {'$lt': 3}}]})
+
+        query = (Q(y="a") & Q(x__lt=7) & Q(x__lt=3)).to_query(TestDoc)
+        self.assertEqual(query, {'$and': [{'y': "a"}, {'x': {'$lt': 7}}, {'x': {'$lt': 3}}]})

        # Check normal cases work without an error
        query = Q(x__lt=7) & Q(x__gt=3)
@@ -325,10 +325,26 @@ class QTest(unittest.TestCase):
        pk = ObjectId()
        User(email='example@example.com', pk=pk).save()

-        self.assertEqual(1, User.objects.filter(
-            Q(email='example@example.com') |
-            Q(name='John Doe')
-        ).limit(2).filter(pk=pk).count())
+        self.assertEqual(1, User.objects.filter(Q(email='example@example.com') |
+                         Q(name='John Doe')).limit(2).filter(pk=pk).count())
+
+    def test_chained_q_or_filtering(self):
+
+        class Post(EmbeddedDocument):
+            name = StringField(required=True)
+
+        class Item(Document):
+            postables = ListField(EmbeddedDocumentField(Post))
+
+        Item.drop_collection()
+
+        Item(postables=[Post(name="a"), Post(name="b")]).save()
+        Item(postables=[Post(name="a"), Post(name="c")]).save()
+        Item(postables=[Post(name="a"), Post(name="b"), Post(name="c")]).save()
+
+        self.assertEqual(Item.objects(Q(postables__name="a") & Q(postables__name="b")).count(), 2)
+        self.assertEqual(Item.objects.filter(postables__name="a").filter(postables__name="b").count(), 2)


if __name__ == '__main__':
    unittest.main()
@@ -275,7 +275,7 @@ class MongoAuthTest(unittest.TestCase):

    def test_user_manager(self):
        manager = get_user_model()._default_manager
-        self.assertIsInstance(manager, MongoUserManager)
+        self.assertTrue(isinstance(manager, MongoUserManager))

    def test_user_manager_exception(self):
        manager = get_user_model()._default_manager
@@ -285,14 +285,14 @@ class MongoAuthTest(unittest.TestCase):
    def test_create_user(self):
        manager = get_user_model()._default_manager
        user = manager.create_user(**self.user_data)
-        self.assertIsInstance(user, User)
+        self.assertTrue(isinstance(user, User))
        db_user = User.objects.get(username='user')
        self.assertEqual(user.id, db_user.id)

    def test_authenticate(self):
        get_user_model()._default_manager.create_user(**self.user_data)
        user = authenticate(username='user', password='fail')
-        self.assertIsNone(user)
+        self.assertEqual(None, user)
        user = authenticate(username='user', password='test')
        db_user = User.objects.get(username='user')
        self.assertEqual(user.id, db_user.id)
@@ -43,6 +43,15 @@ class SignalTests(unittest.TestCase):
        def pre_save(cls, sender, document, **kwargs):
            signal_output.append('pre_save signal, %s' % document)

+        @classmethod
+        def pre_save_post_validation(cls, sender, document, **kwargs):
+            signal_output.append('pre_save_post_validation signal, %s' % document)
+            if 'created' in kwargs:
+                if kwargs['created']:
+                    signal_output.append('Is created')
+                else:
+                    signal_output.append('Is updated')
+
        @classmethod
        def post_save(cls, sender, document, **kwargs):
            signal_output.append('post_save signal, %s' % document)
@@ -75,40 +84,19 @@ class SignalTests(unittest.TestCase):
        Author.drop_collection()

        class Another(Document):

            name = StringField()

            def __unicode__(self):
                return self.name

-            @classmethod
-            def pre_init(cls, sender, document, **kwargs):
-                signal_output.append('pre_init Another signal, %s' % cls.__name__)
-                signal_output.append(str(kwargs['values']))
-
-            @classmethod
-            def post_init(cls, sender, document, **kwargs):
-                signal_output.append('post_init Another signal, %s' % document)
-
-            @classmethod
-            def pre_save(cls, sender, document, **kwargs):
-                signal_output.append('pre_save Another signal, %s' % document)
-
-            @classmethod
-            def post_save(cls, sender, document, **kwargs):
-                signal_output.append('post_save Another signal, %s' % document)
-                if 'created' in kwargs:
-                    if kwargs['created']:
-                        signal_output.append('Is created')
-                    else:
-                        signal_output.append('Is updated')
-
            @classmethod
            def pre_delete(cls, sender, document, **kwargs):
-                signal_output.append('pre_delete Another signal, %s' % document)
+                signal_output.append('pre_delete signal, %s' % document)

            @classmethod
            def post_delete(cls, sender, document, **kwargs):
-                signal_output.append('post_delete Another signal, %s' % document)
+                signal_output.append('post_delete signal, %s' % document)

        self.Another = Another
        Another.drop_collection()
@@ -133,6 +121,7 @@ class SignalTests(unittest.TestCase):
            len(signals.pre_init.receivers),
            len(signals.post_init.receivers),
            len(signals.pre_save.receivers),
+            len(signals.pre_save_post_validation.receivers),
            len(signals.post_save.receivers),
            len(signals.pre_delete.receivers),
            len(signals.post_delete.receivers),
@@ -143,16 +132,13 @@ class SignalTests(unittest.TestCase):
        signals.pre_init.connect(Author.pre_init, sender=Author)
        signals.post_init.connect(Author.post_init, sender=Author)
        signals.pre_save.connect(Author.pre_save, sender=Author)
+        signals.pre_save_post_validation.connect(Author.pre_save_post_validation, sender=Author)
        signals.post_save.connect(Author.post_save, sender=Author)
        signals.pre_delete.connect(Author.pre_delete, sender=Author)
        signals.post_delete.connect(Author.post_delete, sender=Author)
        signals.pre_bulk_insert.connect(Author.pre_bulk_insert, sender=Author)
        signals.post_bulk_insert.connect(Author.post_bulk_insert, sender=Author)

-        signals.pre_init.connect(Another.pre_init, sender=Another)
-        signals.post_init.connect(Another.post_init, sender=Another)
-        signals.pre_save.connect(Another.pre_save, sender=Another)
-        signals.post_save.connect(Another.post_save, sender=Another)
        signals.pre_delete.connect(Another.pre_delete, sender=Another)
        signals.post_delete.connect(Another.post_delete, sender=Another)

@@ -164,16 +150,13 @@ class SignalTests(unittest.TestCase):
        signals.post_delete.disconnect(self.Author.post_delete)
        signals.pre_delete.disconnect(self.Author.pre_delete)
        signals.post_save.disconnect(self.Author.post_save)
+        signals.pre_save_post_validation.disconnect(self.Author.pre_save_post_validation)
        signals.pre_save.disconnect(self.Author.pre_save)
        signals.pre_bulk_insert.disconnect(self.Author.pre_bulk_insert)
        signals.post_bulk_insert.disconnect(self.Author.post_bulk_insert)

-        signals.pre_init.disconnect(self.Another.pre_init)
-        signals.post_init.disconnect(self.Another.post_init)
        signals.post_delete.disconnect(self.Another.post_delete)
        signals.pre_delete.disconnect(self.Another.pre_delete)
-        signals.post_save.disconnect(self.Another.post_save)
-        signals.pre_save.disconnect(self.Another.pre_save)

        signals.post_save.disconnect(self.ExplicitId.post_save)
@@ -182,6 +165,7 @@ class SignalTests(unittest.TestCase):
            len(signals.pre_init.receivers),
            len(signals.post_init.receivers),
            len(signals.pre_save.receivers),
+            len(signals.pre_save_post_validation.receivers),
            len(signals.post_save.receivers),
            len(signals.pre_delete.receivers),
            len(signals.post_delete.receivers),
@@ -216,6 +200,8 @@ class SignalTests(unittest.TestCase):
        a1 = self.Author(name='Bill Shakespeare')
        self.assertEqual(self.get_signal_output(a1.save), [
            "pre_save signal, Bill Shakespeare",
+            "pre_save_post_validation signal, Bill Shakespeare",
+            "Is created",
            "post_save signal, Bill Shakespeare",
            "Is created"
        ])
@@ -224,6 +210,8 @@ class SignalTests(unittest.TestCase):
        a1.name = 'William Shakespeare'
        self.assertEqual(self.get_signal_output(a1.save), [
            "pre_save signal, William Shakespeare",
+            "pre_save_post_validation signal, William Shakespeare",
+            "Is updated",
            "post_save signal, William Shakespeare",
            "Is updated"
        ])
@@ -252,7 +240,14 @@ class SignalTests(unittest.TestCase):
            "Not loaded",
        ])

-        self.Author.objects.delete()
+    def test_queryset_delete_signals(self):
+        """ Queryset delete should throw some signals. """
+
+        self.Another(name='Bill Shakespeare').save()
+        self.assertEqual(self.get_signal_output(self.Another.objects.delete), [
+            'pre_delete signal, Bill Shakespeare',
+            'post_delete signal, Bill Shakespeare',
+        ])

    def test_signals_with_explicit_doc_ids(self):
        """ Model saves must have a created flag the first time."""