commit fdc34869ca
Merge branch 'dev'
.gitignore (vendored, 6 changes)
@@ -1,3 +1,5 @@
+.*
+!.gitignore
 *.pyc
 .*.swp
 *.egg
@@ -7,3 +9,7 @@ build/
 dist/
 mongoengine.egg-info/
 env/
+.settings
+.project
+.pydevproject
+tests/bugfix.py
AUTHORS (64 changes)
@@ -1,5 +1,69 @@
+The PRIMARY AUTHORS are (and/or have been):
+
 Harry Marr <harry@hmarr.com>
 Matt Dennewitz <mattdennewitz@gmail.com>
 Deepak Thukral <iapain@yahoo.com>
 Florian Schlachter <flori@n-schlachter.de>
 Steve Challis <steve@stevechallis.com>
+Ross Lawley <ross.lawley@gmail.com>
+Wilson Júnior <wilsonpjunior@gmail.com>
+Dan Crosta https://github.com/dcrosta
+
+CONTRIBUTORS
+
+Derived from the git logs, inevitably incomplete but all of whom and others
+have submitted patches, reported bugs and generally helped make MongoEngine
+that much better:
+
+ * Harry Marr
+ * Ross Lawley
+ * blackbrrr
+ * Florian Schlachter
+ * Vincent Driessen
+ * Steve Challis
+ * flosch
+ * Deepak Thukral
+ * Colin Howe
+ * Wilson Júnior
+ * Alistair Roche
+ * Dan Crosta
+ * Viktor Kerkez
+ * Stephan Jaekel
+ * Rached Ben Mustapha
+ * Greg Turner
+ * Daniel Hasselrot
+ * Mircea Pasoi
+ * Matt Chisholm
+ * James Punteney
+ * Timothée Peignier
+ * Stuart Rackham
+ * Serge Matveenko
+ * Matt Dennewitz
+ * Don Spaulding
+ * Ales Zoulek
+ * sshwsfc
+ * sib
+ * Samuel Clay
+ * Nick Vlku
+ * martin
+ * Flavio Amieiro
+ * Анхбаяр Лхагвадорж
+ * Zak Johnson
+ * Victor Farazdagi
+ * vandersonmota
+ * Theo Julienne
+ * sp
+ * Slavi Pantaleev
+ * Richard Henry
+ * Nicolas Perriault
+ * Nick Vlku Jr
+ * Michael Henson
+ * Leo Honkanen
+ * kuno
+ * Josh Ourisman
+ * Jaime
+ * Igor Ivanov
+ * Gregg Lind
+ * Gareth Lloyd
+ * Albert Choi
+ * John Arnfield
docs/apireference.rst
@@ -41,6 +41,8 @@ Fields

 .. autoclass:: mongoengine.URLField

+.. autoclass:: mongoengine.EmailField
+
 .. autoclass:: mongoengine.IntField

 .. autoclass:: mongoengine.FloatField
@@ -51,12 +53,16 @@ Fields

 .. autoclass:: mongoengine.DateTimeField

+.. autoclass:: mongoengine.ComplexDateTimeField
+
 .. autoclass:: mongoengine.EmbeddedDocumentField

 .. autoclass:: mongoengine.DictField

 .. autoclass:: mongoengine.ListField

+.. autoclass:: mongoengine.SortedListField
+
 .. autoclass:: mongoengine.BinaryField

 .. autoclass:: mongoengine.ObjectIdField
docs/changelog.rst
@@ -2,6 +2,72 @@
 Changelog
 =========

+Changes in dev
+==============
+
+- Added InvalidDocumentError - so Document core methods can't be overwritten
+- Added GenericEmbeddedDocument - so you can embed any type of embeddable document
+- Added within_polygon support - for those with MongoDB 1.9
+- Updated sum / average to use map_reduce as db.eval doesn't work in sharded environments
+- Added where() filter - allowing users to specify query expressions as Javascript
+- Added SequenceField - for creating sequential counters
+- Added update() convenience method to a document
+- Added cascading saves - so changes to referenced documents are saved on .save()
+- Added select_related() support
+- Added support for the positional operator
+- Updated geo index checking to be recursive and check in embedded documents
+- Updated default collection naming convention
+- Added Document Mixin support
+- Fixed queryset __repr__ mid iteration
+- Added hint() support, so you can tell Mongo the proper index to use for the query
+- Fixed issue with inconsistent setting of _cls breaking inherited referencing
+- Added help_text and verbose_name to fields to help with some form libs
+- Updated item_frequencies to handle embedded document lookups
+- Added delta tracking - now only sets / unsets explicitly changed fields
+- Fixed saving so it sets updated values rather than overwriting the document
+- Added ComplexDateTimeField - handles datetimes correctly with microseconds
+- Added ComplexBaseField - for improved flexibility and performance
+- Added get_FIELD_display() method for easy choice field displaying
+- Added queryset.slave_okay(enabled) method
+- Updated queryset.timeout(enabled) and queryset.snapshot(enabled) to be chainable
+- Added insert method for bulk inserts
+- Added blinker signal support
+- Added query_counter context manager for tests
+- Added map_reduce version of item_frequencies and set as default (as db.eval doesn't work in sharded environments)
+- Added inline_map_reduce option to map_reduce
+- Updated connection exception so it provides more info on the cause
+- Added searching multiple levels deep in ``DictField``
+- Added ``DictField`` entries containing strings to use matching operators
+- Added ``MapField``, similar to ``DictField``
+- Added Abstract Base Classes
+- Added Custom Object Managers
+- Added sliced subfields updating
+- Added ``NotRegistered`` exception, raised when dereferencing a ``Document`` not in the registry
+- Added a write concern for ``save``, ``update``, ``update_one`` and ``get_or_create``
+- Added slicing / subarray fetching controls
+- Fixed various unique index and other index issues
+- Fixed threaded connection issues
+- Added spherical geospatial query operators
+- Updated queryset to handle the latest version of pymongo -
+  map_reduce now requires an output
+- Added ``Document`` __hash__, __ne__ for pickling
+- Added ``FileField`` optional size arg for read method
+- Fixed ``FileField`` seek and tell methods for reading files
+- Added ``QuerySet.clone`` to support copying querysets
+- Fixed item_frequencies when using a name that's the same as a native JS function
+- Added reverse delete rules
+- Fixed issue with unset operation
+- Fixed Q-object bug
+- Added ``QuerySet.all_fields`` - resets previous .only() and .exclude()
+- Added ``QuerySet.exclude``
+- Added Django-style choices
+- Fixed order and filter issue
+- Added ``QuerySet.only`` subfield support
+- Added creation_counter to ``BaseField``, allowing fields to be sorted in the
+  way the user has specified them
+- Fixed various errors
+- Added many tests
+
 Changes in v0.4
 ===============
 - Added ``GridFSStorage`` Django storage backend
docs/conf.py
@@ -38,7 +38,7 @@ master_doc = 'index'

 # General information about the project.
 project = u'MongoEngine'
-copyright = u'2009-2010, Harry Marr'
+copyright = u'2009-2011, Harry Marr'

 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
docs/guide/defining-documents.rst
@@ -36,22 +36,26 @@ are as follows:

 * :class:`~mongoengine.StringField`
 * :class:`~mongoengine.URLField`
+* :class:`~mongoengine.EmailField`
 * :class:`~mongoengine.IntField`
 * :class:`~mongoengine.FloatField`
 * :class:`~mongoengine.DecimalField`
 * :class:`~mongoengine.DateTimeField`
+* :class:`~mongoengine.ComplexDateTimeField`
 * :class:`~mongoengine.ListField`
+* :class:`~mongoengine.SortedListField`
 * :class:`~mongoengine.DictField`
+* :class:`~mongoengine.MapField`
 * :class:`~mongoengine.ObjectIdField`
-* :class:`~mongoengine.EmbeddedDocumentField`
 * :class:`~mongoengine.ReferenceField`
 * :class:`~mongoengine.GenericReferenceField`
+* :class:`~mongoengine.EmbeddedDocumentField`
+* :class:`~mongoengine.GenericEmbeddedDocumentField`
 * :class:`~mongoengine.BooleanField`
 * :class:`~mongoengine.FileField`
-* :class:`~mongoengine.EmailField`
-* :class:`~mongoengine.SortedListField`
 * :class:`~mongoengine.BinaryField`
 * :class:`~mongoengine.GeoPointField`
+* :class:`~mongoengine.SequenceField`

 Field arguments
 ---------------
@@ -105,6 +109,12 @@ arguments can be set on all fields:
 :attr:`choices` (Default: None)
     An iterable of choices to which the value of this field should be limited.

+:attr:`help_text` (Default: None)
+    Optional help text to output with the field - used by form libraries
+
+:attr:`verbose_name` (Default: None)
+    Optional human-readable name for the field - used by form libraries
+

 List fields
 -----------
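To illustrate the two new field arguments, a minimal sketch (the ``User`` class and the strings are assumptions, not taken from this commit)::

    class User(Document):
        email = EmailField(
            required=True,
            verbose_name="Email address",  # assumed label; human-readable name for form libraries
            help_text="Used to sign in and for notifications",  # assumed help string
        )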
@@ -155,6 +165,9 @@ store; in this situation a :class:`~mongoengine.DictField` is appropriate::
     survey_response.answers = response_form.cleaned_data()
     survey_response.save()

+Dictionaries can store complex data, other dictionaries, lists, and references to
+other objects, so they are the most flexible field type available.
+
 Reference fields
 ----------------
 References may be stored to other documents in the database using the
@@ -193,6 +206,59 @@ as the constructor's argument::
     class ProfilePage(Document):
         content = StringField()

+
+Dealing with deletion of referred documents
+'''''''''''''''''''''''''''''''''''''''''''
+By default, MongoDB doesn't check the integrity of your data, so deleting
+documents that other documents still hold references to will lead to consistency
+issues. MongoEngine's :class:`ReferenceField` adds some functionality to
+safeguard against these kinds of database integrity problems, providing each
+reference with a delete rule specification. A delete rule is specified by
+supplying the :attr:`reverse_delete_rule` attribute on the
+:class:`ReferenceField` definition, like this::
+
+    class Employee(Document):
+        ...
+        profile_page = ReferenceField('ProfilePage', reverse_delete_rule=mongoengine.NULLIFY)
+
+The declaration in this example means that when an :class:`Employee` object is
+removed, the :class:`ProfilePage` that belongs to that employee is removed as
+well. If a whole batch of employees is removed, all profile pages that are
+linked are removed as well.
+
+Its value can take any of the following constants:
+
+:const:`mongoengine.DO_NOTHING`
+    This is the default and won't do anything. Deletes are fast, but may cause
+    database inconsistency or dangling references.
+:const:`mongoengine.DENY`
+    Deletion is denied if there still exist references to the object being
+    deleted.
+:const:`mongoengine.NULLIFY`
+    Any object's fields still referring to the object being deleted are removed
+    (using MongoDB's "unset" operation), effectively nullifying the relationship.
+:const:`mongoengine.CASCADE`
+    Any object containing fields that are referring to the object being deleted
+    is deleted first.
+
+
+.. warning::
+    A safety note on setting up these delete rules! Since the delete rules are
+    not recorded on the database level by MongoDB itself, but instead at runtime,
+    in-memory, by the MongoEngine module, it is of the utmost importance
+    that the module that declares the relationship is loaded **BEFORE** the
+    delete is invoked.
+
+    If, for example, the :class:`Employee` object lives in the
+    :mod:`payroll` app, and the :class:`ProfilePage` in the :mod:`people`
+    app, it is extremely important that the :mod:`people` app is loaded
+    before any employee is removed, because otherwise, MongoEngine could
+    never know this relationship exists.
+
+    In Django, be sure to put all apps that have such delete rule declarations in
+    their :file:`models.py` in the :const:`INSTALLED_APPS` tuple.
+
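A minimal sketch of one of the other rules described above, reusing the commit's ``Employee``/``ProfilePage`` pair (the CASCADE variant is an illustrative assumption)::

    class ProfilePage(Document):
        content = StringField()

    class Employee(Document):
        ...
        # CASCADE: documents whose fields refer to the deleted document are
        # deleted too, per the constant descriptions above
        profile_page = ReferenceField('ProfilePage', reverse_delete_rule=mongoengine.CASCADE)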
 Generic reference fields
 ''''''''''''''''''''''''
 A second kind of reference field also exists,
@@ -219,6 +285,7 @@ kind of :class:`~mongoengine.Document`, and hence doesn't take a
     Bookmark(bookmark_object=post).save()

 .. note::
+
    Using :class:`~mongoengine.GenericReferenceField`\ s is slightly less
    efficient than the standard :class:`~mongoengine.ReferenceField`\ s, so if
    you will only be referencing one document type, prefer the standard
@@ -288,9 +355,10 @@ Indexes
 You can specify indexes on collections to make querying faster. This is done
 by creating a list of index specifications called :attr:`indexes` in the
 :attr:`~mongoengine.Document.meta` dictionary, where an index specification may
-either be a single field name, or a tuple containing multiple field names. A
-direction may be specified on fields by prefixing the field name with a **+**
-or a **-** sign. Note that direction only matters on multi-field indexes. ::
+either be a single field name, a tuple containing multiple field names, or a
+dictionary containing a full index definition. A direction may be specified on
+fields by prefixing the field name with a **+** or a **-** sign. Note that
+direction only matters on multi-field indexes. ::

     class Page(Document):
         title = StringField()
@@ -299,7 +367,23 @@ or a **-** sign. Note that direction only matters on multi-field indexes. ::
             'indexes': ['title', ('title', '-rating')]
         }

+If a dictionary is passed then the following options are available:
+
+:attr:`fields` (Default: None)
+    The fields to index. Specified in the same format as described above.
+
+:attr:`types` (Default: True)
+    Whether the index should have the :attr:`_types` field added automatically
+    to the start of the index.
+
+:attr:`sparse` (Default: False)
+    Whether the index should be sparse.
+
+:attr:`unique` (Default: False)
+    Whether the index should be unique.
+
 .. note::

    Geospatial indexes will be automatically created for all
    :class:`~mongoengine.GeoPointField`\ s
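A sketch of the dictionary index form described above, reusing the commit's ``Page`` example (the option values are illustrative assumptions)::

    class Page(Document):
        title = StringField()
        rating = StringField()
        meta = {
            'indexes': [
                {'fields': ('title', '-rating'),  # same format as the tuple form
                 'types': False,   # don't prepend the automatic _types field
                 'sparse': True,   # index only documents containing the fields
                 'unique': False},
            ]
        }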
docs/guide/document-instances.rst
@@ -18,10 +18,22 @@ attribute syntax::

 Saving and deleting documents
 =============================
-To save the document to the database, call the
-:meth:`~mongoengine.Document.save` method. If the document does not exist in
-the database, it will be created. If it does already exist, it will be
-updated.
+MongoEngine tracks changes to documents to provide efficient saving. To save
+the document to the database, call the :meth:`~mongoengine.Document.save` method.
+If the document does not exist in the database, it will be created. If it does
+already exist, then any changes will be updated atomically. For example::
+
+    >>> page = Page(title="Test Page")
+    >>> page.save()  # Performs an insert
+    >>> page.title = "My Page"
+    >>> page.save()  # Performs an atomic set on the title field.
+
+.. note::
+
+    Changes to documents are tracked and on the whole perform `set` operations.
+
+    * ``list_field.pop(0)`` - *sets* the resulting list
+    * ``del(list_field)`` - *unsets* the whole list
+
 To delete a document, call the :meth:`~mongoengine.Document.delete` method.
 Note that this will only work if the document exists in the database and has a
@@ -67,6 +79,7 @@ is an alias to :attr:`id`::
     >>> page.id == page.pk

 .. note::
+
    If you define your own primary key field, the field implicitly becomes
    required, so a :class:`ValidationError` will be thrown if you don't provide
    it.
docs/guide/gridfs.rst
@@ -66,6 +66,7 @@ Deleting stored files is achieved with the :func:`delete` method::
     marmot.photo.delete()

 .. note::
+
    The FileField in a Document actually only stores the ID of a file in a
    separate GridFS collection. This means that deleting a document
    with a defined FileField does not actually delete the file. You must be
docs/guide/index.rst
@@ -11,3 +11,4 @@ User Guide
    document-instances
    querying
    gridfs
+   signals
docs/guide/installing.rst
@@ -1,31 +1,31 @@
 ======================
 Installing MongoEngine
 ======================

 To use MongoEngine, you will need to download `MongoDB <http://mongodb.org/>`_
 and ensure it is running in an accessible location. You will also need
 `PyMongo <http://api.mongodb.org/python>`_ to use MongoEngine, but if you
 install MongoEngine using setuptools, then the dependencies will be handled for
 you.

-MongoEngine is available on PyPI, so to use it you can use
-:program:`easy_install`:
+MongoEngine is available on PyPI, so to use it you can use :program:`pip`:

 .. code-block:: console

-    # easy_install mongoengine
+    $ pip install mongoengine

 Alternatively, if you don't have setuptools installed, `download it from PyPi
 <http://pypi.python.org/pypi/mongoengine/>`_ and run

 .. code-block:: console

-    # python setup.py install
+    $ python setup.py install

 To use the bleeding-edge version of MongoEngine, you can get the source from
 `GitHub <http://github.com/hmarr/mongoengine/>`_ and install it as above:

 .. code-block:: console

-    # git clone git://github.com/hmarr/mongoengine
-    # cd mongoengine
-    # python setup.py install
+    $ git clone git://github.com/hmarr/mongoengine
+    $ cd mongoengine
+    $ python setup.py install
docs/guide/querying.rst
@@ -5,8 +5,8 @@ Querying the database
 is used for accessing the objects in the database associated with the class.
 The :attr:`objects` attribute is actually a
 :class:`~mongoengine.queryset.QuerySetManager`, which creates and returns a new
-a new :class:`~mongoengine.queryset.QuerySet` object on access. The
-:class:`~mongoengine.queryset.QuerySet` object may may be iterated over to
+:class:`~mongoengine.queryset.QuerySet` object on access. The
+:class:`~mongoengine.queryset.QuerySet` object may be iterated over to
 fetch documents from the database::

     # Prints out the names of all the users in the database
@@ -14,6 +14,7 @@ fetch documents from the database::
         print user.name

 .. note::
+
    Once the iteration finishes (when :class:`StopIteration` is raised),
    :meth:`~mongoengine.queryset.QuerySet.rewind` will be called so that the
    :class:`~mongoengine.queryset.QuerySet` may be iterated over again. The
@@ -39,29 +40,6 @@ syntax::
     # been written by a user whose 'country' field is set to 'uk'
     uk_pages = Page.objects(author__country='uk')

-Querying lists
---------------
-On most fields, this syntax will look up documents where the field specified
-matches the given value exactly, but when the field refers to a
-:class:`~mongoengine.ListField`, a single item may be provided, in which case
-lists that contain that item will be matched::
-
-    class Page(Document):
-        tags = ListField(StringField())
-
-    # This will match all pages that have the word 'coding' as an item in the
-    # 'tags' list
-    Page.objects(tags='coding')
-
-Raw queries
------------
-It is possible to provide a raw PyMongo query as a query parameter, which will
-be integrated directly into the query. This is done using the ``__raw__``
-keyword argument::
-
-    Page.objects(__raw__={'tags': 'coding'})
-
-.. versionadded:: 0.4
-
 Query operators
 ===============
@@ -99,26 +77,67 @@ expressions:
 * ``endswith`` -- string field ends with value
 * ``iendswith`` -- string field ends with value (case insensitive)

-.. versionadded:: 0.3
-
 There are a few special operators for performing geographical queries, that
 may be used with :class:`~mongoengine.GeoPointField`\ s:

 * ``within_distance`` -- provide a list containing a point and a maximum
   distance (e.g. [(41.342, -87.653), 5])
+* ``within_spherical_distance`` -- same as above but using the spherical geo model
+  (e.g. [(41.342, -87.653), 5/earth_radius])
+* ``near`` -- order the documents by how close they are to a given point
+* ``near_sphere`` -- same as above but using the spherical geo model
 * ``within_box`` -- filter documents to those within a given bounding box (e.g.
   [(35.0, -125.0), (40.0, -100.0)])
-* ``near`` -- order the documents by how close they are to a given point
+* ``within_polygon`` -- filter documents to those within a given polygon (e.g.
+  [(41.91, -87.69), (41.92, -87.68), (41.91, -87.65), (41.89, -87.65)]).
+
+  .. note:: Requires Mongo Server 2.0
-.. versionadded:: 0.4

-Querying by position
-====================
+Querying lists
+--------------
+On most fields, this syntax will look up documents where the field specified
+matches the given value exactly, but when the field refers to a
+:class:`~mongoengine.ListField`, a single item may be provided, in which case
+lists that contain that item will be matched::
+
+    class Page(Document):
+        tags = ListField(StringField())
+
+    # This will match all pages that have the word 'coding' as an item in the
+    # 'tags' list
+    Page.objects(tags='coding')
+
 It is possible to query by position in a list by using a numerical value as a
 query operator. So if you wanted to find all pages whose first tag was ``db``,
 you could use the following query::

-    BlogPost.objects(tags__0='db')
+    Page.objects(tags__0='db')
+
+If you only want to fetch part of a list, e.g. to paginate a list, then
+the `slice` operator is required::
+
+    # comments - skip 5, limit 10
+    Page.objects.fields(slice__comments=[5, 10])
+
+For updating documents, if you don't know the position in a list, you can use
+the $ positional operator ::
+
+    Post.objects(comments__by="joe").update(**{'inc__comments__$__votes': 1})
+
+However, this doesn't map well to the syntax, so you can also use a capital S instead ::
+
+    Post.objects(comments__by="joe").update(inc__comments__S__votes=1)
+
+.. note:: Due to a MongoDB limitation, the $ operator currently only applies to
+   the first matched item in the query.
+
+
+Raw queries
+-----------
+It is possible to provide a raw PyMongo query as a query parameter, which will
+be integrated directly into the query. This is done using the ``__raw__``
+keyword argument::
+
+    Page.objects(__raw__={'tags': 'coding'})

 .. versionadded:: 0.4
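A sketch of the new geo operators listed above, assuming a hypothetical ``Place`` document with a :class:`~mongoengine.GeoPointField`::

    class Place(Document):
        point = GeoPointField()

    earth_radius = 6371.0  # kilometres, assumed unit
    # Within 5 km of a point, using the spherical model
    Place.objects(point__within_spherical_distance=[(41.342, -87.653), 5 / earth_radius])

    # Inside a polygon (requires Mongo Server 2.0)
    Place.objects(point__within_polygon=[(41.91, -87.69), (41.92, -87.68),
                                         (41.91, -87.65), (41.89, -87.65)])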
@@ -175,6 +194,22 @@ to be created::
     >>> a.name == b.name and a.age == b.age
     True

+Dereferencing results
+---------------------
+When iterating the results of :class:`~mongoengine.ListField` or
+:class:`~mongoengine.DictField` we automatically dereference any
+:class:`~pymongo.dbref.DBRef` objects as efficiently as possible, reducing the
+number of queries to Mongo.
+
+There are times when that efficiency is not enough; documents that have
+:class:`~mongoengine.ReferenceField` objects or
+:class:`~mongoengine.GenericReferenceField` objects at the top level are
+expensive, as the number of queries to MongoDB can quickly rise.
+
+To limit the number of queries use
+:func:`~mongoengine.queryset.QuerySet.select_related` which converts the
+QuerySet to a list and dereferences as efficiently as possible.
+
 Default Document queries
 ========================
 By default, the objects :attr:`~mongoengine.Document.objects` attribute on a
|
|||||||
yearly_expense = Employee.objects.sum('salary')
|
yearly_expense = Employee.objects.sum('salary')
|
||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
|
|
||||||
If the field isn't present on a document, that document will be ignored from
|
If the field isn't present on a document, that document will be ignored from
|
||||||
the sum.
|
the sum.
|
||||||
|
|
||||||
@@ -302,6 +338,11 @@ will be given::
     >>> f.rating # default value
     3

+.. note::
+
+    :meth:`~mongoengine.queryset.QuerySet.exclude` is the opposite of
+    :meth:`~mongoengine.queryset.QuerySet.only`; use it if you want to exclude a field.
+
 If you later need the missing fields, just call
 :meth:`~mongoengine.Document.reload` on your document.
@@ -325,11 +366,66 @@ calling it with keyword arguments::
     # Get top posts
     Post.objects((Q(featured=True) & Q(hits__gte=1000)) | Q(hits__gte=5000))

-.. warning::
-   Only use these advanced queries if absolutely necessary as they will execute
-   significantly slower than regular queries. This is because they are not
-   natively supported by MongoDB -- they are compiled to Javascript and sent
-   to the server for execution.
+.. _guide-atomic-updates:
+
+Atomic updates
+==============
+Documents may be updated atomically by using the
+:meth:`~mongoengine.queryset.QuerySet.update_one` and
+:meth:`~mongoengine.queryset.QuerySet.update` methods on a
+:meth:`~mongoengine.queryset.QuerySet`. There are several different "modifiers"
+that you may use with these methods:
+
+* ``set`` -- set a particular value
+* ``unset`` -- delete a particular value (since MongoDB v1.3+)
+* ``inc`` -- increment a value by a given amount
+* ``dec`` -- decrement a value by a given amount
+* ``push`` -- append a value to a list
+* ``push_all`` -- append several values to a list
+* ``pop`` -- remove the first or last element of a list
+* ``pull`` -- remove a value from a list
+* ``pull_all`` -- remove several values from a list
+* ``add_to_set`` -- add a value to a list only if it's not already in the list
+
+The syntax for atomic updates is similar to the querying syntax, but the
+modifier comes before the field, not after it::
+
+    >>> post = BlogPost(title='Test', page_views=0, tags=['database'])
+    >>> post.save()
+    >>> BlogPost.objects(id=post.id).update_one(inc__page_views=1)
+    >>> post.reload()  # the document has been changed, so we need to reload it
+    >>> post.page_views
+    1
+    >>> BlogPost.objects(id=post.id).update_one(set__title='Example Post')
+    >>> post.reload()
+    >>> post.title
+    'Example Post'
+    >>> BlogPost.objects(id=post.id).update_one(push__tags='nosql')
+    >>> post.reload()
+    >>> post.tags
+    ['database', 'nosql']
+
+.. note::
+
+    In version 0.5 the :meth:`~mongoengine.Document.save` runs atomic updates
+    on changed documents by tracking changes to that document.
+
+The positional operator allows you to update list items without knowing the
+index position, therefore making the update a single atomic operation. As we
+cannot use the `$` syntax in keyword arguments it has been mapped to `S`::
+
+    >>> post = BlogPost(title='Test', page_views=0, tags=['database', 'mongo'])
+    >>> post.save()
+    >>> BlogPost.objects(id=post.id, tags='mongo').update(set__tags__S='mongodb')
+    >>> post.reload()
+    >>> post.tags
+    ['database', 'mongodb']
+
+.. note::
+    Currently only top-level lists are handled; future versions of MongoDB /
+    PyMongo plan to support nested positional operators. See `The $ positional
+    operator <http://www.mongodb.org/display/DOCS/Updating#Updating-The%24positionaloperator>`_.

 Server-side javascript execution
 ================================
@@ -433,43 +529,3 @@ following example shows how the substitutions are made::
         return comments;
     }
 """)
-
-.. _guide-atomic-updates:
-
-Atomic updates
-==============
-Documents may be updated atomically by using the
-:meth:`~mongoengine.queryset.QuerySet.update_one` and
-:meth:`~mongoengine.queryset.QuerySet.update` methods on a
-:meth:`~mongoengine.queryset.QuerySet`. There are several different "modifiers"
-that you may use with these methods:
-
-* ``set`` -- set a particular value
-* ``unset`` -- delete a particular value (since MongoDB v1.3+)
-* ``inc`` -- increment a value by a given amount
-* ``dec`` -- decrement a value by a given amount
-* ``pop`` -- remove the last item from a list
-* ``push`` -- append a value to a list
-* ``push_all`` -- append several values to a list
-* ``pop`` -- remove the first or last element of a list
-* ``pull`` -- remove a value from a list
-* ``pull_all`` -- remove several values from a list
-* ``add_to_set`` -- add value to a list only if its not in the list already
-
-The syntax for atomic updates is similar to the querying syntax, but the
-modifier comes before the field, not after it::
-
-    >>> post = BlogPost(title='Test', page_views=0, tags=['database'])
-    >>> post.save()
-    >>> BlogPost.objects(id=post.id).update_one(inc__page_views=1)
-    >>> post.reload()  # the document has been changed, so we need to reload it
-    >>> post.page_views
-    1
-    >>> BlogPost.objects(id=post.id).update_one(set__title='Example Post')
-    >>> post.reload()
-    >>> post.title
-    'Example Post'
-    >>> BlogPost.objects(id=post.id).update_one(push__tags='nosql')
-    >>> post.reload()
-    >>> post.tags
-    ['database', 'nosql']
docs/guide/signals.rst (new file, 49 lines)
@@ -0,0 +1,49 @@
+.. _signals:
+
+Signals
+=======
+
+.. versionadded:: 0.5
+
+Signal support is provided by the excellent `blinker`_ library and
+will gracefully fall back if it is not available.
+
+
+The following document signals exist in MongoEngine and are pretty self-explanatory:
+
+* `mongoengine.signals.pre_init`
+* `mongoengine.signals.post_init`
+* `mongoengine.signals.pre_save`
+* `mongoengine.signals.post_save`
+* `mongoengine.signals.pre_delete`
+* `mongoengine.signals.post_delete`
+
+Example usage::
+
+    import logging
+
+    from mongoengine import *
+    from mongoengine import signals
+
+    class Author(Document):
+        name = StringField()
+
+        def __unicode__(self):
+            return self.name
+
+        @classmethod
+        def pre_save(cls, sender, document, **kwargs):
+            logging.debug("Pre Save: %s" % document.name)
+
+        @classmethod
+        def post_save(cls, sender, document, **kwargs):
+            logging.debug("Post Save: %s" % document.name)
+            if 'created' in kwargs:
+                if kwargs['created']:
+                    logging.debug("Created")
+                else:
+                    logging.debug("Updated")
+
+    signals.pre_save.connect(Author.pre_save, sender=Author)
+    signals.post_save.connect(Author.post_save, sender=Author)
+
+
+.. _blinker: http://pypi.python.org/pypi/blinker
docs/index.rst
@@ -2,34 +2,62 @@
 MongoEngine User Documentation
 ==============================

-MongoEngine is an Object-Document Mapper, written in Python for working with
+**MongoEngine** is an Object-Document Mapper, written in Python for working with
 MongoDB. To install it, simply run

 .. code-block:: console

     # pip install -U mongoengine

-The source is available on `GitHub <http://github.com/hmarr/mongoengine>`_.
+:doc:`tutorial`
+    Start here for a quick overview.
+
+:doc:`guide/index`
+    The full guide to MongoEngine
+
+:doc:`apireference`
+    The complete API documentation.
+
+:doc:`django`
+    Using MongoEngine and Django
+
+Community
+---------
+
 To get help with using MongoEngine, use the `MongoEngine Users mailing list
 <http://groups.google.com/group/mongoengine-users>`_ or come chat on the
 `#mongoengine IRC channel <irc://irc.freenode.net/mongoengine>`_.

-If you are interested in contributing, join the developers' `mailing list
+Contributing
+------------
+
+The source is available on `GitHub <http://github.com/hmarr/mongoengine>`_ and
+contributions are always encouraged. Contributions can be as simple as
+minor tweaks to this documentation. To contribute, fork the project on
+`GitHub <http://github.com/hmarr/mongoengine>`_ and send a
+pull request.
+
+Also, you can join the developers' `mailing list
 <http://groups.google.com/group/mongoengine-dev>`_.

+Changes
+-------
+See the :doc:`changelog` for a full list of changes to MongoEngine.
+
 .. toctree::
-    :maxdepth: 2
+    :hidden:

     tutorial
     guide/index
     apireference
     django
     changelog
+    upgrade

 Indices and tables
-==================
+------------------

 * :ref:`genindex`
+* :ref:`modindex`
 * :ref:`search`
docs/tutorial.rst
@@ -152,6 +152,21 @@ We can then store a list of comment documents in our post document::
         tags = ListField(StringField(max_length=30))
         comments = ListField(EmbeddedDocumentField(Comment))

+Handling deletions of references
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The :class:`~mongoengine.ReferenceField` object takes a keyword
+`reverse_delete_rule` for handling deletion rules if the reference is deleted.
+To delete all the posts if a user is deleted, set the rule::
+
+    class Post(Document):
+        title = StringField(max_length=120, required=True)
+        author = ReferenceField(User, reverse_delete_rule=CASCADE)
+        tags = ListField(StringField(max_length=30))
+        comments = ListField(EmbeddedDocumentField(Comment))
+
+See :class:`~mongoengine.ReferenceField` for more information.
+
 Adding data to our Tumblelog
 ============================
 Now that we've defined how our documents will be structured, let's start adding
@@ -250,5 +265,5 @@ the first matched by the query you provide. Aggregation functions may also be
 used on :class:`~mongoengine.queryset.QuerySet` objects::

     num_posts = Post.objects(tags='mongodb').count()
-    print 'Found % posts with tag "mongodb"' % num_posts
+    print 'Found %d posts with tag "mongodb"' % num_posts
docs/upgrade.rst (new file, 97 lines)
@@ -0,0 +1,97 @@
+=========
+Upgrading
+=========
+
+0.4 to 0.5
+==========
+
+There have been the following backwards incompatibilities from 0.4 to 0.5. The
+main areas of change are: choices in fields, map_reduce and collection names.
+
+Choice options
+--------------
+
+Choices are now expected to be an iterable of tuples, with the first element in each
+tuple being the actual value to be stored. The second element is the
+human-readable name for the option.
+
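A sketch of the new tuple form (the names and values are assumptions)::

    SIZE = (('S', 'Small'), ('M', 'Medium'), ('L', 'Large'))

    class Shirt(Document):
        size = StringField(max_length=1, choices=SIZE)  # 'S', 'M' or 'L' is stored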
+PyMongo / MongoDB
+-----------------
+
+map_reduce now requires PyMongo 1.11+. The PyMongo merge_output and reduce_output
+parameters have been deprecated.
+
+More methods now use map_reduce, as db.eval is not supported for sharding; as such
+the following have been changed:
+
+* :meth:`~mongoengine.queryset.QuerySet.sum`
+* :meth:`~mongoengine.queryset.QuerySet.average`
+* :meth:`~mongoengine.queryset.QuerySet.item_frequencies`
+
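The calling convention of these helpers is unchanged; only the server-side mechanism moved from db.eval to map/reduce. A sketch with assumed document and field names::

    total = Employee.objects.sum('salary')
    mean = Employee.objects.average('salary')
    freqs = Employee.objects.item_frequencies('department')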
+Default collection naming
+-------------------------
+
+Previously it was just lowercase; it's now much more pythonic and readable, as it's
+lowercase with underscores. Previously ::
+
+    class MyAceDocument(Document):
+        pass
+
+    MyAceDocument._meta['collection'] == 'myacedocument'
+
+In 0.5 this will change to ::
+
+    class MyAceDocument(Document):
+        pass
+
+    MyAceDocument._get_collection_name() == 'my_ace_document'
+
+To upgrade, use a Mixin class to set meta like so ::
+
+    class BaseMixin(object):
+        meta = {
+            'collection': lambda c: c.__name__.lower()
+        }
+
+    class MyAceDocument(Document, BaseMixin):
+        pass
+
+    MyAceDocument._get_collection_name() == 'myacedocument'
+
+Alternatively, you can rename your collections, e.g. ::
+
+    from mongoengine.connection import _get_db
+    from mongoengine.base import _document_registry
+
+    def rename_collections():
+        db = _get_db()
+
+        failure = False
+
+        collection_names = [d._get_collection_name() for d in _document_registry.values()]
+
+        for new_style_name in collection_names:
+            if not new_style_name:  # embedded documents don't have collections
+                continue
+            old_style_name = new_style_name.replace('_', '')
+
+            if old_style_name == new_style_name:
+                continue  # Nothing to do
+
+            existing = db.collection_names()
+            if old_style_name in existing:
+                if new_style_name in existing:
+                    failure = True
+                    print "FAILED to rename: %s to %s (already exists)" % (
+                        old_style_name, new_style_name)
+                else:
+                    db[old_style_name].rename(new_style_name)
+                    print "Renamed: %s to %s" % (old_style_name, new_style_name)
+
+        if failure:
+            print "Upgrading collection names failed"
+        else:
+            print "Upgraded collection names"
mongoengine/__init__.py
@@ -6,13 +6,16 @@ import connection
 from connection import *
 import queryset
 from queryset import *
+import signals
+from signals import *

 __all__ = (document.__all__ + fields.__all__ + connection.__all__ +
-           queryset.__all__)
+           queryset.__all__ + signals.__all__)

 __author__ = 'Harry Marr'

-VERSION = (0, 4, 0)
+VERSION = (0, 4, 1)


 def get_version():
     version = '%s.%s' % (VERSION[0], VERSION[1])
@@ -21,4 +24,3 @@ def get_version():
     return version

 __version__ = get_version()
-
mongoengine/base.py
@@ -1,33 +1,68 @@
 from queryset import QuerySet, QuerySetManager
 from queryset import DoesNotExist, MultipleObjectsReturned
+from queryset import DO_NOTHING
+
+from mongoengine import signals
+
+import weakref
 import sys
 import pymongo
 import pymongo.objectid
+import operator
+from functools import partial
+

-_document_registry = {}
+class NotRegistered(Exception):
+    pass

-def get_document(name):
-    return _document_registry[name]
+
+class InvalidDocumentError(Exception):
+    pass
+

 class ValidationError(Exception):
     pass


+_document_registry = {}
+
+
+def get_document(name):
+    doc = _document_registry.get(name, None)
+    if not doc:
+        # Possible old style names
+        end = ".%s" % name
+        possible_match = [k for k in _document_registry.keys() if k.endswith(end)]
+        if len(possible_match) == 1:
+            doc = _document_registry.get(possible_match.pop(), None)
+    if not doc:
+        raise NotRegistered("""
+            `%s` has not been registered in the document registry.
+            Importing the document class automatically registers it; has it
+            been imported?
+        """.strip() % name)
+    return doc
+
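A sketch of how the registry lookup above behaves (the class name is an assumption)::

    from mongoengine.base import get_document

    # Exact key first; otherwise falls back to a single registered key
    # ending in '.User'. Raises NotRegistered if neither matches.
    User = get_document('User')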
 class BaseField(object):
     """A base class for fields in a MongoDB document. Instances of this class
     may be added to subclasses of `Document` to define a document's schema.
+
+    .. versionchanged:: 0.5 - added verbose_name and help_text
     """

     # Fields may have _types inserted into indexes by default
     _index_with_types = True
     _geo_index = False

+    # These track each time a Field instance is created. Used to retain order.
+    # The auto_creation_counter is used for fields that MongoEngine implicitly
+    # creates, creation_counter is used for all user-specified fields.
+    creation_counter = 0
+    auto_creation_counter = -1
+
     def __init__(self, db_field=None, name=None, required=False, default=None,
                  unique=False, unique_with=None, primary_key=False,
-                 validation=None, choices=None):
+                 validation=None, choices=None, verbose_name=None, help_text=None):
         self.db_field = (db_field or name) if not primary_key else '_id'
         if name:
             import warnings
@@ -41,6 +76,16 @@ class BaseField(object):
         self.primary_key = primary_key
         self.validation = validation
         self.choices = choices
+        self.verbose_name = verbose_name
+        self.help_text = help_text
+
+        # Adjust the appropriate creation counter, and save our local copy.
+        if self.db_field == '_id':
+            self.creation_counter = BaseField.auto_creation_counter
+            BaseField.auto_creation_counter -= 1
+        else:
+            self.creation_counter = BaseField.creation_counter
+            BaseField.creation_counter += 1

     def __get__(self, instance, owner):
         """Descriptor for retrieving a value from a field in a document. Do
@@ -57,12 +102,19 @@ class BaseField(object):
         # Allow callable default values
         if callable(value):
             value = value()
+
+        # Convert lists / values so we can watch for any changes on them
+        if isinstance(value, (list, tuple)) and not isinstance(value, BaseList):
+            value = BaseList(value, instance=instance, name=self.name)
+        elif isinstance(value, dict) and not isinstance(value, BaseDict):
+            value = BaseDict(value, instance=instance, name=self.name)
         return value

     def __set__(self, instance, value):
         """Descriptor for assigning a value to a field in a document.
         """
         instance._data[self.name] = value
+        instance._mark_as_changed(self.name)

     def to_python(self, value):
         """Convert a MongoDB-compatible type to a Python type.
@@ -87,9 +139,9 @@ class BaseField(object):
     def _validate(self, value):
         # check choices
         if self.choices is not None:
-            if value not in self.choices:
-                raise ValidationError("Value must be one of %s."
-                                      % unicode(self.choices))
+            option_keys = [option_key for option_key, option_value in self.choices]
+            if value not in option_keys:
+                raise ValidationError("Value must be one of %s." % unicode(option_keys))

         # check validation argument
         if self.validation is not None:
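The tuple-based choices pair with the get_FIELD_display() helper mentioned in the changelog; a sketch with assumed names::

    class Shirt(Document):
        size = StringField(choices=(('S', 'Small'), ('M', 'Medium')))

    shirt = Shirt(size='S')
    shirt.get_size_display()  # returns 'Small'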
@ -102,13 +154,159 @@ class BaseField(object):
|
|||||||
|
|
||||||
self.validate(value)
|
self.validate(value)
|
||||||
|
|
||||||
|
|
+class ComplexBaseField(BaseField):
+    """Handles complex fields, such as lists / dictionaries.
+
+    Allows for nesting of embedded documents inside complex types.
+    Handles the lazy dereferencing of a queryset by lazily dereferencing all
+    items in a list / dict rather than one at a time.
+
+    .. versionadded:: 0.5
+    """
+
+    field = None
+
+    def __get__(self, instance, owner):
+        """Descriptor to automatically dereference references.
+        """
+        if instance is None:
+            # Document class being used rather than a document object
+            return self
+
+        from dereference import dereference
+        instance._data[self.name] = dereference(
+            instance._data.get(self.name), max_depth=1, instance=instance, name=self.name, get=True
+        )
+        return super(ComplexBaseField, self).__get__(instance, owner)
+
+    def to_python(self, value):
+        """Convert a MongoDB-compatible type to a Python type.
+        """
+        from mongoengine import Document
+
+        if isinstance(value, basestring):
+            return value
+
+        if hasattr(value, 'to_python'):
+            return value.to_python()
+
+        is_list = False
+        if not hasattr(value, 'items'):
+            try:
+                is_list = True
+                value = dict([(k,v) for k,v in enumerate(value)])
+            except TypeError:  # Not iterable return the value
+                return value
+
+        if self.field:
+            value_dict = dict([(key, self.field.to_python(item)) for key, item in value.items()])
+        else:
+            value_dict = {}
+            for k,v in value.items():
+                if isinstance(v, Document):
+                    # We need the id from the saved object to create the DBRef
+                    if v.pk is None:
+                        raise ValidationError('You can only reference documents once '
+                                              'they have been saved to the database')
+                    collection = v._get_collection_name()
+                    value_dict[k] = pymongo.dbref.DBRef(collection, v.pk)
+                elif hasattr(v, 'to_python'):
+                    value_dict[k] = v.to_python()
+                else:
+                    value_dict[k] = self.to_python(v)
+
+        if is_list:  # Convert back to a list
+            return [v for k,v in sorted(value_dict.items(), key=operator.itemgetter(0))]
+        return value_dict
+
+    def to_mongo(self, value):
+        """Convert a Python type to a MongoDB-compatible type.
+        """
+        from mongoengine import Document
+
+        if isinstance(value, basestring):
+            return value
+
+        if hasattr(value, 'to_mongo'):
+            return value.to_mongo()
+
+        is_list = False
+        if not hasattr(value, 'items'):
+            try:
+                is_list = True
+                value = dict([(k,v) for k,v in enumerate(value)])
+            except TypeError:  # Not iterable return the value
+                return value
+
+        if self.field:
+            value_dict = dict([(key, self.field.to_mongo(item)) for key, item in value.items()])
+        else:
+            value_dict = {}
+            for k,v in value.items():
+                if isinstance(v, Document):
+                    # We need the id from the saved object to create the DBRef
+                    if v.pk is None:
+                        raise ValidationError('You can only reference documents once '
+                                              'they have been saved to the database')
+
+                    # If its a document that is not inheritable it won't have
+                    # _types / _cls data so make it a generic reference allows
+                    # us to dereference
+                    meta = getattr(v, 'meta', getattr(v, '_meta', {}))
+                    if meta and not meta['allow_inheritance'] and not self.field:
+                        from fields import GenericReferenceField
+                        value_dict[k] = GenericReferenceField().to_mongo(v)
+                    else:
+                        collection = v._get_collection_name()
+                        value_dict[k] = pymongo.dbref.DBRef(collection, v.pk)
+                elif hasattr(v, 'to_mongo'):
+                    value_dict[k] = v.to_mongo()
+                else:
+                    value_dict[k] = self.to_mongo(v)
+
+        if is_list:  # Convert back to a list
+            return [v for k,v in sorted(value_dict.items(), key=operator.itemgetter(0))]
+        return value_dict
+
+    def validate(self, value):
+        """If field provided ensure the value is valid.
+        """
+        if self.field:
+            try:
+                if hasattr(value, 'iteritems'):
+                    [self.field.validate(v) for k,v in value.iteritems()]
+                else:
+                    [self.field.validate(v) for v in value]
+            except Exception, err:
+                raise ValidationError('Invalid %s item (%s)' % (
+                        self.field.__class__.__name__, str(v)))
+
+    def prepare_query_value(self, op, value):
+        return self.to_mongo(value)
+
+    def lookup_member(self, member_name):
+        if self.field:
+            return self.field.lookup_member(member_name)
+        return None
+
+    def _set_owner_document(self, owner_document):
+        if self.field:
+            self.field.owner_document = owner_document
+        self._owner_document = owner_document
+
+    def _get_owner_document(self, owner_document):
+        self._owner_document = owner_document
+
+    owner_document = property(_get_owner_document, _set_owner_document)
+
+
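To illustrate what `ComplexBaseField.to_mongo` produces, here is a small sketch (the `Author` and `Post` documents are hypothetical): a list of saved documents is serialised to `pymongo.dbref.DBRef` objects, while unsaved documents are rejected because they have no pk to build the reference from.

from mongoengine import Document, StringField, ListField, ReferenceField

class Author(Document):
    name = StringField()

class Post(Document):
    # ListField subclasses ComplexBaseField, so its to_mongo walks each item
    authors = ListField(ReferenceField(Author))

bob = Author(name='Bob')
bob.save()
post = Post(authors=[bob])
data = post.to_mongo()
# data['authors'] is now [DBRef('author', bob.pk)], ready for MongoDB.
# Had bob not been saved, to_mongo would raise ValidationError instead.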
 class ObjectIdField(BaseField):
     """An field wrapper around MongoDB's ObjectIds.
     """

     def to_python(self, value):
         return value
-        # return unicode(value)

     def to_mongo(self, value):
         if not isinstance(value, pymongo.objectid.ObjectId):
@@ -143,25 +341,30 @@ class DocumentMetaclass(type):
         class_name = [name]
         superclasses = {}
         simple_class = True

         for base in bases:
             # Include all fields present in superclasses
             if hasattr(base, '_fields'):
                 doc_fields.update(base._fields)
-                class_name.append(base._class_name)
                 # Get superclasses from superclass
                 superclasses[base._class_name] = base
                 superclasses.update(base._superclasses)
+            else:  # Add any mixin fields
+                attrs.update(dict([(k,v) for k,v in base.__dict__.items()
+                                   if issubclass(v.__class__, BaseField)]))

-            if hasattr(base, '_meta'):
+            if hasattr(base, '_meta') and not base._meta.get('abstract'):
                 # Ensure that the Document class may be subclassed -
                 # inheritance may be disabled to remove dependency on
                 # additional fields _cls and _types
+                class_name.append(base._class_name)
                 if base._meta.get('allow_inheritance', True) == False:
                     raise ValueError('Document %s may not be subclassed' %
                                      base.__name__)
                 else:
                     simple_class = False

+        doc_class_name = '.'.join(reversed(class_name))
         meta = attrs.get('_meta', attrs.get('meta', {}))

         if 'allow_inheritance' not in meta:
@@ -169,12 +372,11 @@ class DocumentMetaclass(type):

         # Only simple classes - direct subclasses of Document - may set
         # allow_inheritance to False
-        if not simple_class and not meta['allow_inheritance']:
+        if not simple_class and not meta['allow_inheritance'] and not meta['abstract']:
             raise ValueError('Only direct subclasses of Document may set '
                              '"allow_inheritance" to False')
         attrs['_meta'] = meta
-        attrs['_class_name'] = '.'.join(reversed(class_name))
+        attrs['_class_name'] = doc_class_name
         attrs['_superclasses'] = superclasses

         # Add the document's fields to the _fields attribute
@@ -186,10 +388,21 @@ class DocumentMetaclass(type):
             attr_value.db_field = attr_name
             doc_fields[attr_name] = attr_value
         attrs['_fields'] = doc_fields
+        attrs['_db_field_map'] = dict([(k, v.db_field) for k, v in doc_fields.items() if k!=v.db_field])
+        attrs['_reverse_db_field_map'] = dict([(v, k) for k, v in attrs['_db_field_map'].items()])
+
+        from mongoengine import Document
+
         new_class = super_new(cls, name, bases, attrs)
         for field in new_class._fields.values():
             field.owner_document = new_class
+            delete_rule = getattr(field, 'reverse_delete_rule', DO_NOTHING)
+            if delete_rule != DO_NOTHING:
+                field.document_type.register_delete_rule(new_class, field.name,
+                                                         delete_rule)
+
+            if field.name and hasattr(Document, field.name):
+                raise InvalidDocumentError("%s is a document method and not a valid field name" % field.name)

         module = attrs.get('__module__')

@@ -205,7 +418,7 @@ class DocumentMetaclass(type):
         new_class.add_to_class('MultipleObjectsReturned', exc)

         global _document_registry
-        _document_registry[name] = new_class
+        _document_registry[doc_class_name] = new_class

         return new_class

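The new `else` branch means plain (non-Document) base classes can now contribute fields as mixins, since their `BaseField` attributes are copied into the document's attrs. A sketch of the pattern this enables (class names hypothetical):

import datetime
from mongoengine import Document, StringField, DateTimeField

class TimestampMixin(object):
    # A plain object subclass: its BaseField attributes are picked up by
    # DocumentMetaclass and behave like normally declared fields
    created_at = DateTimeField(default=datetime.datetime.now)

class Article(Document, TimestampMixin):
    title = StringField()

article = Article(title='Hello')
print article.created_at   # the mixin's field works like any other field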
@ -225,10 +438,19 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
|||||||
# __metaclass__ is only set on the class with the __metaclass__
|
# __metaclass__ is only set on the class with the __metaclass__
|
||||||
# attribute (i.e. it is not set on subclasses). This differentiates
|
# attribute (i.e. it is not set on subclasses). This differentiates
|
||||||
# 'real' documents from the 'Document' class
|
# 'real' documents from the 'Document' class
|
||||||
if attrs.get('__metaclass__') == TopLevelDocumentMetaclass:
|
#
|
||||||
|
# Also assume a class is abstract if it has abstract set to True in
|
||||||
|
# its meta dictionary. This allows custom Document superclasses.
|
||||||
|
if (attrs.get('__metaclass__') == TopLevelDocumentMetaclass or
|
||||||
|
('meta' in attrs and attrs['meta'].get('abstract', False))):
|
||||||
|
# Make sure no base class was non-abstract
|
||||||
|
non_abstract_bases = [b for b in bases
|
||||||
|
if hasattr(b,'_meta') and not b._meta.get('abstract', False)]
|
||||||
|
if non_abstract_bases:
|
||||||
|
raise ValueError("Abstract document cannot have non-abstract base")
|
||||||
return super_new(cls, name, bases, attrs)
|
return super_new(cls, name, bases, attrs)
|
||||||
|
|
||||||
collection = name.lower()
|
collection = ''.join('_%s' % c if c.isupper() else c for c in name).strip('_').lower()
|
||||||
|
|
||||||
id_field = None
|
id_field = None
|
||||||
base_indexes = []
|
base_indexes = []
|
||||||
@ -236,9 +458,14 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
|||||||
|
|
||||||
# Subclassed documents inherit collection from superclass
|
# Subclassed documents inherit collection from superclass
|
||||||
for base in bases:
|
for base in bases:
|
||||||
if hasattr(base, '_meta') and 'collection' in base._meta:
|
if hasattr(base, '_meta'):
|
||||||
collection = base._meta['collection']
|
if 'collection' in attrs.get('meta', {}) and not base._meta.get('abstract', False):
|
||||||
|
import warnings
|
||||||
|
msg = "Trying to set a collection on a subclass (%s)" % name
|
||||||
|
warnings.warn(msg, SyntaxWarning)
|
||||||
|
del(attrs['meta']['collection'])
|
||||||
|
if base._get_collection_name():
|
||||||
|
collection = base._get_collection_name()
|
||||||
# Propagate index options.
|
# Propagate index options.
|
||||||
for key in ('index_background', 'index_drop_dups', 'index_opts'):
|
for key in ('index_background', 'index_drop_dups', 'index_opts'):
|
||||||
if key in base._meta:
|
if key in base._meta:
|
||||||
@ -246,8 +473,18 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
|||||||
|
|
||||||
id_field = id_field or base._meta.get('id_field')
|
id_field = id_field or base._meta.get('id_field')
|
||||||
base_indexes += base._meta.get('indexes', [])
|
base_indexes += base._meta.get('indexes', [])
|
||||||
|
# Propagate 'allow_inheritance'
|
||||||
|
if 'allow_inheritance' in base._meta:
|
||||||
|
base_meta['allow_inheritance'] = base._meta['allow_inheritance']
|
||||||
|
if 'queryset_class' in base._meta:
|
||||||
|
base_meta['queryset_class'] = base._meta['queryset_class']
|
||||||
|
try:
|
||||||
|
base_meta['objects'] = base.__getattribute__(base, 'objects')
|
||||||
|
except AttributeError:
|
||||||
|
pass
|
||||||
|
|
||||||
meta = {
|
meta = {
|
||||||
|
'abstract': False,
|
||||||
'collection': collection,
|
'collection': collection,
|
||||||
'max_documents': None,
|
'max_documents': None,
|
||||||
'max_size': None,
|
'max_size': None,
|
||||||
@ -258,6 +495,8 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
|||||||
'index_drop_dups': False,
|
'index_drop_dups': False,
|
||||||
'index_opts': {},
|
'index_opts': {},
|
||||||
'queryset_class': QuerySet,
|
'queryset_class': QuerySet,
|
||||||
|
'delete_rules': {},
|
||||||
|
'allow_inheritance': True
|
||||||
}
|
}
|
||||||
meta.update(base_meta)
|
meta.update(base_meta)
|
||||||
|
|
||||||
@ -269,14 +508,44 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
|||||||
# DocumentMetaclass before instantiating CollectionManager object
|
# DocumentMetaclass before instantiating CollectionManager object
|
||||||
new_class = super_new(cls, name, bases, attrs)
|
new_class = super_new(cls, name, bases, attrs)
|
||||||
|
|
||||||
|
collection = attrs['_meta'].get('collection', None)
|
||||||
|
if callable(collection):
|
||||||
|
new_class._meta['collection'] = collection(new_class)
|
||||||
|
|
||||||
# Provide a default queryset unless one has been manually provided
|
# Provide a default queryset unless one has been manually provided
|
||||||
if not hasattr(new_class, 'objects'):
|
manager = attrs.get('objects', meta.get('objects', QuerySetManager()))
|
||||||
new_class.objects = QuerySetManager()
|
if hasattr(manager, 'queryset_class'):
|
||||||
|
meta['queryset_class'] = manager.queryset_class
|
||||||
|
new_class.objects = manager
|
||||||
|
|
||||||
user_indexes = [QuerySet._build_index_spec(new_class, spec)
|
user_indexes = [QuerySet._build_index_spec(new_class, spec)
|
||||||
for spec in meta['indexes']] + base_indexes
|
for spec in meta['indexes']] + base_indexes
|
||||||
new_class._meta['indexes'] = user_indexes
|
new_class._meta['indexes'] = user_indexes
|
||||||
|
|
||||||
|
unique_indexes = cls._unique_with_indexes(new_class)
|
||||||
|
new_class._meta['unique_indexes'] = unique_indexes
|
||||||
|
|
||||||
|
for field_name, field in new_class._fields.items():
|
||||||
|
# Check for custom primary key
|
||||||
|
if field.primary_key:
|
||||||
|
current_pk = new_class._meta['id_field']
|
||||||
|
if current_pk and current_pk != field_name:
|
||||||
|
raise ValueError('Cannot override primary key field')
|
||||||
|
|
||||||
|
if not current_pk:
|
||||||
|
new_class._meta['id_field'] = field_name
|
||||||
|
# Make 'Document.id' an alias to the real primary key field
|
||||||
|
new_class.id = field
|
||||||
|
|
||||||
|
if not new_class._meta['id_field']:
|
||||||
|
new_class._meta['id_field'] = 'id'
|
||||||
|
new_class._fields['id'] = ObjectIdField(db_field='_id')
|
||||||
|
new_class.id = new_class._fields['id']
|
||||||
|
|
||||||
|
return new_class
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def _unique_with_indexes(cls, new_class, namespace=""):
|
||||||
unique_indexes = []
|
unique_indexes = []
|
||||||
for field_name, field in new_class._fields.items():
|
for field_name, field in new_class._fields.items():
|
||||||
# Generate a list of indexes needed by uniqueness constraints
|
# Generate a list of indexes needed by uniqueness constraints
|
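Two behaviours land in this hunk: `meta = {'abstract': True}` lets you build custom `Document` superclasses that create no collection of their own, and default collection names are now derived from the class name in snake_case rather than simply lower-cased. A sketch (class names hypothetical):

from mongoengine import Document, StringField

class BaseItem(Document):
    # An abstract document contributes fields but no collection
    meta = {'abstract': True}
    name = StringField()

class ShoppingItem(BaseItem):
    price = StringField()

# Previously the default collection would have been 'shoppingitem';
# with the new naming rule it becomes 'shopping_item'.
assert ShoppingItem._meta['collection'] == 'shopping_item'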
@@ -302,46 +571,44 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
                 unique_fields += unique_with

             # Add the new index to the list
-            index = [(f, pymongo.ASCENDING) for f in unique_fields]
+            index = [("%s%s" % (namespace, f), pymongo.ASCENDING) for f in unique_fields]
             unique_indexes.append(index)

-            # Check for custom primary key
-            if field.primary_key:
-                current_pk = new_class._meta['id_field']
-                if current_pk and current_pk != field_name:
-                    raise ValueError('Cannot override primary key field')
-
-                if not current_pk:
-                    new_class._meta['id_field'] = field_name
-                    # Make 'Document.id' an alias to the real primary key field
-                    new_class.id = field
-
-        new_class._meta['unique_indexes'] = unique_indexes
-
-        if not new_class._meta['id_field']:
-            new_class._meta['id_field'] = 'id'
-            new_class._fields['id'] = ObjectIdField(db_field='_id')
-            new_class.id = new_class._fields['id']
-
-        return new_class
+            # Grab any embedded document field unique indexes
+            if field.__class__.__name__ == "EmbeddedDocumentField":
+                field_namespace = "%s." % field_name
+                unique_indexes += cls._unique_with_indexes(field.document_type,
+                                                           field_namespace)
+
+        return unique_indexes


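The recursion with a `namespace` prefix means unique constraints declared inside an embedded document now produce correctly dotted index specs. A sketch (hypothetical classes):

import pymongo
from mongoengine import Document, EmbeddedDocument, EmbeddedDocumentField, StringField

class Address(EmbeddedDocument):
    # unique_with produces an index spec, namespaced by the outer field name
    street = StringField(unique_with='city')
    city = StringField()

class Customer(Document):
    address = EmbeddedDocumentField(Address)

# The metaclass now records the compound unique index as
# [('address.street', pymongo.ASCENDING), ('address.city', pymongo.ASCENDING)]
print Customer._meta['unique_indexes']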
 class BaseDocument(object):

     def __init__(self, **values):
+        signals.pre_init.send(self.__class__, document=self, values=values)
+
         self._data = {}
+        self._initialised = False
         # Assign default values to instance
-        for attr_name in self._fields.keys():
-            # Use default value if present
+        for attr_name, field in self._fields.items():
             value = getattr(self, attr_name, None)
             setattr(self, attr_name, value)

         # Assign initial values to instance
         for attr_name in values.keys():
             try:
-                setattr(self, attr_name, values.pop(attr_name))
+                value = values.pop(attr_name)
+                setattr(self, attr_name, value)
             except AttributeError:
                 pass

+        # Set any get_fieldname_display methods
+        self.__set_field_display()
+        # Flag initialised
+        self._initialised = True
+        signals.post_init.send(self.__class__, document=self)

     def validate(self):
         """Ensure that all fields' values are valid and that required fields
         are present.
@@ -356,11 +623,44 @@ class BaseDocument(object):
                 try:
                     field._validate(value)
                 except (ValueError, AttributeError, AssertionError), e:
-                    raise ValidationError('Invalid value for field of type "%s": %s'
-                                          % (field.__class__.__name__, value))
+                    raise ValidationError('Invalid value for field named "%s" of type "%s": %s'
+                                          % (field.name, field.__class__.__name__, value))
             elif field.required:
                 raise ValidationError('Field "%s" is required' % field.name)

+    @apply
+    def pk():
+        """Primary key alias
+        """
+        def fget(self):
+            return getattr(self, self._meta['id_field'])
+        def fset(self, value):
+            return setattr(self, self._meta['id_field'], value)
+        return property(fget, fset)
+
+    def to_mongo(self):
+        """Return data dictionary ready for use with MongoDB.
+        """
+        data = {}
+        for field_name, field in self._fields.items():
+            value = getattr(self, field_name, None)
+            if value is not None:
+                data[field.db_field] = field.to_mongo(value)
+        # Only add _cls and _types if allow_inheritance is not False
+        if not (hasattr(self, '_meta') and
+                self._meta.get('allow_inheritance', True) == False):
+            data['_cls'] = self._class_name
+            data['_types'] = self._superclasses.keys() + [self._class_name]
+        if '_id' in data and data['_id'] is None:
+            del data['_id']
+        return data
+
+    @classmethod
+    def _get_collection_name(cls):
+        """Returns the collection name for this class.
+        """
+        return cls._meta.get('collection', None)
+
     @classmethod
     def _get_subclasses(cls):
         """Return a dictionary of all subclasses (found recursively).
@@ -376,15 +676,184 @@ class BaseDocument(object):
             all_subclasses.update(subclass._get_subclasses())
         return all_subclasses

-    @apply
-    def pk():
-        """Primary key alias
-        """
-        def fget(self):
-            return getattr(self, self._meta['id_field'])
-        def fset(self, value):
-            return setattr(self, self._meta['id_field'], value)
-        return property(fget, fset)
+    @classmethod
+    def _from_son(cls, son):
+        """Create an instance of a Document (subclass) from a PyMongo SON.
+        """
+        # get the class name from the document, falling back to the given
+        # class if unavailable
+        class_name = son.get(u'_cls', cls._class_name)
+        data = dict((str(key), value) for key, value in son.items())
+
+        if '_types' in data:
+            del data['_types']
+
+        if '_cls' in data:
+            del data['_cls']
+
+        # Return correct subclass for document type
+        if class_name != cls._class_name:
+            subclasses = cls._get_subclasses()
+            if class_name not in subclasses:
+                # Type of document is probably more generic than the class
+                # that has been queried to return this SON
+                raise NotRegistered("""
+                        `%s` has not been registered in the document registry.
+                        Importing the document class automatically registers it,
+                        has it been imported?
+                    """.strip() % class_name)
+            cls = subclasses[class_name]
+
+        present_fields = data.keys()
+        for field_name, field in cls._fields.items():
+            if field.db_field in data:
+                value = data[field.db_field]
+                data[field_name] = (value if value is None
+                                    else field.to_python(value))
+
+        obj = cls(**data)
+        obj._changed_fields = []
+        return obj
+
+    def _mark_as_changed(self, key):
+        """Marks a key as explicitly changed by the user
+        """
+        if not key:
+            return
+        key = self._db_field_map.get(key, key)
+        if hasattr(self, '_changed_fields') and key not in self._changed_fields:
+            self._changed_fields.append(key)
+
+    def _get_changed_fields(self, key=''):
+        """Returns a list of all fields that have explicitly been changed.
+        """
+        from mongoengine import EmbeddedDocument
+        _changed_fields = []
+        _changed_fields += getattr(self, '_changed_fields', [])
+        for field_name in self._fields:
+            db_field_name = self._db_field_map.get(field_name, field_name)
+            key = '%s.' % db_field_name
+            field = getattr(self, field_name, None)
+            if isinstance(field, EmbeddedDocument) and db_field_name not in _changed_fields:  # Grab all embedded fields that have been changed
+                _changed_fields += ["%s%s" % (key, k) for k in field._get_changed_fields(key) if k]
+            elif isinstance(field, (list, tuple, dict)) and db_field_name not in _changed_fields:  # Loop list / dict fields as they contain documents
+                # Determine the iterator to use
+                if not hasattr(field, 'items'):
+                    iterator = enumerate(field)
+                else:
+                    iterator = field.iteritems()
+                for index, value in iterator:
+                    if not hasattr(value, '_get_changed_fields'):
+                        continue
+                    list_key = "%s%s." % (key, index)
+                    _changed_fields += ["%s%s" % (list_key, k) for k in value._get_changed_fields(list_key) if k]

+        return _changed_fields

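The change-tracking pair works together: a field's `__set__` calls `_mark_as_changed`, and `_get_changed_fields` walks embedded documents and containers to collect dotted paths. A sketch of what that yields (hypothetical classes, assuming a matching document already exists in the database):

from mongoengine import Document, EmbeddedDocument, EmbeddedDocumentField, StringField

class Profile(EmbeddedDocument):
    bio = StringField()

class Person(Document):
    name = StringField()
    profile = EmbeddedDocumentField(Profile)

person = Person.objects.first()   # loaded via _from_son: _changed_fields == []
person.name = 'Ada'               # the field descriptor marks 'name' as changed
person.profile.bio = 'Mathematician'
# _get_changed_fields now reports the dotted paths that need updating:
print person._get_changed_fields()   # -> ['name', 'profile.bio']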
+    def _delta(self):
+        """Returns the delta (set, unset) of the changes for a document.
+        Gets any values that have been explicitly changed.
+        """
+        # Handles cases where not loaded from_son but has _id
+        doc = self.to_mongo()
+        set_fields = self._get_changed_fields()
+        set_data = {}
+        unset_data = {}
+        if hasattr(self, '_changed_fields'):
+            set_data = {}
+            # Fetch each set item from its path
+            for path in set_fields:
+                parts = path.split('.')
+                d = doc
+                for p in parts:
+                    if hasattr(d, '__getattr__'):
+                        d = getattr(d, p)
+                    elif p.isdigit():
+                        d = d[int(p)]
+                    else:
+                        d = d.get(p)
+                set_data[path] = d
+        else:
+            set_data = doc
+            if '_id' in set_data:
+                del(set_data['_id'])
+
+        # Determine if any changed items were actually unset.
+        for path, value in set_data.items():
+            if value:
+                continue
+
+            # If we've set a value that isn't the default value, don't unset it.
+            default = None
+
+            if path in self._fields:
+                default = self._fields[path].default
+            else:  # Perform a full lookup for lists / embedded lookups
+                d = self
+                parts = path.split('.')
+                db_field_name = parts.pop()
+                for p in parts:
+                    if p.isdigit():
+                        d = d[int(p)]
+                    elif hasattr(d, '__getattribute__') and not isinstance(d, dict):
+                        real_path = d._reverse_db_field_map.get(p, p)
+                        d = getattr(d, real_path)
+                    else:
+                        d = d.get(p)
+
+                if hasattr(d, '_fields'):
+                    field_name = d._reverse_db_field_map.get(db_field_name,
+                                                             db_field_name)
+
+                    default = d._fields[field_name].default
+
+            if default is not None:
+                if callable(default):
+                    default = default()
+                if default != value:
+                    continue
+
+            del(set_data[path])
+            unset_data[path] = 1
+        return set_data, unset_data
+
+    @classmethod
+    def _geo_indices(cls, inspected_classes=None):
+        inspected_classes = inspected_classes or []
+        geo_indices = []
+        inspected_classes.append(cls)
+        for field in cls._fields.values():
+            if hasattr(field, 'document_type'):
+                field_cls = field.document_type
+                if field_cls in inspected_classes:
+                    continue
+                if hasattr(field_cls, '_geo_indices'):
+                    geo_indices += field_cls._geo_indices(inspected_classes)
+            elif field._geo_index:
+                geo_indices.append(field)
+        return geo_indices

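`_delta` is what lets `save` issue a partial update: changed paths with a value go into a `$set`, while paths the user emptied (and that are not being kept as an explicit non-default value) go into a `$unset`. A sketch of the shapes involved (hypothetical document, assuming one already exists in the database):

from mongoengine import Document, StringField, IntField

class Counter(Document):
    name = StringField()
    count = IntField()

counter = Counter.objects.first()
counter.name = 'hits'
counter.count = None    # explicitly emptied by the user

set_data, unset_data = counter._delta()
# set_data   -> {'name': 'hits'}   paths to send as a $set
# unset_data -> {'count': 1}       paths to send as a $unset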
def __getstate__(self):
|
||||||
|
self_dict = self.__dict__
|
||||||
|
removals = ["get_%s_display" % k for k,v in self._fields.items() if v.choices]
|
||||||
|
for k in removals:
|
||||||
|
if hasattr(self, k):
|
||||||
|
delattr(self, k)
|
||||||
|
return self.__dict__
|
||||||
|
|
||||||
|
def __setstate__(self, __dict__):
|
||||||
|
self.__dict__ = __dict__
|
||||||
|
self.__set_field_display()
|
||||||
|
|
||||||
|
def __set_field_display(self):
|
||||||
|
for attr_name, field in self._fields.items():
|
||||||
|
if field.choices: # dynamically adds a way to get the display value for a field with choices
|
||||||
|
setattr(self, 'get_%s_display' % attr_name, partial(self.__get_field_display, field=field))
|
||||||
|
|
||||||
|
def __get_field_display(self, field):
|
||||||
|
"""Returns the display value for a choice field"""
|
||||||
|
value = getattr(self, field.name)
|
||||||
|
return dict(field.choices).get(value, value)
|
||||||
|
|
||||||
def __iter__(self):
|
def __iter__(self):
|
||||||
return iter(self._fields)
|
return iter(self._fields)
|
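Documents with `choices` fields now grow Django-style `get_<field>_display` helpers, and `__getstate__` strips the bound partials so instances stay picklable. A sketch, reusing the hypothetical `Shirt` document from the earlier choices example:

shirt = Shirt(size='M')
# __set_field_display attached this method at __init__ time because
# the 'size' field declares choices
print shirt.get_size_display()   # -> 'Medium'

import pickle
data = pickle.dumps(shirt)       # works: the display partials are removed
shirt2 = pickle.loads(data)      # __setstate__ re-attaches them
print shirt2.get_size_display()  # -> 'Medium'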
@@ -429,60 +898,6 @@ class BaseDocument(object):
             return unicode(self).encode('utf-8')
         return '%s object' % self.__class__.__name__

-    def to_mongo(self):
-        """Return data dictionary ready for use with MongoDB.
-        """
-        data = {}
-        for field_name, field in self._fields.items():
-            value = getattr(self, field_name, None)
-            if value is not None:
-                data[field.db_field] = field.to_mongo(value)
-        # Only add _cls and _types if allow_inheritance is not False
-        if not (hasattr(self, '_meta') and
-                self._meta.get('allow_inheritance', True) == False):
-            data['_cls'] = self._class_name
-            data['_types'] = self._superclasses.keys() + [self._class_name]
-        if data.has_key('_id') and not data['_id']:
-            del data['_id']
-        return data
-
-    @classmethod
-    def _from_son(cls, son):
-        """Create an instance of a Document (subclass) from a PyMongo SON.
-        """
-        # get the class name from the document, falling back to the given
-        # class if unavailable
-        class_name = son.get(u'_cls', cls._class_name)
-
-        data = dict((str(key), value) for key, value in son.items())
-
-        if '_types' in data:
-            del data['_types']
-
-        if '_cls' in data:
-            del data['_cls']
-
-        # Return correct subclass for document type
-        if class_name != cls._class_name:
-            subclasses = cls._get_subclasses()
-            if class_name not in subclasses:
-                # Type of document is probably more generic than the class
-                # that has been queried to return this SON
-                return None
-            cls = subclasses[class_name]
-
-        present_fields = data.keys()
-
-        for field_name, field in cls._fields.items():
-            if field.db_field in data:
-                value = data[field.db_field]
-                data[field_name] = (value if value is None
-                                    else field.to_python(value))
-
-        obj = cls(**data)
-        obj._present_fields = present_fields
-        return obj
-
     def __eq__(self, other):
         if isinstance(other, self.__class__) and hasattr(other, 'id'):
             if self.id == other.id:
@@ -493,16 +908,115 @@ class BaseDocument(object):
         return not self.__eq__(other)

     def __hash__(self):
-        """ For list, dic key """
         if self.pk is None:
             # For new object
             return super(BaseDocument,self).__hash__()
         else:
             return hash(self.pk)


+class BaseList(list):
+    """A special list so we can watch any changes
+    """
+
+    def __init__(self, list_items, instance, name):
+        self.instance = instance
+        self.name = name
+        super(BaseList, self).__init__(list_items)
+
+    def __setitem__(self, *args, **kwargs):
+        self._mark_as_changed()
+        super(BaseList, self).__setitem__(*args, **kwargs)
+
+    def __delitem__(self, *args, **kwargs):
+        self._mark_as_changed()
+        super(BaseList, self).__delitem__(*args, **kwargs)
+
+    def append(self, *args, **kwargs):
+        self._mark_as_changed()
+        return super(BaseList, self).append(*args, **kwargs)
+
+    def extend(self, *args, **kwargs):
+        self._mark_as_changed()
+        return super(BaseList, self).extend(*args, **kwargs)
+
+    def insert(self, *args, **kwargs):
+        self._mark_as_changed()
+        return super(BaseList, self).insert(*args, **kwargs)
+
+    def pop(self, *args, **kwargs):
+        self._mark_as_changed()
+        return super(BaseList, self).pop(*args, **kwargs)
+
+    def remove(self, *args, **kwargs):
+        self._mark_as_changed()
+        return super(BaseList, self).remove(*args, **kwargs)
+
+    def reverse(self, *args, **kwargs):
+        self._mark_as_changed()
+        return super(BaseList, self).reverse(*args, **kwargs)
+
+    def sort(self, *args, **kwargs):
+        self._mark_as_changed()
+        return super(BaseList, self).sort(*args, **kwargs)
+
+    def _mark_as_changed(self):
+        """Marks a list as changed if it has an instance and a name"""
+        if hasattr(self, 'instance') and hasattr(self, 'name'):
+            self.instance._mark_as_changed(self.name)
+
+
+class BaseDict(dict):
+    """A special dict so we can watch any changes
+    """
+
+    def __init__(self, dict_items, instance, name):
+        self.instance = instance
+        self.name = name
+        super(BaseDict, self).__init__(dict_items)
+
+    def __setitem__(self, *args, **kwargs):
+        self._mark_as_changed()
+        super(BaseDict, self).__setitem__(*args, **kwargs)
+
+    def __setattr__(self, *args, **kwargs):
+        self._mark_as_changed()
+        super(BaseDict, self).__setattr__(*args, **kwargs)
+
+    def __delete__(self, *args, **kwargs):
+        self._mark_as_changed()
+        super(BaseDict, self).__delete__(*args, **kwargs)
+
+    def __delitem__(self, *args, **kwargs):
+        self._mark_as_changed()
+        super(BaseDict, self).__delitem__(*args, **kwargs)
+
+    def __delattr__(self, *args, **kwargs):
+        self._mark_as_changed()
+        super(BaseDict, self).__delattr__(*args, **kwargs)
+
+    def clear(self, *args, **kwargs):
+        self._mark_as_changed()
+        super(BaseDict, self).clear(*args, **kwargs)
+
+    def pop(self, *args, **kwargs):
+        self._mark_as_changed()
+        return super(BaseDict, self).pop(*args, **kwargs)
+
+    def popitem(self, *args, **kwargs):
+        self._mark_as_changed()
+        return super(BaseDict, self).popitem(*args, **kwargs)
+
+    def _mark_as_changed(self):
+        """Marks a dict as changed if it has an instance and a name"""
+        if hasattr(self, 'instance') and hasattr(self, 'name'):
+            self.instance._mark_as_changed(self.name)

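Every mutating method on these wrappers funnels through `_mark_as_changed`, which is what makes in-place container edits visible to the delta machinery. A sketch (hypothetical document, assuming one already exists in the database):

from mongoengine import Document, ListField, StringField

class BlogPost(Document):
    tags = ListField(StringField())

post = BlogPost.objects.first()
tags = post.tags           # the field descriptor hands back a BaseList
tags.append('mongodb')     # append() calls _mark_as_changed()
# 'tags' is now recorded on the owning document, so the next save()
# issues a $set for just this field instead of rewriting the document
print post._get_changed_fields()   # -> ['tags']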
 if sys.version_info < (2, 5):
     # Prior to Python 2.5, Exception was an old-style class
+    import types
     def subclass_exception(name, parents, unused):
+        import types
         return types.ClassType(name, parents, {})
 else:
     def subclass_exception(name, parents, module):
@@ -1,5 +1,6 @@
 from pymongo import Connection
 import multiprocessing
+import threading

 __all__ = ['ConnectionError', 'connect']

@@ -22,17 +23,22 @@ class ConnectionError(Exception):


 def _get_connection(reconnect=False):
+    """Handles the connection to the database
+    """
     global _connection
     identity = get_identity()
     # Connect to the database if not already connected
     if _connection.get(identity) is None or reconnect:
         try:
             _connection[identity] = Connection(**_connection_settings)
-        except:
-            raise ConnectionError('Cannot connect to the database')
+        except Exception, e:
+            raise ConnectionError("Cannot connect to the database:\n%s" % e)
     return _connection[identity]

 def _get_db(reconnect=False):
+    """Handles database connections and authentication based on the current
+    identity
+    """
     global _db, _connection
     identity = get_identity()
     # Connect if not already connected
@@ -52,8 +58,13 @@ def _get_db(reconnect=False):
     return _db[identity]

 def get_identity():
+    """Creates an identity key based on the current process and thread
+    identity.
+    """
     identity = multiprocessing.current_process()._identity
     identity = 0 if not identity else identity[0]

+    identity = (identity, threading.current_thread().ident)
     return identity

 def connect(db, username=None, password=None, **kwargs):
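Keying the connection registry by a `(process, thread)` pair instead of the process alone means each thread lazily gets its own pymongo `Connection`. A sketch of the effect (assuming a running local MongoDB and the hypothetical database name example_db):

import threading
from mongoengine import connect
from mongoengine.connection import _get_connection

connect('example_db')

def worker():
    # Each thread's identity differs, so this creates/returns a
    # connection private to the thread rather than sharing one socket
    conn = _get_connection()
    print threading.current_thread().name, id(conn)

threads = [threading.Thread(target=worker) for _ in range(2)]
for t in threads:
    t.start()
for t in threads:
    t.join()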
 mongoengine/dereference.py (new file)
@@ -0,0 +1,184 @@
+import operator
+
+import pymongo
+
+from base import BaseDict, BaseList, get_document, TopLevelDocumentMetaclass
+from fields import ReferenceField
+from connection import _get_db
+from queryset import QuerySet
+from document import Document
+
+
+class DeReference(object):
+
+    def __call__(self, items, max_depth=1, instance=None, name=None, get=False):
+        """
+        Cheaply dereferences the items to a set depth.
+        Also handles the conversion of complex data types.
+
+        :param items: The iterable (dict, list, queryset) to be dereferenced.
+        :param max_depth: The maximum depth to recurse to
+        :param instance: The owning instance used for tracking changes by
+            :class:`~mongoengine.base.ComplexBaseField`
+        :param name: The name of the field, used for tracking changes by
+            :class:`~mongoengine.base.ComplexBaseField`
+        :param get: A boolean determining if being called by __get__
+        """
+        if items is None or isinstance(items, basestring):
+            return items
+
+        # cheapest way to convert a queryset to a list
+        # list(queryset) uses a count() query to determine length
+        if isinstance(items, QuerySet):
+            items = [i for i in items]
+
+        self.max_depth = max_depth
+
+        doc_type = None
+        if instance and instance._fields:
+            doc_type = instance._fields[name].field
+
+            if isinstance(doc_type, ReferenceField):
+                doc_type = doc_type.document_type
+
+        self.reference_map = self._find_references(items)
+        self.object_map = self._fetch_objects(doc_type=doc_type)
+        return self._attach_objects(items, 0, instance, name, get)
+
+    def _find_references(self, items, depth=0):
+        """
+        Recursively finds all db references to be dereferenced
+
+        :param items: The iterable (dict, list, queryset)
+        :param depth: The current depth of recursion
+        """
+        reference_map = {}
+        if not items:
+            return reference_map
+
+        # Determine the iterator to use
+        if not hasattr(items, 'items'):
+            iterator = enumerate(items)
+        else:
+            iterator = items.iteritems()
+
+        # Recursively find dbreferences
+        for k, item in iterator:
+            if hasattr(item, '_fields'):
+                for field_name, field in item._fields.iteritems():
+                    v = item._data.get(field_name, None)
+                    if isinstance(v, (pymongo.dbref.DBRef)):
+                        reference_map.setdefault(field.document_type, []).append(v.id)
+                    elif isinstance(v, (dict, pymongo.son.SON)) and '_ref' in v:
+                        reference_map.setdefault(get_document(v['_cls']), []).append(v['_ref'].id)
+                    elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
+                        field_cls = getattr(getattr(field, 'field', None), 'document_type', None)
+                        references = self._find_references(v, depth)
+                        for key, refs in references.iteritems():
+                            if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)):
+                                key = field_cls
+                            reference_map.setdefault(key, []).extend(refs)
+            elif isinstance(item, (pymongo.dbref.DBRef)):
+                reference_map.setdefault(item.collection, []).append(item.id)
+            elif isinstance(item, (dict, pymongo.son.SON)) and '_ref' in item:
+                reference_map.setdefault(get_document(item['_cls']), []).append(item['_ref'].id)
+            elif isinstance(item, (dict, list, tuple)) and depth <= self.max_depth:
+                references = self._find_references(item, depth)
+                for key, refs in references.iteritems():
+                    reference_map.setdefault(key, []).extend(refs)
+        depth += 1
+        return reference_map
+
+    def _fetch_objects(self, doc_type=None):
+        """Fetch all references and convert to their document objects
+        """
+        object_map = {}
+        for col, dbrefs in self.reference_map.iteritems():
+            keys = object_map.keys()
+            refs = list(set([dbref for dbref in dbrefs if str(dbref) not in keys]))
+            if hasattr(col, 'objects'):  # We have a document class for the refs
+                references = col.objects.in_bulk(refs)
+                for key, doc in references.iteritems():
+                    object_map[key] = doc
+            else:  # Generic reference: use the refs data to convert to document
+                references = _get_db()[col].find({'_id': {'$in': refs}})
+                for ref in references:
+                    if '_cls' in ref:
+                        doc = get_document(ref['_cls'])._from_son(ref)
+                    else:
+                        doc = doc_type._from_son(ref)
+                    object_map[doc.id] = doc
+        return object_map
+
+    def _attach_objects(self, items, depth=0, instance=None, name=None, get=False):
+        """
+        Recursively finds all db references to be dereferenced
+
+        :param items: The iterable (dict, list, queryset)
+        :param depth: The current depth of recursion
+        :param instance: The owning instance used for tracking changes by
+            :class:`~mongoengine.base.ComplexBaseField`
+        :param name: The name of the field, used for tracking changes by
+            :class:`~mongoengine.base.ComplexBaseField`
+        :param get: A boolean determining if being called by __get__
+        """
+        if not items:
+            if isinstance(items, (BaseDict, BaseList)):
+                return items
+
+            if instance:
+                if isinstance(items, dict):
+                    return BaseDict(items, instance=instance, name=name)
+                else:
+                    return BaseList(items, instance=instance, name=name)
+
+        if isinstance(items, (dict, pymongo.son.SON)):
+            if '_ref' in items:
+                return self.object_map.get(items['_ref'].id, items)
+            elif '_types' in items and '_cls' in items:
+                doc = get_document(items['_cls'])._from_son(items)
+                if not get:
+                    doc._data = self._attach_objects(doc._data, depth, doc, name, get)
+                return doc
+
+        if not hasattr(items, 'items'):
+            is_list = True
+            iterator = enumerate(items)
+            data = []
+        else:
+            is_list = False
+            iterator = items.iteritems()
+            data = {}
+
+        for k, v in iterator:
+            if is_list:
+                data.append(v)
+            else:
+                data[k] = v
+
+            if k in self.object_map:
+                data[k] = self.object_map[k]
+            elif hasattr(v, '_fields'):
+                for field_name, field in v._fields.iteritems():
+                    v = data[k]._data.get(field_name, None)
+                    if isinstance(v, (pymongo.dbref.DBRef)):
+                        data[k]._data[field_name] = self.object_map.get(v.id, v)
+                    elif isinstance(v, (dict, pymongo.son.SON)) and '_ref' in v:
+                        data[k]._data[field_name] = self.object_map.get(v['_ref'].id, v)
+                    elif isinstance(v, dict) and depth < self.max_depth:
+                        data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name, get=get)
+                    elif isinstance(v, (list, tuple)):
+                        data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name, get=get)
+            elif isinstance(v, (dict, list, tuple)) and depth < self.max_depth:
+                data[k] = self._attach_objects(v, depth, instance=instance, name=name, get=get)
+            elif hasattr(v, 'id'):
+                data[k] = self.object_map.get(v.id, v)
+
+        if instance and name:
+            if is_list:
+                return BaseList(data, instance=instance, name=name)
+            return BaseDict(data, instance=instance, name=name)
+        depth += 1
+        return data
+
+dereference = DeReference()
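`dereference` is normally invoked by `ComplexBaseField.__get__` with `max_depth=1`, but it can be read as a standalone utility: `_find_references` collects every DBRef in a container, `_fetch_objects` fetches them in bulk, and `_attach_objects` splices the documents back in. A rough sketch of calling it directly (hypothetical class and database name, assuming a running local MongoDB):

import pymongo
from mongoengine import Document, StringField, connect
from mongoengine.dereference import dereference

connect('example_db')

class Author(Document):
    name = StringField()

bob = Author(name='Bob')
bob.save()

# A raw DBRef, as it would sit in another document's _data before access
ref = pymongo.dbref.DBRef(Author._get_collection_name(), bob.pk)
docs = dereference([ref], max_depth=1)
# One bulk query replaced the usual one-query-per-reference pattern
print docs[0].name   # -> 'Bob'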
@@ -3,6 +3,7 @@ from mongoengine import *
 from django.utils.hashcompat import md5_constructor, sha_constructor
 from django.utils.encoding import smart_str
 from django.contrib.auth.models import AnonymousUser
+from django.utils.translation import ugettext_lazy as _

 import datetime

@@ -21,16 +22,38 @@ class User(Document):
     """A User document that aims to mirror most of the API specified by Django
     at http://docs.djangoproject.com/en/dev/topics/auth/#users
     """
-    username = StringField(max_length=30, required=True)
-    first_name = StringField(max_length=30)
-    last_name = StringField(max_length=30)
-    email = StringField()
-    password = StringField(max_length=128)
-    is_staff = BooleanField(default=False)
-    is_active = BooleanField(default=True)
-    is_superuser = BooleanField(default=False)
-    last_login = DateTimeField(default=datetime.datetime.now)
-    date_joined = DateTimeField(default=datetime.datetime.now)
+    username = StringField(max_length=30, required=True,
+                           verbose_name=_('username'),
+                           help_text=_("Required. 30 characters or fewer. Letters, numbers and @/./+/-/_ characters"))
+
+    first_name = StringField(max_length=30,
+                             verbose_name=_('first name'))
+
+    last_name = StringField(max_length=30,
+                            verbose_name=_('last name'))
+    email = EmailField(verbose_name=_('e-mail address'))
+    password = StringField(max_length=128,
+                           verbose_name=_('password'),
+                           help_text=_("Use '[algo]$[salt]$[hexdigest]' or use the <a href=\"password/\">change password form</a>."))
+    is_staff = BooleanField(default=False,
+                            verbose_name=_('staff status'),
+                            help_text=_("Designates whether the user can log into this admin site."))
+    is_active = BooleanField(default=True,
+                             verbose_name=_('active'),
+                             help_text=_("Designates whether this user should be treated as active. Unselect this instead of deleting accounts."))
+    is_superuser = BooleanField(default=False,
+                                verbose_name=_('superuser status'),
+                                help_text=_("Designates that this user has all permissions without explicitly assigning them."))
+    last_login = DateTimeField(default=datetime.datetime.now,
+                               verbose_name=_('last login'))
+    date_joined = DateTimeField(default=datetime.datetime.now,
+                                verbose_name=_('date joined'))
+
+    meta = {
+        'indexes': [
+            {'fields': ['username'], 'unique': True}
+        ]
+    }

     def __unicode__(self):
         return self.username
@@ -86,7 +109,7 @@ class User(Document):
         else:
             email = '@'.join([email_name, domain_part.lower()])

-        user = User(username=username, email=email, date_joined=now)
+        user = cls(username=username, email=email, date_joined=now)
         user.set_password(password)
         user.save()
         return user
@@ -99,6 +122,10 @@ class MongoEngineBackend(object):
     """Authenticate using MongoEngine and mongoengine.django.auth.User.
     """

+    supports_object_permissions = False
+    supports_anonymous_user = False
+    supports_inactive_user = False
+
     def authenticate(self, username=None, password=None):
         user = User.objects(username=username).first()
         if user:
@@ -1,6 +1,7 @@
 from django.http import Http404
 from mongoengine.queryset import QuerySet
 from mongoengine.base import BaseDocument
+from mongoengine.base import ValidationError

 def _get_queryset(cls):
     """Inspired by django.shortcuts.*"""
@@ -25,7 +26,7 @@ def get_document_or_404(cls, *args, **kwargs):
     queryset = _get_queryset(cls)
     try:
         return queryset.get(*args, **kwargs)
-    except queryset._document.DoesNotExist:
+    except (queryset._document.DoesNotExist, ValidationError):
         raise Http404('No %s matches the given query.' % queryset._document._class_name)

 def get_list_or_404(cls, *args, **kwargs):
@@ -1,12 +1,17 @@
+from mongoengine import signals
 from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument,
-                  ValidationError)
+                  ValidationError, BaseDict, BaseList)
 from queryset import OperationError
 from connection import _get_db

 import pymongo

-__all__ = ['Document', 'EmbeddedDocument', 'ValidationError', 'OperationError']
+__all__ = ['Document', 'EmbeddedDocument', 'ValidationError',
+           'OperationError', 'InvalidCollectionError']
+
+class InvalidCollectionError(Exception):
+    pass


 class EmbeddedDocument(BaseDocument):
@@ -18,6 +23,18 @@ class EmbeddedDocument(BaseDocument):

     __metaclass__ = DocumentMetaclass

+    def __delattr__(self, *args, **kwargs):
+        """Handle deletions of fields"""
+        field_name = args[0]
+        if field_name in self._fields:
+            default = self._fields[field_name].default
+            if callable(default):
+                default = default()
+            setattr(self, field_name, default)
+        else:
+            super(EmbeddedDocument, self).__delattr__(*args, **kwargs)


class Document(BaseDocument):
|
class Document(BaseDocument):
|
||||||
"""The base class used for defining the structure and properties of
|
"""The base class used for defining the structure and properties of
|
||||||
@ -52,11 +69,50 @@ class Document(BaseDocument):
|
|||||||
dictionary. The value should be a list of field names or tuples of field
|
dictionary. The value should be a list of field names or tuples of field
|
||||||
names. Index direction may be specified by prefixing the field names with
|
names. Index direction may be specified by prefixing the field names with
|
||||||
a **+** or **-** sign.
|
a **+** or **-** sign.
|
||||||
"""
|
|
||||||
|
|
||||||
|
By default, _types will be added to the start of every index (that
|
||||||
|
doesn't contain a list) if allow_inheritence is True. This can be
|
||||||
|
disabled by either setting types to False on the specific index or
|
||||||
|
by setting index_types to False on the meta dictionary for the document.
|
||||||
|
"""
|
||||||
__metaclass__ = TopLevelDocumentMetaclass
|
__metaclass__ = TopLevelDocumentMetaclass
|
||||||
|
|
||||||
def save(self, safe=True, force_insert=False, validate=True):
|
@classmethod
|
||||||
|
def _get_collection(self):
|
||||||
|
"""Returns the collection for the document."""
|
||||||
|
db = _get_db()
|
||||||
|
collection_name = self._get_collection_name()
|
||||||
|
|
||||||
|
if not hasattr(self, '_collection') or self._collection is None:
|
||||||
|
# Create collection as a capped collection if specified
|
||||||
|
if self._meta['max_size'] or self._meta['max_documents']:
|
||||||
|
# Get max document limit and max byte size from meta
|
||||||
|
max_size = self._meta['max_size'] or 10000000 # 10MB default
|
||||||
|
max_documents = self._meta['max_documents']
|
||||||
|
|
||||||
|
if collection_name in db.collection_names():
|
||||||
|
self._collection = db[collection_name]
|
||||||
|
# The collection already exists, check if its capped
|
||||||
|
# options match the specified capped options
|
||||||
|
options = self._collection.options()
|
||||||
|
if options.get('max') != max_documents or \
|
||||||
|
options.get('size') != max_size:
|
||||||
|
msg = ('Cannot create collection "%s" as a capped '
|
||||||
|
'collection as it already exists') % self._collection
|
||||||
|
raise InvalidCollectionError(msg)
|
||||||
|
else:
|
||||||
|
# Create the collection as a capped collection
|
||||||
|
opts = {'capped': True, 'size': max_size}
|
||||||
|
if max_documents:
|
||||||
|
opts['max'] = max_documents
|
||||||
|
self._collection = db.create_collection(
|
||||||
|
collection_name, **opts
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
self._collection = db[collection_name]
|
||||||
|
return self._collection
|
||||||
|
|
||||||
|
def save(self, safe=True, force_insert=False, validate=True, write_options=None, _refs=None):
|
||||||
"""Save the :class:`~mongoengine.Document` to the database. If the
|
"""Save the :class:`~mongoengine.Document` to the database. If the
|
||||||
document already exists, it will be updated, otherwise it will be
|
document already exists, it will be updated, otherwise it will be
|
||||||
created.
|
created.
|
||||||
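A minimal sketch of how the new capped-collection support in `_get_collection` is driven from a document's `meta` dictionary; the `LogEntry` class and its field are illustrative names, not part of the patch, and a running local mongod plus a prior `connect()` call are assumed:

    from mongoengine import Document, StringField, connect

    connect(db='example')

    class LogEntry(Document):
        line = StringField()
        # Cap the backing collection at ~2MB or 10000 documents,
        # whichever limit is reached first.
        meta = {'max_size': 2000000, 'max_documents': 10000}

    # The first save calls _get_collection(), which issues
    # create_collection(..., capped=True, size=2000000, max=10000).
    # If the collection already exists with different capped options,
    # InvalidCollectionError is raised instead of silently recreating it.
    LogEntry(line='first message').save()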
@@ -68,16 +124,58 @@ class Document(BaseDocument):
         :param force_insert: only try to create a new document, don't allow
             updates of existing documents
         :param validate: validates the document; set to ``False`` to skip.
+        :param write_options: Extra keyword arguments are passed down to
+            :meth:`~pymongo.collection.Collection.save` OR
+            :meth:`~pymongo.collection.Collection.insert`
+            which will be used as options for the resultant ``getLastError`` command.
+            For example, ``save(..., write_options={'w': 2, 'fsync': True})`` will
+            wait until at least two servers have recorded the write and will force
+            an fsync on each server being written to.
+
+        .. versionchanged:: 0.5
+            For existing documents, only changed fields are saved, using set /
+            unset operations. Saves are cascaded and any
+            :class:`~pymongo.dbref.DBRef` objects that have changes are saved
+            as well.
         """
+        from fields import ReferenceField, GenericReferenceField
+
+        signals.pre_save.send(self.__class__, document=self)
+
         if validate:
             self.validate()
+
+        if not write_options:
+            write_options = {}
+
         doc = self.to_mongo()
+
+        created = '_id' in doc
+        creation_mode = force_insert or not created
         try:
             collection = self.__class__.objects._collection
-            if force_insert:
-                object_id = collection.insert(doc, safe=safe)
-            else:
-                object_id = collection.save(doc, safe=safe)
+            if creation_mode:
+                if force_insert:
+                    object_id = collection.insert(doc, safe=safe, **write_options)
+                else:
+                    object_id = collection.save(doc, safe=safe, **write_options)
+            else:
+                object_id = doc['_id']
+                updates, removals = self._delta()
+                if updates:
+                    collection.update({'_id': object_id}, {"$set": updates}, upsert=True, safe=safe, **write_options)
+                if removals:
+                    collection.update({'_id': object_id}, {"$unset": removals}, upsert=True, safe=safe, **write_options)
+
+            # Save any references / generic references
+            _refs = _refs or []
+            for name, cls in self._fields.items():
+                if isinstance(cls, (ReferenceField, GenericReferenceField)):
+                    ref = getattr(self, name)
+                    if ref and str(ref) not in _refs:
+                        _refs.append(str(ref))
+                        ref.save(safe=safe, force_insert=force_insert,
+                                 validate=validate, write_options=write_options,
+                                 _refs=_refs)
+
         except pymongo.errors.OperationFailure, err:
             message = 'Could not save document (%s)'
             if u'duplicate key' in unicode(err):
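Following the docstring above, a short sketch of passing getLastError options through `write_options`; `w` and `fsync` are standard pymongo getLastError arguments, and `LogEntry` is the illustrative class from the earlier example:

    entry = LogEntry(line='replicated message')
    # Block until two replica-set members acknowledge the write and each
    # fsyncs; the dict is forwarded to collection.save() / insert().
    entry.save(safe=True, write_options={'w': 2, 'fsync': True})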
@@ -86,12 +184,42 @@ class Document(BaseDocument):
             id_field = self._meta['id_field']
             self[id_field] = self._fields[id_field].to_python(object_id)

+        def reset_changed_fields(doc, inspected_docs=None):
+            """Loop through and reset changed fields lists"""
+
+            inspected_docs = inspected_docs or []
+            inspected_docs.append(doc)
+            if hasattr(doc, '_changed_fields'):
+                doc._changed_fields = []
+
+            for field_name in doc._fields:
+                field = getattr(doc, field_name)
+                if field not in inspected_docs and hasattr(field, '_changed_fields'):
+                    reset_changed_fields(field, inspected_docs)
+
+        reset_changed_fields(self)
+        signals.post_save.send(self.__class__, document=self, created=creation_mode)
+
+    def update(self, **kwargs):
+        """Performs an update on the :class:`~mongoengine.Document`.
+        A convenience wrapper to :meth:`~mongoengine.QuerySet.update`.
+
+        Raises :class:`OperationError` if called on an object that has not yet
+        been saved.
+        """
+        if not self.pk:
+            raise OperationError('attempt to update a document not yet saved')
+
+        return self.__class__.objects(pk=self.pk).update_one(**kwargs)
+
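A short usage sketch for the new `update()` convenience wrapper; `set__line` uses the standard QuerySet update syntax, and the document must already have a primary key:

    entry = LogEntry(line='draft')
    entry.save()
    # Equivalent to LogEntry.objects(pk=entry.pk).update_one(set__line='final')
    entry.update(set__line='final')
    # Calling update() before the first save raises OperationError.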
     def delete(self, safe=False):
         """Delete the :class:`~mongoengine.Document` from the database. This
         will only take effect if the document has been previously saved.

         :param safe: check if the operation succeeded before returning
         """
+        signals.pre_delete.send(self.__class__, document=self)
+
         id_field = self._meta['id_field']
         object_id = self._fields[id_field].to_mongo(self[id_field])
         try:
@@ -100,6 +228,18 @@ class Document(BaseDocument):
             message = u'Could not delete document (%s)' % err.message
             raise OperationError(message)

+        signals.post_delete.send(self.__class__, document=self)
+
+    def select_related(self, max_depth=1):
+        """Handles dereferencing of :class:`~pymongo.dbref.DBRef` objects to
+        a maximum depth in order to cut down the number of queries to mongodb.
+
+        .. versionadded:: 0.5
+        """
+        from dereference import dereference
+        self._data = dereference(self._data, max_depth)
+        return self
+
     def reload(self):
         """Reloads all attributes from the database.
@@ -108,7 +248,37 @@ class Document(BaseDocument):
         id_field = self._meta['id_field']
         obj = self.__class__.objects(**{id_field: self[id_field]}).first()
         for field in self._fields:
-            setattr(self, field, obj[field])
+            setattr(self, field, self._reload(field, obj[field]))
+        self._changed_fields = []
+
+    def _reload(self, key, value):
+        """Used by :meth:`~mongoengine.Document.reload` to ensure the
+        correct instance is linked to self.
+        """
+        if isinstance(value, BaseDict):
+            value = [(k, self._reload(k, v)) for k, v in value.items()]
+            value = BaseDict(value, instance=self, name=key)
+        elif isinstance(value, BaseList):
+            value = [self._reload(key, v) for v in value]
+            value = BaseList(value, instance=self, name=key)
+        elif isinstance(value, EmbeddedDocument):
+            value._changed_fields = []
+        return value
+
+    def to_dbref(self):
+        """Returns an instance of :class:`~pymongo.dbref.DBRef` useful in
+        `__raw__` queries."""
+        if not self.pk:
+            msg = "Only saved documents can have a valid dbref"
+            raise OperationError(msg)
+        return pymongo.dbref.DBRef(self.__class__._get_collection_name(), self.pk)
+
+    @classmethod
+    def register_delete_rule(cls, document_cls, field_name, rule):
+        """This method registers the delete rules to apply when removing this
+        object.
+        """
+        cls._meta['delete_rules'][(document_cls, field_name)] = rule
+
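A sketch of `to_dbref()` feeding a raw query, as its docstring suggests; `User` is an illustrative document class and the document must be saved first:

    fred = User(name='Fred')
    fred.save()                      # unsaved documents raise OperationError
    ref = fred.to_dbref()            # a pymongo.dbref.DBRef to this document
    User.objects(__raw__={'_id': ref.id}).first()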
@@ -116,7 +286,7 @@ class Document(BaseDocument):
     @classmethod
     def drop_collection(cls):
         """Drops the entire collection associated with this
         :class:`~mongoengine.Document` type from the database.
         """
         db = _get_db()
-        db.drop_collection(cls._meta['collection'])
+        db.drop_collection(cls._get_collection_name())


 class MapReduceDocument(object):
@@ -1,4 +1,6 @@
-from base import BaseField, ObjectIdField, ValidationError, get_document
+from base import (BaseField, ComplexBaseField, ObjectIdField,
+                  ValidationError, get_document)
+from queryset import DO_NOTHING
 from document import Document, EmbeddedDocument
 from connection import _get_db
 from operator import itemgetter
@@ -8,18 +10,18 @@ import pymongo
 import pymongo.dbref
 import pymongo.son
 import pymongo.binary
-import datetime
+import datetime, time
 import decimal
 import gridfs
-import warnings
-import types


 __all__ = ['StringField', 'IntField', 'FloatField', 'BooleanField',
            'DateTimeField', 'EmbeddedDocumentField', 'ListField', 'DictField',
-           'ObjectIdField', 'ReferenceField', 'ValidationError',
-           'DecimalField', 'URLField', 'GenericReferenceField', 'FileField',
-           'BinaryField', 'SortedListField', 'EmailField', 'GeoPointField']
+           'ObjectIdField', 'ReferenceField', 'ValidationError', 'MapField',
+           'DecimalField', 'ComplexDateTimeField', 'URLField',
+           'GenericReferenceField', 'FileField', 'BinaryField',
+           'SortedListField', 'EmailField', 'GeoPointField',
+           'SequenceField', 'GenericEmbeddedDocumentField']


 RECURSIVE_REFERENCE_CONSTANT = 'self'
@@ -150,6 +152,9 @@ class IntField(BaseField):
         if self.max_value is not None and value > self.max_value:
             raise ValidationError('Integer value is too large')

+    def prepare_query_value(self, op, value):
+        return int(value)
+

 class FloatField(BaseField):
     """A floating point number field.
@@ -173,6 +178,10 @@ class FloatField(BaseField):
         if self.max_value is not None and value > self.max_value:
             raise ValidationError('Float value is too large')

+    def prepare_query_value(self, op, value):
+        return float(value)
+

 class DecimalField(BaseField):
     """A fixed-point decimal number field.
@@ -222,15 +231,151 @@ class BooleanField(BaseField):

 class DateTimeField(BaseField):
     """A datetime field.
+
+    Note: Microseconds are rounded to the nearest millisecond.
+    Pre-UTC microsecond support is effectively broken.
+    Use :class:`~mongoengine.fields.ComplexDateTimeField` if you
+    need accurate microsecond support.
     """

     def validate(self, value):
-        assert isinstance(value, datetime.datetime)
+        assert isinstance(value, (datetime.datetime, datetime.date))
+
+    def to_mongo(self, value):
+        return self.prepare_query_value(None, value)
+
+    def prepare_query_value(self, op, value):
+        if value is None:
+            return value
+        if isinstance(value, datetime.datetime):
+            return value
+        if isinstance(value, datetime.date):
+            return datetime.datetime(value.year, value.month, value.day)
+
+        # Attempt to parse a datetime from a string:
+        # split off usecs, because they are not recognized by strptime.
+        if '.' in value:
+            try:
+                value, usecs = value.split('.')
+                usecs = int(usecs)
+            except ValueError:
+                return None
+        else:
+            usecs = 0
+        kwargs = {'microsecond': usecs}
+        try:  # Seconds are optional, so try converting seconds first.
+            return datetime.datetime(*time.strptime(value, '%Y-%m-%d %H:%M:%S')[:6],
+                                     **kwargs)
+        except ValueError:
+            try:  # Try without seconds.
+                return datetime.datetime(*time.strptime(value, '%Y-%m-%d %H:%M')[:5],
+                                         **kwargs)
+            except ValueError:  # Try without hour/minutes/seconds.
+                try:
+                    return datetime.datetime(*time.strptime(value, '%Y-%m-%d')[:3],
+                                             **kwargs)
+                except ValueError:
+                    return None
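The parsing above means query values no longer have to be `datetime` instances; a sketch, with `Event` as an illustrative class:

    class Event(Document):
        start = DateTimeField()

    # A date is promoted to a midnight datetime, and strings are parsed
    # with progressively looser strptime formats; unparseable strings
    # fall through to None.
    Event.objects(start__gte=datetime.date(2011, 6, 8))
    Event.objects(start__gte='2011-06-08 20:26:24')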
+
+
+class ComplexDateTimeField(StringField):
+    """
+    ComplexDateTimeField handles microseconds exactly instead of rounding
+    like DateTimeField does.
+
+    Derives from a StringField so you can do `gte` and `lte` filtering by
+    using lexicographical comparison when filtering / sorting strings.
+
+    The stored string has the following format:
+
+        YYYY,MM,DD,HH,MM,SS,NNNNNN
+
+    Where NNNNNN is the number of microseconds of the represented `datetime`.
+    The `,` separator can be modified by passing the `separator` keyword
+    when initializing the field.
+
+    .. versionadded:: 0.5
+    """
+
+    def __init__(self, separator=',', **kwargs):
+        self.names = ['year', 'month', 'day', 'hour', 'minute', 'second',
+                      'microsecond']
+        self.separator = separator
+        super(ComplexDateTimeField, self).__init__(**kwargs)
+
+    def _leading_zero(self, number):
+        """
+        Converts the given number to a string.
+
+        If it has only one digit, a leading zero is added so that it always
+        has at least two digits.
+        """
+        if int(number) < 10:
+            return "0%s" % number
+        else:
+            return str(number)
+
+    def _convert_from_datetime(self, val):
+        """
+        Convert a `datetime` object to a string representation (which will be
+        stored in MongoDB). This is the reverse function of
+        `_convert_from_string`.
+
+        >>> a = datetime(2011, 6, 8, 20, 26, 24, 192284)
+        >>> ComplexDateTimeField()._convert_from_datetime(a)
+        '2011,06,08,20,26,24,192284'
+        """
+        data = []
+        for name in self.names:
+            data.append(self._leading_zero(getattr(val, name)))
+        return self.separator.join(data)
+
+    def _convert_from_string(self, data):
+        """
+        Convert a string representation to a `datetime` object (the object you
+        will manipulate). This is the reverse function of
+        `_convert_from_datetime`.
+
+        >>> a = '2011,06,08,20,26,24,192284'
+        >>> ComplexDateTimeField()._convert_from_string(a)
+        datetime.datetime(2011, 6, 8, 20, 26, 24, 192284)
+        """
+        data = data.split(self.separator)
+        data = map(int, data)
+        values = {}
+        for i in range(7):
+            values[self.names[i]] = data[i]
+        return datetime.datetime(**values)
+
+    def __get__(self, instance, owner):
+        data = super(ComplexDateTimeField, self).__get__(instance, owner)
+        if data is None:
+            return datetime.datetime.now()
+        return self._convert_from_string(data)
+
+    def __set__(self, instance, value):
+        value = self._convert_from_datetime(value)
+        return super(ComplexDateTimeField, self).__set__(instance, value)
+
+    def validate(self, value):
+        if not isinstance(value, datetime.datetime):
+            raise ValidationError('Only datetime objects may be used in a '
+                                  'ComplexDateTimeField')
+
+    def to_python(self, value):
+        return self._convert_from_string(value)
+
+    def to_mongo(self, value):
+        return self._convert_from_datetime(value)
+
+    def prepare_query_value(self, op, value):
+        return self._convert_from_datetime(value)
+
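A round-trip sketch of the storage format described above; `Sample` is illustrative:

    class Sample(Document):
        taken_at = ComplexDateTimeField()

    s = Sample(taken_at=datetime.datetime(2011, 6, 8, 20, 26, 24, 192284))
    s.save()
    # Stored as the string '2011,06,08,20,26,24,192284'; the zero-padding
    # makes lexicographic order match chronological order, so gte / lte
    # string comparisons behave correctly.
    Sample.objects(taken_at__lte=datetime.datetime(2011, 6, 9))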
 class EmbeddedDocumentField(BaseField):
-    """An embedded document field. Only valid values are subclasses of
-    :class:`~mongoengine.EmbeddedDocument`.
+    """An embedded document field - with a declared document_type.
+    Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`.
     """

     def __init__(self, document_type, **kwargs):
@@ -256,6 +401,8 @@ class EmbeddedDocumentField(BaseField):
         return value

     def to_mongo(self, value):
+        if not isinstance(value, self.document_type):
+            return value
         return self.document_type.to_mongo(value)

     def validate(self, value):
@@ -275,7 +422,41 @@ class EmbeddedDocumentField(BaseField):
         return self.to_mongo(value)


-class ListField(BaseField):
+class GenericEmbeddedDocumentField(BaseField):
+    """A generic embedded document field - allows any
+    :class:`~mongoengine.EmbeddedDocument` to be stored.
+
+    Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`.
+    """
+
+    def prepare_query_value(self, op, value):
+        return self.to_mongo(value)
+
+    def to_python(self, value):
+        if isinstance(value, dict):
+            doc_cls = get_document(value['_cls'])
+            value = doc_cls._from_son(value)
+
+        return value
+
+    def validate(self, value):
+        if not isinstance(value, EmbeddedDocument):
+            raise ValidationError('Invalid embedded document instance '
+                                  'provided to a GenericEmbeddedDocumentField')
+
+        value.validate()
+
+    def to_mongo(self, document):
+        if document is None:
+            return None
+
+        data = document.to_mongo()
+        if '_cls' not in data:
+            data['_cls'] = document._class_name
+        return data
+
+
+class ListField(ComplexBaseField):
     """A list field that wraps a standard field, allowing multiple instances
     of the field to be used as a list in the database.
     """
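A sketch contrasting this with a plain `EmbeddedDocumentField`; the classes are illustrative:

    class TextNote(EmbeddedDocument):
        content = StringField()

    class VoiceNote(EmbeddedDocument):
        url = StringField()

    class Page(Document):
        # Any EmbeddedDocument subclass is accepted; the concrete class
        # is recorded in the stored '_cls' key and restored by to_python.
        note = GenericEmbeddedDocumentField()

    Page(note=TextNote(content='hello')).save()
    Page(note=VoiceNote(url='http://example.com/n.ogg')).save()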
@@ -283,84 +464,26 @@ class ListField(ComplexBaseField):
     # ListFields cannot be indexed with _types - MongoDB doesn't support this
     _index_with_types = False

-    def __init__(self, field, **kwargs):
-        if not isinstance(field, BaseField):
-            raise ValidationError('Argument to ListField constructor must be '
-                                  'a valid field')
+    def __init__(self, field=None, **kwargs):
         self.field = field
         kwargs.setdefault('default', lambda: [])
         super(ListField, self).__init__(**kwargs)

-    def __get__(self, instance, owner):
-        """Descriptor to automatically dereference references.
-        """
-        if instance is None:
-            # Document class being used rather than a document object
-            return self
-
-        if isinstance(self.field, ReferenceField):
-            referenced_type = self.field.document_type
-            # Get value from document instance if available
-            value_list = instance._data.get(self.name)
-            if value_list:
-                deref_list = []
-                for value in value_list:
-                    # Dereference DBRefs
-                    if isinstance(value, (pymongo.dbref.DBRef)):
-                        value = _get_db().dereference(value)
-                        deref_list.append(referenced_type._from_son(value))
-                    else:
-                        deref_list.append(value)
-                instance._data[self.name] = deref_list
-
-        if isinstance(self.field, GenericReferenceField):
-            value_list = instance._data.get(self.name)
-            if value_list:
-                deref_list = []
-                for value in value_list:
-                    # Dereference DBRefs
-                    if isinstance(value, (dict, pymongo.son.SON)):
-                        deref_list.append(self.field.dereference(value))
-                    else:
-                        deref_list.append(value)
-                instance._data[self.name] = deref_list
-
-        return super(ListField, self).__get__(instance, owner)
-
-    def to_python(self, value):
-        return [self.field.to_python(item) for item in value]
-
-    def to_mongo(self, value):
-        return [self.field.to_mongo(item) for item in value]
-
     def validate(self, value):
         """Make sure that a list of valid fields is being used.
         """
         if not isinstance(value, (list, tuple)):
             raise ValidationError('Only lists and tuples may be used in a '
                                   'list field')
-        try:
-            [self.field.validate(item) for item in value]
-        except Exception, err:
-            raise ValidationError('Invalid ListField item (%s)' % str(item))
+        super(ListField, self).validate(value)

     def prepare_query_value(self, op, value):
-        if op in ('set', 'unset'):
-            return [self.field.prepare_query_value(op, v) for v in value]
-        return self.field.prepare_query_value(op, value)
+        if self.field:
+            if op in ('set', 'unset') and (not isinstance(value, basestring)
+                                           and hasattr(value, '__iter__')):
+                return [self.field.prepare_query_value(op, v) for v in value]
+            return self.field.prepare_query_value(op, value)
+        return super(ListField, self).prepare_query_value(op, value)

-    def lookup_member(self, member_name):
-        return self.field.lookup_member(member_name)
-
-    def _set_owner_document(self, owner_document):
-        self.field.owner_document = owner_document
-        self._owner_document = owner_document
-
-    def _get_owner_document(self, owner_document):
-        self._owner_document = owner_document
-
-    owner_document = property(_get_owner_document, _set_owner_document)
-

 class SortedListField(ListField):
@@ -379,20 +502,22 @@ class SortedListField(ListField):
         super(SortedListField, self).__init__(field, **kwargs)

     def to_mongo(self, value):
+        value = super(SortedListField, self).to_mongo(value)
         if self._ordering is not None:
-            return sorted([self.field.to_mongo(item) for item in value],
-                          key=itemgetter(self._ordering))
-        return sorted([self.field.to_mongo(item) for item in value])
+            return sorted(value, key=itemgetter(self._ordering))
+        return sorted(value)


-class DictField(BaseField):
+class DictField(ComplexBaseField):
     """A dictionary field that wraps a standard Python dictionary. This is
     similar to an embedded document, but the structure is not defined.

     .. versionadded:: 0.3
+    .. versionchanged:: 0.5 - Can now handle complex / varying types of data
     """

-    def __init__(self, basecls=None, *args, **kwargs):
+    def __init__(self, basecls=None, field=None, *args, **kwargs):
+        self.field = field
         self.basecls = basecls or BaseField
         assert issubclass(self.basecls, BaseField)
         kwargs.setdefault('default', lambda: {})
@@ -408,21 +533,67 @@ class DictField(ComplexBaseField):
         if any(('.' in k or '$' in k) for k in value):
             raise ValidationError('Invalid dictionary key name - keys may not '
                                   'contain "." or "$" characters')
+        super(DictField, self).validate(value)

     def lookup_member(self, member_name):
-        return self.basecls(db_field=member_name)
+        return DictField(basecls=self.basecls, db_field=member_name)
+
+    def prepare_query_value(self, op, value):
+        match_operators = ['contains', 'icontains', 'startswith',
+                           'istartswith', 'endswith', 'iendswith',
+                           'exact', 'iexact']
+
+        if op in match_operators and isinstance(value, basestring):
+            return StringField().prepare_query_value(op, value)
+
+        return super(DictField, self).prepare_query_value(op, value)
+
+
+class MapField(DictField):
+    """A field that maps a name to a specified field type. Similar to
+    a DictField, except the 'value' of each item must match the specified
+    field type.
+
+    .. versionadded:: 0.5
+    """
+
+    def __init__(self, field=None, *args, **kwargs):
+        if not isinstance(field, BaseField):
+            raise ValidationError('Argument to MapField constructor must be '
+                                  'a valid field')
+        super(MapField, self).__init__(field=field, *args, **kwargs)
+
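A usage sketch for `MapField`; the class is illustrative:

    class Extensible(Document):
        # Arbitrary string keys, but every value must validate as a
        # StringField (unlike a bare DictField, which accepts anything).
        mapped = MapField(StringField())

    Extensible(mapped={'colour': 'red', 'size': 'large'}).save()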
 class ReferenceField(BaseField):
     """A reference to a document that will be automatically dereferenced on
     access (lazily).
+
+    Use the `reverse_delete_rule` to handle what should happen if the document
+    the field is referencing is deleted.
+
+    The options are:
+
+    * DO_NOTHING - don't do anything (default).
+    * NULLIFY - Updates the reference to null.
+    * CASCADE - Deletes the documents associated with the reference.
+    * DENY - Prevent the deletion of the reference object.
+
+    .. versionchanged:: 0.5 added `reverse_delete_rule`
     """

-    def __init__(self, document_type, **kwargs):
+    def __init__(self, document_type, reverse_delete_rule=DO_NOTHING, **kwargs):
+        """Initialises the Reference Field.
+
+        :param reverse_delete_rule: Determines what to do when the referring
+          object is deleted
+        """
         if not isinstance(document_type, basestring):
             if not issubclass(document_type, (Document, basestring)):
                 raise ValidationError('Argument to ReferenceField constructor '
                                       'must be a document class or a string')
         self.document_type_obj = document_type
+        self.reverse_delete_rule = reverse_delete_rule
         super(ReferenceField, self).__init__(**kwargs)

     @property
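A sketch of wiring up a delete rule; the rule constants live alongside `DO_NOTHING` in the queryset module, as the import at the top of this file suggests, and the classes are illustrative:

    from mongoengine.queryset import CASCADE

    class Author(Document):
        name = StringField()

    class Book(Document):
        # Deleting an Author now also deletes all Books referencing it;
        # NULLIFY would blank the field and DENY would block the delete.
        author = ReferenceField(Author, reverse_delete_rule=CASCADE)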
@@ -465,7 +636,7 @@ class ReferenceField(BaseField):
             id_ = document

         id_ = id_field.to_mongo(id_)
-        collection = self.document_type._meta['collection']
+        collection = self.document_type._get_collection_name()
         return pymongo.dbref.DBRef(collection, id_)

     def prepare_query_value(self, op, value):
@@ -474,6 +645,11 @@ class ReferenceField(BaseField):
     def validate(self, value):
         assert isinstance(value, (self.document_type, pymongo.dbref.DBRef))

+        if isinstance(value, Document) and value.id is None:
+            raise ValidationError('You can only reference documents once '
+                                  'they have been saved to the database')
+
     def lookup_member(self, member_name):
         return self.document_type._fields.get(member_name)
@@ -482,6 +658,9 @@ class GenericReferenceField(BaseField):
     """A reference to *any* :class:`~mongoengine.document.Document` subclass
     that will be automatically dereferenced on access (lazily).

+    .. note:: Any documents used as a generic reference must be registered in
+       the document registry. Importing the model will automatically register
+       it.
+
     .. versionadded:: 0.3
     """
@@ -495,6 +674,15 @@ class GenericReferenceField(BaseField):

         return super(GenericReferenceField, self).__get__(instance, owner)

+    def validate(self, value):
+        if not isinstance(value, (Document, pymongo.dbref.DBRef)):
+            raise ValidationError('GenericReferences can only contain documents')
+
+        # We need the id from the saved object to create the DBRef
+        if isinstance(value, Document) and value.id is None:
+            raise ValidationError('You can only reference documents once '
+                                  'they have been saved to the database')
+
     def dereference(self, value):
         doc_cls = get_document(value['_cls'])
         reference = value['_ref']
@@ -504,6 +692,9 @@ class GenericReferenceField(BaseField):
         return doc

     def to_mongo(self, document):
+        if document is None:
+            return None
+
         id_field_name = document.__class__._meta['id_field']
         id_field = document.__class__._fields[id_field_name]

@@ -517,9 +708,9 @@ class GenericReferenceField(BaseField):
             id_ = document

         id_ = id_field.to_mongo(id_)
-        collection = document._meta['collection']
+        collection = document._get_collection_name()
         ref = pymongo.dbref.DBRef(collection, id_)
-        return {'_cls': document.__class__.__name__, '_ref': ref}
+        return {'_cls': document._class_name, '_ref': ref}

     def prepare_query_value(self, op, value):
         return self.to_mongo(value)
@@ -555,12 +746,16 @@ class GridFSProxy(object):
     """Proxy object to handle writing and reading of files to and from GridFS

     .. versionadded:: 0.4
+    .. versionchanged:: 0.5 - added optional size param to read
     """

-    def __init__(self, grid_id=None):
+    def __init__(self, grid_id=None, key=None, instance=None):
         self.fs = gridfs.GridFS(_get_db())  # Filesystem instance
         self.newfile = None  # Used for partial writes
         self.grid_id = grid_id  # Store GridFS id for file
+        self.gridout = None
+        self.key = key
+        self.instance = instance

     def __getattr__(self, name):
         obj = self.get()
@@ -571,11 +766,18 @@ class GridFSProxy(object):
     def __get__(self, instance, value):
         return self

+    def __nonzero__(self):
+        return bool(self.grid_id)
+
     def get(self, id=None):
         if id:
             self.grid_id = id
+        if self.grid_id is None:
+            return None
         try:
-            return self.fs.get(id or self.grid_id)
+            if self.gridout is None:
+                self.gridout = self.fs.get(self.grid_id)
+            return self.gridout
         except:
             # File has been deleted
             return None
@@ -584,11 +786,12 @@ class GridFSProxy(object):
         self.newfile = self.fs.new_file(**kwargs)
         self.grid_id = self.newfile._id

-    def put(self, file, **kwargs):
+    def put(self, file_obj, **kwargs):
         if self.grid_id:
             raise GridFSError('This document already has a file. Either delete '
                               'it or call replace to overwrite it')
-        self.grid_id = self.fs.put(file, **kwargs)
+        self.grid_id = self.fs.put(file_obj, **kwargs)
+        self._mark_as_changed()

     def write(self, string):
         if self.grid_id:
@@ -605,9 +808,9 @@ class GridFSProxy(object):
         self.grid_id = self.newfile._id
         self.newfile.writelines(lines)

-    def read(self):
+    def read(self, size=-1):
         try:
-            return self.get().read()
+            return self.get().read(size)
         except:
             return None
@@ -615,20 +818,28 @@ class GridFSProxy(object):
         # Delete file from GridFS, FileField still remains
         self.fs.delete(self.grid_id)
         self.grid_id = None
+        self.gridout = None
+        self._mark_as_changed()

-    def replace(self, file, **kwargs):
+    def replace(self, file_obj, **kwargs):
         self.delete()
-        self.put(file, **kwargs)
+        self.put(file_obj, **kwargs)

     def close(self):
         if self.newfile:
             self.newfile.close()

+    def _mark_as_changed(self):
+        """Inform the instance that `self.key` has been changed"""
+        if self.instance:
+            self.instance._mark_as_changed(self.key)
+

 class FileField(BaseField):
     """A GridFS storage field.

     .. versionadded:: 0.4
+    .. versionchanged:: 0.5 added optional size param for read
     """

     def __init__(self, **kwargs):
@@ -641,11 +852,15 @@ class FileField(BaseField):
         # Check if a file already exists for this model
         grid_file = instance._data.get(self.name)
         self.grid_file = grid_file
-        if self.grid_file:
+        if isinstance(self.grid_file, GridFSProxy):
+            if not self.grid_file.key:
+                self.grid_file.key = self.name
+                self.grid_file.instance = instance
             return self.grid_file
-        return GridFSProxy()
+        return GridFSProxy(key=self.name, instance=instance)

     def __set__(self, instance, value):
+        key = self.name
         if isinstance(value, file) or isinstance(value, str):
             # using "FileField() = file/string" notation
             grid_file = instance._data.get(self.name)
@@ -659,10 +874,12 @@ class FileField(BaseField):
                 grid_file.put(value)
             else:
                 # Create a new proxy object as we don't already have one
-                instance._data[self.name] = GridFSProxy()
-                instance._data[self.name].put(value)
+                instance._data[key] = GridFSProxy(key=key, instance=instance)
+                instance._data[key].put(value)
         else:
-            instance._data[self.name] = value
+            instance._data[key] = value
+
+        instance._mark_as_changed(key)

     def to_mongo(self, value):
         # Store the GridFS file id in MongoDB
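A sketch of the proxy in use, including the new `read(size)`; `Animal` is illustrative, and extra keyword arguments to `put()` (such as `content_type`) are forwarded to GridFS:

    class Animal(Document):
        photo = FileField()

    marmot = Animal()
    marmot.photo.put(open('marmot.jpg', 'rb'), content_type='image/jpeg')
    marmot.save()

    marmot.photo.read(1024)   # first 1024 bytes; read() still reads everything
    marmot.photo.delete()     # also marks the field as changed on the instance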
@@ -700,3 +917,61 @@ class GeoPointField(BaseField):
         if (not isinstance(value[0], (float, int)) and
             not isinstance(value[1], (float, int))):
             raise ValidationError('Both values in point must be float or int.')
+
+
+class SequenceField(IntField):
+    """Provides a sequential counter.
+
+    .. note:: Although traditional databases often use increasing sequence
+       numbers for primary keys, in MongoDB the preferred approach is to
+       use Object IDs instead. The concept is that in a very large
+       cluster of machines, it is easier to create an object ID than have
+       global, uniformly increasing sequence numbers.
+
+    .. versionadded:: 0.5
+    """
+    def __init__(self, collection_name=None, *args, **kwargs):
+        self.collection_name = collection_name or 'mongoengine.counters'
+        return super(SequenceField, self).__init__(*args, **kwargs)
+
+    def generate_new_value(self):
+        """Generate and increment the counter."""
+        sequence_id = "{0}.{1}".format(self.owner_document._get_collection_name(),
+                                       self.name)
+        collection = _get_db()[self.collection_name]
+        counter = collection.find_and_modify(query={"_id": sequence_id},
+                                             update={"$inc": {"next": 1}},
+                                             new=True,
+                                             upsert=True)
+        return counter['next']
+
+    def __get__(self, instance, owner):
+        if instance is None:
+            return self
+
+        if not instance._data:
+            return
+
+        value = instance._data.get(self.name)
+
+        if not value and instance._initialised:
+            value = self.generate_new_value()
+            instance._data[self.name] = value
+            instance._mark_as_changed(self.name)
+
+        return value
+
+    def __set__(self, instance, value):
+        if value is None and instance._initialised:
+            value = self.generate_new_value()
+
+        return super(SequenceField, self).__set__(instance, value)
+
+    def to_python(self, value):
+        if value is None:
+            value = self.generate_new_value()
+        return value
File diff suppressed because it is too large

44 mongoengine/signals.py (new file)
@@ -0,0 +1,44 @@
# -*- coding: utf-8 -*-

__all__ = ['pre_init', 'post_init', 'pre_save', 'post_save',
           'pre_delete', 'post_delete']

signals_available = False
try:
    from blinker import Namespace
    signals_available = True
except ImportError:
    class Namespace(object):
        def signal(self, name, doc=None):
            return _FakeSignal(name, doc)

    class _FakeSignal(object):
        """If blinker is unavailable, create a fake class with the same
        interface that allows sending of signals but will fail with an
        error on anything else. Instead of doing anything on send, it
        will just ignore the arguments and do nothing instead.
        """

        def __init__(self, name, doc=None):
            self.name = name
            self.__doc__ = doc

        def _fail(self, *args, **kwargs):
            raise RuntimeError('signalling support is unavailable '
                               'because the blinker library is '
                               'not installed.')
        send = lambda *a, **kw: None
        connect = disconnect = has_receivers_for = receivers_for = \
            temporarily_connected_to = _fail
        del _fail

# the namespace for code signals. If you are not mongoengine code, do
# not put signals in here. Create your own namespace instead.
_signals = Namespace()

pre_init = _signals.signal('pre_init')
post_init = _signals.signal('post_init')
pre_save = _signals.signal('pre_save')
post_save = _signals.signal('post_save')
pre_delete = _signals.signal('pre_delete')
post_delete = _signals.signal('post_delete')
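A sketch of hooking the new signals; note that `connect()` on the fallback `_FakeSignal` raises RuntimeError, so blinker must be installed for receivers to work. The handler and the `LogEntry` class are illustrative:

    from mongoengine import signals

    def saved(sender, document, **kwargs):
        print 'post_save: %s (created=%s)' % (document, kwargs.get('created'))

    signals.post_save.connect(saved, sender=LogEntry)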
59 mongoengine/tests.py (new file)
@@ -0,0 +1,59 @@
from mongoengine.connection import _get_db


class query_counter(object):
    """query_counter context manager to get the number of queries."""

    def __init__(self):
        """Construct the query_counter."""
        self.counter = 0
        self.db = _get_db()

    def __enter__(self):
        """On every with block we need to drop the profile collection."""
        self.db.set_profiling_level(0)
        self.db.system.profile.drop()
        self.db.set_profiling_level(2)
        return self

    def __exit__(self, t, value, traceback):
        """Reset the profiling level."""
        self.db.set_profiling_level(0)

    def __eq__(self, value):
        """== compare query_counter."""
        return value == self._get_count()

    def __ne__(self, value):
        """!= compare query_counter."""
        return not self.__eq__(value)

    def __lt__(self, value):
        """< compare query_counter."""
        return self._get_count() < value

    def __le__(self, value):
        """<= compare query_counter."""
        return self._get_count() <= value

    def __gt__(self, value):
        """> compare query_counter."""
        return self._get_count() > value

    def __ge__(self, value):
        """>= compare query_counter."""
        return self._get_count() >= value

    def __int__(self):
        """int representation."""
        return self._get_count()

    def __repr__(self):
        """repr query_counter as the number of queries."""
        return u"%s" % self._get_count()

    def _get_count(self):
        """Get the number of queries."""
        # Each call to _get_count issues a (profiled) query of its own, so
        # keep a running offset to exclude the counter's own queries.
        count = self.db.system.profile.find().count() - self.counter
        self.counter += 1
        return count
1 setup.py
@@ -47,4 +47,5 @@ setup(name='mongoengine',
       classifiers=CLASSIFIERS,
       install_requires=['pymongo'],
       test_suite='tests',
+      tests_require=['blinker', 'django==1.3']
 )
658 tests/dereference.py (new file)
@@ -0,0 +1,658 @@
|
import unittest
|
||||||
|
|
||||||
|
from mongoengine import *
|
||||||
|
from mongoengine.connection import _get_db
|
||||||
|
from mongoengine.tests import query_counter
|
||||||
|
|
||||||
|
|
||||||
|
class FieldTest(unittest.TestCase):
|
||||||
|
|
||||||
|
def setUp(self):
|
||||||
|
connect(db='mongoenginetest')
|
||||||
|
self.db = _get_db()
|
||||||
|
|
||||||
|
def test_list_item_dereference(self):
|
||||||
|
"""Ensure that DBRef items in ListFields are dereferenced.
|
||||||
|
"""
|
||||||
|
class User(Document):
|
||||||
|
name = StringField()
|
||||||
|
|
||||||
|
class Group(Document):
|
||||||
|
members = ListField(ReferenceField(User))
|
||||||
|
|
||||||
|
User.drop_collection()
|
||||||
|
Group.drop_collection()
|
||||||
|
|
||||||
|
for i in xrange(1, 51):
|
||||||
|
user = User(name='user %s' % i)
|
||||||
|
user.save()
|
||||||
|
|
||||||
|
group = Group(members=User.objects)
|
||||||
|
group.save()
|
||||||
|
|
||||||
|
group = Group(members=User.objects)
|
||||||
|
group.save()
|
||||||
|
|
||||||
|
with query_counter() as q:
|
||||||
|
self.assertEqual(q, 0)
|
||||||
|
|
||||||
|
group_obj = Group.objects.first()
|
||||||
|
self.assertEqual(q, 1)
|
||||||
|
|
||||||
|
[m for m in group_obj.members]
|
||||||
|
self.assertEqual(q, 2)
|
||||||
|
|
||||||
|
# Document select_related
|
||||||
|
with query_counter() as q:
|
||||||
|
self.assertEqual(q, 0)
|
||||||
|
|
||||||
|
group_obj = Group.objects.first().select_related()
|
||||||
|
self.assertEqual(q, 2)
|
||||||
|
[m for m in group_obj.members]
|
||||||
|
self.assertEqual(q, 2)
|
||||||
|
|
||||||
|
# Queryset select_related
|
||||||
|
with query_counter() as q:
|
||||||
|
self.assertEqual(q, 0)
|
||||||
|
group_objs = Group.objects.select_related()
|
||||||
|
self.assertEqual(q, 2)
|
||||||
|
for group_obj in group_objs:
|
||||||
|
[m for m in group_obj.members]
|
||||||
|
self.assertEqual(q, 2)
|
||||||
|
|
||||||
|
User.drop_collection()
|
||||||
|
Group.drop_collection()
|
||||||
|
|
||||||
|
def test_recursive_reference(self):
|
||||||
|
"""Ensure that ReferenceFields can reference their own documents.
|
||||||
|
"""
|
||||||
|
class Employee(Document):
|
||||||
|
name = StringField()
|
||||||
|
boss = ReferenceField('self')
|
||||||
|
friends = ListField(ReferenceField('self'))
|
||||||
|
|
||||||
|
Employee.drop_collection()
|
||||||
|
|
||||||
|
bill = Employee(name='Bill Lumbergh')
|
||||||
|
bill.save()
|
||||||
|
|
||||||
|
michael = Employee(name='Michael Bolton')
|
||||||
|
michael.save()
|
||||||
|
|
||||||
|
samir = Employee(name='Samir Nagheenanajar')
|
||||||
|
samir.save()
|
||||||
|
|
||||||
|
friends = [michael, samir]
|
||||||
|
peter = Employee(name='Peter Gibbons', boss=bill, friends=friends)
|
||||||
|
peter.save()
|
||||||
|
|
||||||
|
Employee(name='Funky Gibbon', boss=bill, friends=friends).save()
|
||||||
|
Employee(name='Funky Gibbon', boss=bill, friends=friends).save()
|
||||||
|
Employee(name='Funky Gibbon', boss=bill, friends=friends).save()
|
||||||
|
|
||||||
|
with query_counter() as q:
|
||||||
|
self.assertEqual(q, 0)
|
||||||
|
|
||||||
|
peter = Employee.objects.with_id(peter.id)
|
||||||
|
self.assertEqual(q, 1)
|
||||||
|
|
||||||
|
peter.boss
|
||||||
|
self.assertEqual(q, 2)
|
||||||
|
|
||||||
|
peter.friends
|
||||||
|
self.assertEqual(q, 3)
|
||||||
|
|
||||||
|
# Document select_related
|
||||||
|
with query_counter() as q:
|
||||||
|
self.assertEqual(q, 0)
|
||||||
|
|
||||||
|
peter = Employee.objects.with_id(peter.id).select_related()
|
||||||
|
self.assertEqual(q, 2)
|
||||||
|
|
||||||
|
self.assertEquals(peter.boss, bill)
|
||||||
|
self.assertEqual(q, 2)
|
||||||
|
|
||||||
|
self.assertEquals(peter.friends, friends)
|
||||||
|
self.assertEqual(q, 2)
|
||||||
|
|
||||||
|
# Queryset select_related
|
||||||
|
with query_counter() as q:
|
||||||
|
self.assertEqual(q, 0)
|
||||||
|
|
||||||
|
employees = Employee.objects(boss=bill).select_related()
|
||||||
|
self.assertEqual(q, 2)
|
||||||
|
|
||||||
|
for employee in employees:
|
||||||
|
self.assertEquals(employee.boss, bill)
|
||||||
|
self.assertEqual(q, 2)
|
||||||
|
|
||||||
|
self.assertEquals(employee.friends, friends)
|
||||||
|
self.assertEqual(q, 2)
|
||||||
|
|
||||||
|
def test_generic_reference(self):
|
||||||
|
|
||||||
|
class UserA(Document):
|
||||||
|
name = StringField()
|
||||||
|
|
||||||
|
class UserB(Document):
|
||||||
|
name = StringField()
|
||||||
|
|
||||||
|
class UserC(Document):
|
||||||
|
name = StringField()
|
||||||
|
|
||||||
|
class Group(Document):
|
||||||
|
members = ListField(GenericReferenceField())
|
||||||
|
|
||||||
|
UserA.drop_collection()
|
||||||
|
UserB.drop_collection()
|
||||||
|
UserC.drop_collection()
|
||||||
|
Group.drop_collection()
|
||||||
|
|
||||||
|
members = []
|
||||||
|
for i in xrange(1, 51):
|
||||||
|
a = UserA(name='User A %s' % i)
|
||||||
|
a.save()
|
||||||
|
|
||||||
|
b = UserB(name='User B %s' % i)
|
||||||
|
b.save()
|
||||||
|
|
||||||
|
c = UserC(name='User C %s' % i)
|
||||||
|
c.save()
|
||||||
|
|
||||||
|
members += [a, b, c]
|
||||||
|
|
||||||
|
group = Group(members=members)
|
||||||
|
group.save()
|
||||||
|
|
||||||
|
group = Group(members=members)
|
||||||
|
group.save()
|
||||||
|
|
||||||
|
with query_counter() as q:
|
||||||
|
self.assertEqual(q, 0)
|
||||||
|
|
||||||
|
group_obj = Group.objects.first()
|
||||||
|
self.assertEqual(q, 1)
|
||||||
|
|
||||||
|
[m for m in group_obj.members]
|
||||||
|
self.assertEqual(q, 4)
|
||||||
|
|
||||||
|
[m for m in group_obj.members]
|
||||||
|
self.assertEqual(q, 4)
|
||||||
|
|
||||||
|
for m in group_obj.members:
|
||||||
|
self.assertTrue('User' in m.__class__.__name__)
|
||||||
|
|
||||||
|
# Document select_related
|
||||||
|
with query_counter() as q:
|
||||||
|
self.assertEqual(q, 0)
|
||||||
|
|
||||||
|
group_obj = Group.objects.first().select_related()
|
||||||
|
self.assertEqual(q, 4)
|
||||||
|
|
||||||
|
[m for m in group_obj.members]
|
||||||
|
self.assertEqual(q, 4)
|
||||||
|
|
||||||
|
[m for m in group_obj.members]
|
||||||
|
self.assertEqual(q, 4)
|
||||||
|
|
||||||
|
for m in group_obj.members:
|
||||||
|
self.assertTrue('User' in m.__class__.__name__)
|
||||||
|
|
||||||
|
# Queryset select_related
|
||||||
|
with query_counter() as q:
|
||||||
|
self.assertEqual(q, 0)
|
||||||
|
|
||||||
|
group_objs = Group.objects.select_related()
|
||||||
|
self.assertEqual(q, 4)
|
||||||
|
|
||||||
|
for group_obj in group_objs:
|
||||||
|
[m for m in group_obj.members]
|
||||||
|
self.assertEqual(q, 4)
|
||||||
|
|
||||||
|
[m for m in group_obj.members]
|
||||||
|
self.assertEqual(q, 4)
|
||||||
|
|
||||||
|
for m in group_obj.members:
|
||||||
|
self.assertTrue('User' in m.__class__.__name__)
|
||||||
|
|
||||||
|
UserA.drop_collection()
|
||||||
|
UserB.drop_collection()
|
||||||
|
UserC.drop_collection()
|
||||||
|
Group.drop_collection()
|
||||||
|
|
||||||
|
    def test_list_field_complex(self):

        class UserA(Document):
            name = StringField()

        class UserB(Document):
            name = StringField()

        class UserC(Document):
            name = StringField()

        class Group(Document):
            members = ListField()

        UserA.drop_collection()
        UserB.drop_collection()
        UserC.drop_collection()
        Group.drop_collection()

        members = []
        for i in xrange(1, 51):
            a = UserA(name='User A %s' % i)
            a.save()

            b = UserB(name='User B %s' % i)
            b.save()

            c = UserC(name='User C %s' % i)
            c.save()

            members += [a, b, c]

        group = Group(members=members)
        group.save()

        group = Group(members=members)
        group.save()

        with query_counter() as q:
            self.assertEqual(q, 0)

            group_obj = Group.objects.first()
            self.assertEqual(q, 1)

            [m for m in group_obj.members]
            self.assertEqual(q, 4)

            [m for m in group_obj.members]
            self.assertEqual(q, 4)

            for m in group_obj.members:
                self.assertTrue('User' in m.__class__.__name__)

        # Document select_related
        with query_counter() as q:
            self.assertEqual(q, 0)

            group_obj = Group.objects.first().select_related()
            self.assertEqual(q, 4)

            [m for m in group_obj.members]
            self.assertEqual(q, 4)

            [m for m in group_obj.members]
            self.assertEqual(q, 4)

            for m in group_obj.members:
                self.assertTrue('User' in m.__class__.__name__)

        # Queryset select_related
        with query_counter() as q:
            self.assertEqual(q, 0)

            group_objs = Group.objects.select_related()
            self.assertEqual(q, 4)

            for group_obj in group_objs:
                [m for m in group_obj.members]
                self.assertEqual(q, 4)

                [m for m in group_obj.members]
                self.assertEqual(q, 4)

                for m in group_obj.members:
                    self.assertTrue('User' in m.__class__.__name__)

        UserA.drop_collection()
        UserB.drop_collection()
        UserC.drop_collection()
        Group.drop_collection()

    def test_map_field_reference(self):

        class User(Document):
            name = StringField()

        class Group(Document):
            members = MapField(ReferenceField(User))

        User.drop_collection()
        Group.drop_collection()

        members = []
        for i in xrange(1, 51):
            user = User(name='user %s' % i)
            user.save()
            members.append(user)

        group = Group(members=dict([(str(u.id), u) for u in members]))
        group.save()

        group = Group(members=dict([(str(u.id), u) for u in members]))
        group.save()

        with query_counter() as q:
            self.assertEqual(q, 0)

            group_obj = Group.objects.first()
            self.assertEqual(q, 1)

            [m for m in group_obj.members]
            self.assertEqual(q, 2)

            for k, m in group_obj.members.iteritems():
                self.assertTrue(isinstance(m, User))

        # Document select_related
        with query_counter() as q:
            self.assertEqual(q, 0)

            group_obj = Group.objects.first().select_related()
            self.assertEqual(q, 2)

            [m for m in group_obj.members]
            self.assertEqual(q, 2)

            for k, m in group_obj.members.iteritems():
                self.assertTrue(isinstance(m, User))

        # Queryset select_related
        with query_counter() as q:
            self.assertEqual(q, 0)

            group_objs = Group.objects.select_related()
            self.assertEqual(q, 2)

            for group_obj in group_objs:
                [m for m in group_obj.members]
                self.assertEqual(q, 2)

                for k, m in group_obj.members.iteritems():
                    self.assertTrue(isinstance(m, User))

        User.drop_collection()
        Group.drop_collection()

    def test_dict_field(self):

        class UserA(Document):
            name = StringField()

        class UserB(Document):
            name = StringField()

        class UserC(Document):
            name = StringField()

        class Group(Document):
            members = DictField()

        UserA.drop_collection()
        UserB.drop_collection()
        UserC.drop_collection()
        Group.drop_collection()

        members = []
        for i in xrange(1, 51):
            a = UserA(name='User A %s' % i)
            a.save()

            b = UserB(name='User B %s' % i)
            b.save()

            c = UserC(name='User C %s' % i)
            c.save()

            members += [a, b, c]

        group = Group(members=dict([(str(u.id), u) for u in members]))
        group.save()
        group = Group(members=dict([(str(u.id), u) for u in members]))
        group.save()

        with query_counter() as q:
            self.assertEqual(q, 0)

            group_obj = Group.objects.first()
            self.assertEqual(q, 1)

            [m for m in group_obj.members]
            self.assertEqual(q, 4)

            [m for m in group_obj.members]
            self.assertEqual(q, 4)

            for k, m in group_obj.members.iteritems():
                self.assertTrue('User' in m.__class__.__name__)

        # Document select_related
        with query_counter() as q:
            self.assertEqual(q, 0)

            group_obj = Group.objects.first().select_related()
            self.assertEqual(q, 4)

            [m for m in group_obj.members]
            self.assertEqual(q, 4)

            [m for m in group_obj.members]
            self.assertEqual(q, 4)

            for k, m in group_obj.members.iteritems():
                self.assertTrue('User' in m.__class__.__name__)

        # Queryset select_related
        with query_counter() as q:
            self.assertEqual(q, 0)

            group_objs = Group.objects.select_related()
            self.assertEqual(q, 4)

            for group_obj in group_objs:
                [m for m in group_obj.members]
                self.assertEqual(q, 4)

                [m for m in group_obj.members]
                self.assertEqual(q, 4)

                for k, m in group_obj.members.iteritems():
                    self.assertTrue('User' in m.__class__.__name__)

        Group.objects.delete()
        Group().save()

        with query_counter() as q:
            self.assertEqual(q, 0)

            group_obj = Group.objects.first()
            self.assertEqual(q, 1)

            [m for m in group_obj.members]
            self.assertEqual(q, 1)
            self.assertEqual(group_obj.members, {})

        UserA.drop_collection()
        UserB.drop_collection()
        UserC.drop_collection()
        Group.drop_collection()

    def test_dict_field_no_field_inheritance(self):

        class UserA(Document):
            name = StringField()
            meta = {'allow_inheritance': False}

        class Group(Document):
            members = DictField()

        UserA.drop_collection()
        Group.drop_collection()

        members = []
        for i in xrange(1, 51):
            a = UserA(name='User A %s' % i)
            a.save()

            members += [a]

        group = Group(members=dict([(str(u.id), u) for u in members]))
        group.save()

        group = Group(members=dict([(str(u.id), u) for u in members]))
        group.save()

        with query_counter() as q:
            self.assertEqual(q, 0)

            group_obj = Group.objects.first()
            self.assertEqual(q, 1)

            [m for m in group_obj.members]
            self.assertEqual(q, 2)

            [m for m in group_obj.members]
            self.assertEqual(q, 2)

            for k, m in group_obj.members.iteritems():
                self.assertTrue(isinstance(m, UserA))

        # Document select_related
        with query_counter() as q:
            self.assertEqual(q, 0)

            group_obj = Group.objects.first().select_related()
            self.assertEqual(q, 2)

            [m for m in group_obj.members]
            self.assertEqual(q, 2)

            [m for m in group_obj.members]
            self.assertEqual(q, 2)

            for k, m in group_obj.members.iteritems():
                self.assertTrue(isinstance(m, UserA))

        # Queryset select_related
        with query_counter() as q:
            self.assertEqual(q, 0)

            group_objs = Group.objects.select_related()
            self.assertEqual(q, 2)

            for group_obj in group_objs:
                [m for m in group_obj.members]
                self.assertEqual(q, 2)

                [m for m in group_obj.members]
                self.assertEqual(q, 2)

                for k, m in group_obj.members.iteritems():
                    self.assertTrue(isinstance(m, UserA))

        UserA.drop_collection()
        Group.drop_collection()

    def test_generic_reference_map_field(self):

        class UserA(Document):
            name = StringField()

        class UserB(Document):
            name = StringField()

        class UserC(Document):
            name = StringField()

        class Group(Document):
            members = MapField(GenericReferenceField())

        UserA.drop_collection()
        UserB.drop_collection()
        UserC.drop_collection()
        Group.drop_collection()

        members = []
        for i in xrange(1, 51):
            a = UserA(name='User A %s' % i)
            a.save()

            b = UserB(name='User B %s' % i)
            b.save()

            c = UserC(name='User C %s' % i)
            c.save()

            members += [a, b, c]

        group = Group(members=dict([(str(u.id), u) for u in members]))
        group.save()
        group = Group(members=dict([(str(u.id), u) for u in members]))
        group.save()

        with query_counter() as q:
            self.assertEqual(q, 0)

            group_obj = Group.objects.first()
            self.assertEqual(q, 1)

            [m for m in group_obj.members]
            self.assertEqual(q, 4)

            [m for m in group_obj.members]
            self.assertEqual(q, 4)

            for k, m in group_obj.members.iteritems():
                self.assertTrue('User' in m.__class__.__name__)

        # Document select_related
        with query_counter() as q:
            self.assertEqual(q, 0)

            group_obj = Group.objects.first().select_related()
            self.assertEqual(q, 4)

            [m for m in group_obj.members]
            self.assertEqual(q, 4)

            [m for m in group_obj.members]
            self.assertEqual(q, 4)

            for k, m in group_obj.members.iteritems():
                self.assertTrue('User' in m.__class__.__name__)

        # Queryset select_related
        with query_counter() as q:
            self.assertEqual(q, 0)

            group_objs = Group.objects.select_related()
            self.assertEqual(q, 4)

            for group_obj in group_objs:
                [m for m in group_obj.members]
                self.assertEqual(q, 4)

                [m for m in group_obj.members]
                self.assertEqual(q, 4)

                for k, m in group_obj.members.iteritems():
                    self.assertTrue('User' in m.__class__.__name__)

        Group.objects.delete()
        Group().save()

        with query_counter() as q:
            self.assertEqual(q, 0)

            group_obj = Group.objects.first()
            self.assertEqual(q, 1)

            [m for m in group_obj.members]
            self.assertEqual(q, 1)

        UserA.drop_collection()
        UserB.drop_collection()
        UserC.drop_collection()
        Group.drop_collection()
tests/django_tests.py (new file, 69 lines)
@@ -0,0 +1,69 @@
# -*- coding: utf-8 -*-
import unittest

from mongoengine import *
from mongoengine.django.shortcuts import get_document_or_404

from django.http import Http404
from django.template import Context, Template
from django.conf import settings
settings.configure()


class QuerySetTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')

        class Person(Document):
            name = StringField()
            age = IntField()
        self.Person = Person

    def test_order_by_in_django_template(self):
        """Ensure that QuerySets are properly ordered in Django template.
        """
        self.Person.drop_collection()

        self.Person(name="A", age=20).save()
        self.Person(name="D", age=10).save()
        self.Person(name="B", age=40).save()
        self.Person(name="C", age=30).save()

        t = Template("{% for o in ol %}{{ o.name }}-{{ o.age }}:{% endfor %}")

        d = {"ol": self.Person.objects.order_by('-name')}
        self.assertEqual(t.render(Context(d)), u'D-10:C-30:B-40:A-20:')
        d = {"ol": self.Person.objects.order_by('+name')}
        self.assertEqual(t.render(Context(d)), u'A-20:B-40:C-30:D-10:')
        d = {"ol": self.Person.objects.order_by('-age')}
        self.assertEqual(t.render(Context(d)), u'B-40:C-30:A-20:D-10:')
        d = {"ol": self.Person.objects.order_by('+age')}
        self.assertEqual(t.render(Context(d)), u'D-10:A-20:C-30:B-40:')

        self.Person.drop_collection()

    def test_q_object_filter_in_template(self):

        self.Person.drop_collection()

        self.Person(name="A", age=20).save()
        self.Person(name="D", age=10).save()
        self.Person(name="B", age=40).save()
        self.Person(name="C", age=30).save()

        t = Template("{% for o in ol %}{{ o.name }}-{{ o.age }}:{% endfor %}")

        d = {"ol": self.Person.objects.filter(Q(age=10) | Q(name="C"))}
        self.assertEqual(t.render(Context(d)), 'D-10:C-30:')

        # Check double rendering doesn't throw an error
        self.assertEqual(t.render(Context(d)), 'D-10:C-30:')

    def test_get_document_or_404(self):
        p = self.Person(name="G404")
        p.save()

        self.assertRaises(Http404, get_document_or_404, self.Person, pk='1234')
        self.assertEqual(p, get_document_or_404(self.Person, pk=p.pk))
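(Editorial aside: `get_document_or_404` mirrors Django's `get_object_or_404`, turning a failed lookup — including an invalid `pk` such as `'1234'` above — into an `Http404`. A hypothetical re-implementation for illustration only; the exact exceptions the real shortcut catches are an assumption here:)

    from django.http import Http404
    from mongoengine import ValidationError

    def get_document_or_404_sketch(cls, *args, **kwargs):
        # Hypothetical helper, not the shortcut's actual source.
        try:
            return cls.objects.get(*args, **kwargs)
        except (cls.DoesNotExist, ValidationError):
            raise Http404('No %s matches the given query.' % cls.__name__)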
tests/document.py (1706 lines changed; diff suppressed because it is too large)
tests/fields.py (750 lines changed)
@@ -7,6 +7,7 @@ import gridfs
 from mongoengine import *
 from mongoengine.connection import _get_db
+from mongoengine.base import _document_registry, NotRegistered


 class FieldTest(unittest.TestCase):
@@ -20,12 +21,15 @@ class FieldTest(unittest.TestCase):
         """
         class Person(Document):
             name = StringField()
-            age = IntField(default=30)
-            userid = StringField(default=lambda: 'test')
+            age = IntField(default=30, help_text="Your real age")
+            userid = StringField(default=lambda: 'test', verbose_name="User Identity")

         person = Person(name='Test Person')
         self.assertEqual(person._data['age'], 30)
         self.assertEqual(person._data['userid'], 'test')
+        self.assertEqual(person._fields['name'].help_text, None)
+        self.assertEqual(person._fields['age'].help_text, "Your real age")
+        self.assertEqual(person._fields['userid'].verbose_name, "User Identity")

     def test_required_values(self):
         """Ensure that required field constraints are enforced.
@@ -181,11 +185,184 @@ class FieldTest(unittest.TestCase):
         log.time = datetime.datetime.now()
         log.validate()

+        log.time = datetime.date.today()
+        log.validate()
+
         log.time = -1
         self.assertRaises(ValidationError, log.validate)
         log.time = '1pm'
         self.assertRaises(ValidationError, log.validate)

+    def test_datetime(self):
+        """Tests showing pymongo datetime fields handling of microseconds.
+        Microseconds are rounded to the nearest millisecond and pre UTC
+        handling is wonky.
+
+        See: http://api.mongodb.org/python/current/api/bson/son.html#dt
+        """
+        class LogEntry(Document):
+            date = DateTimeField()
+
+        LogEntry.drop_collection()
+
+        # Test can save dates
+        log = LogEntry()
+        log.date = datetime.date.today()
+        log.save()
+        log.reload()
+        self.assertEquals(log.date.date(), datetime.date.today())
+
+        LogEntry.drop_collection()
+
+        # Post UTC - microseconds are rounded (down) nearest millisecond and dropped
+        d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 999)
+        d2 = datetime.datetime(1970, 01, 01, 00, 00, 01)
+        log = LogEntry()
+        log.date = d1
+        log.save()
+        log.reload()
+        self.assertNotEquals(log.date, d1)
+        self.assertEquals(log.date, d2)
+
+        # Post UTC - microseconds are rounded (down) nearest millisecond
+        d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 9999)
+        d2 = datetime.datetime(1970, 01, 01, 00, 00, 01, 9000)
+        log.date = d1
+        log.save()
+        log.reload()
+        self.assertNotEquals(log.date, d1)
+        self.assertEquals(log.date, d2)
+
+        # Pre UTC dates microseconds below 1000 are dropped
+        d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999)
+        d2 = datetime.datetime(1969, 12, 31, 23, 59, 59)
+        log.date = d1
+        log.save()
+        log.reload()
+        self.assertNotEquals(log.date, d1)
+        self.assertEquals(log.date, d2)
+
+        # Pre UTC microseconds above 1000 is wonky.
+        # log.date has an invalid microsecond value so I can't construct
+        # a date to compare.
+        #
+        # However, the timedelta is predictable with pre UTC timestamps:
+        # it always adds 16 seconds and [777216-776217] microseconds
+        for i in xrange(1001, 3113, 33):
+            d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, i)
+            log.date = d1
+            log.save()
+            log.reload()
+            self.assertNotEquals(log.date, d1)
+
+            delta = log.date - d1
+            self.assertEquals(delta.seconds, 16)
+            microseconds = 777216 - (i % 1000)
+            self.assertEquals(delta.microseconds, microseconds)
+
+        LogEntry.drop_collection()
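(Editorial aside on why the assertions above expect rounding: BSON stores a datetime as a 64-bit count of milliseconds since the Unix epoch, so sub-millisecond precision cannot survive a round trip. A minimal sketch of the post-epoch truncation; the helper name is illustrative:)

    import datetime

    EPOCH = datetime.datetime(1970, 1, 1)

    def to_bson_millis(dt):
        # Collapse to BSON's millisecond resolution (post-epoch, naive UTC assumed).
        delta = dt - EPOCH
        millis = (delta.days * 86400 + delta.seconds) * 1000 + delta.microseconds // 1000
        return EPOCH + datetime.timedelta(milliseconds=millis)

    print to_bson_millis(datetime.datetime(1970, 1, 1, 0, 0, 1, 9999))
    # 1970-01-01 00:00:01.009000 - the 9999 microseconds collapsed to 9 milliseconds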
+    def test_complexdatetime_storage(self):
+        """Tests for complex datetime fields - which can handle microseconds
+        without rounding.
+        """
+        class LogEntry(Document):
+            date = ComplexDateTimeField()
+
+        LogEntry.drop_collection()
+
+        # Post UTC - microseconds are rounded (down) nearest millisecond and dropped - with default datetimefields
+        d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 999)
+        log = LogEntry()
+        log.date = d1
+        log.save()
+        log.reload()
+        self.assertEquals(log.date, d1)
+
+        # Post UTC - microseconds are rounded (down) nearest millisecond - with default datetimefields
+        d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 9999)
+        log.date = d1
+        log.save()
+        log.reload()
+        self.assertEquals(log.date, d1)
+
+        # Pre UTC dates microseconds below 1000 are dropped - with default datetimefields
+        d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999)
+        log.date = d1
+        log.save()
+        log.reload()
+        self.assertEquals(log.date, d1)
+
+        # Pre UTC microseconds above 1000 is wonky - with default datetimefields
+        # log.date has an invalid microsecond value so I can't construct
+        # a date to compare.
+        for i in xrange(1001, 3113, 33):
+            d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, i)
+            log.date = d1
+            log.save()
+            log.reload()
+            self.assertEquals(log.date, d1)
+            log1 = LogEntry.objects.get(date=d1)
+            self.assertEqual(log, log1)
+
+        LogEntry.drop_collection()
+
+    def test_complexdatetime_usage(self):
+        """Tests for complex datetime fields - which can handle microseconds
+        without rounding.
+        """
+        class LogEntry(Document):
+            date = ComplexDateTimeField()
+
+        LogEntry.drop_collection()
+
+        d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 999)
+        log = LogEntry()
+        log.date = d1
+        log.save()
+
+        log1 = LogEntry.objects.get(date=d1)
+        self.assertEquals(log, log1)
+
+        LogEntry.drop_collection()
+
+        # create 60 log entries
+        for i in xrange(1950, 2010):
+            d = datetime.datetime(i, 01, 01, 00, 00, 01, 999)
+            LogEntry(date=d).save()
+
+        self.assertEqual(LogEntry.objects.count(), 60)
+
+        # Test ordering
+        logs = LogEntry.objects.order_by("date")
+        count = logs.count()
+        i = 0
+        while i < count - 1:  # was `while i == count-1`, which never iterated
+            self.assertTrue(logs[i].date <= logs[i + 1].date)
+            i += 1
+
+        logs = LogEntry.objects.order_by("-date")
+        count = logs.count()
+        i = 0
+        while i < count - 1:  # was `while i == count-1`, which never iterated
+            self.assertTrue(logs[i].date >= logs[i + 1].date)
+            i += 1
+
+        # Test searching
+        logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1))
+        self.assertEqual(logs.count(), 30)
+
+        logs = LogEntry.objects.filter(date__lte=datetime.datetime(1980, 1, 1))
+        self.assertEqual(logs.count(), 30)
+
+        logs = LogEntry.objects.filter(
+            date__lte=datetime.datetime(2011, 1, 1),
+            date__gte=datetime.datetime(2000, 1, 1),
+        )
+        self.assertEqual(logs.count(), 10)
+
+        LogEntry.drop_collection()
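(Editorial aside: a ComplexDateTimeField keeps microseconds because the value can be persisted as a fixed-width delimited string rather than a BSON date; such strings also sort lexicographically in chronological order, which is what makes the ordering and range queries above work. An illustrative encoding, not the field's actual source:)

    import datetime

    def encode(dt):
        # Fixed-width fields make string order match chronological order.
        return dt.strftime('%Y,%m,%d,%H,%M,%S,') + '%06d' % dt.microsecond

    def decode(s):
        return datetime.datetime(*map(int, s.split(',')))

    d = datetime.datetime(1970, 1, 1, 0, 0, 1, 999)
    assert decode(encode(d)) == d   # microseconds survive the round trip
    assert encode(d) < encode(datetime.datetime(1970, 1, 1, 0, 0, 2))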
     def test_list_validation(self):
         """Ensure that a list field only accepts lists with valid elements.
         """
@@ -200,6 +377,7 @@ class FieldTest(unittest.TestCase):
             comments = ListField(EmbeddedDocumentField(Comment))
             tags = ListField(StringField())
             authors = ListField(ReferenceField(User))
+            generic = ListField(GenericReferenceField())

         post = BlogPost(content='Went for a walk today...')
         post.validate()
@@ -227,8 +405,28 @@ class FieldTest(unittest.TestCase):
         self.assertRaises(ValidationError, post.validate)

         post.authors = [User()]
+        self.assertRaises(ValidationError, post.validate)
+
+        user = User()
+        user.save()
+        post.authors = [user]
         post.validate()

+        post.generic = [1, 2]
+        self.assertRaises(ValidationError, post.validate)
+
+        post.generic = [User(), Comment()]
+        self.assertRaises(ValidationError, post.validate)
+
+        post.generic = [Comment()]
+        self.assertRaises(ValidationError, post.validate)
+
+        post.generic = [user]
+        post.validate()
+
+        User.drop_collection()
+        BlogPost.drop_collection()
+
     def test_sorted_list_sorting(self):
         """Ensure that a sorted list field properly sorts values.
         """
@@ -261,12 +459,116 @@ class FieldTest(unittest.TestCase):

         BlogPost.drop_collection()

-    def test_dict_validation(self):
+    def test_list_field(self):
+        """Ensure that list types work as expected.
+        """
+        class BlogPost(Document):
+            info = ListField()
+
+        BlogPost.drop_collection()
+
+        post = BlogPost()
+        post.info = 'my post'
+        self.assertRaises(ValidationError, post.validate)
+
+        post.info = {'title': 'test'}
+        self.assertRaises(ValidationError, post.validate)
+
+        post.info = ['test']
+        post.save()
+
+        post = BlogPost()
+        post.info = [{'test': 'test'}]
+        post.save()
+
+        post = BlogPost()
+        post.info = [{'test': 3}]
+        post.save()
+
+        self.assertEquals(BlogPost.objects.count(), 3)
+        self.assertEquals(BlogPost.objects.filter(info__exact='test').count(), 1)
+        self.assertEquals(BlogPost.objects.filter(info__0__test='test').count(), 1)
+
+        # Confirm handles non strings or non existing keys
+        self.assertEquals(BlogPost.objects.filter(info__0__test__exact='5').count(), 0)
+        self.assertEquals(BlogPost.objects.filter(info__100__test__exact='test').count(), 0)
+        BlogPost.drop_collection()
+
+    def test_list_field_strict(self):
+        """Ensure that list field handles validation if provided a strict field type."""
+
+        class Simple(Document):
+            mapping = ListField(field=IntField())
+
+        Simple.drop_collection()
+
+        e = Simple()
+        e.mapping = [1]
+        e.save()
+
+        def create_invalid_mapping():
+            e.mapping = ["abc"]
+            e.save()
+
+        self.assertRaises(ValidationError, create_invalid_mapping)
+
+        Simple.drop_collection()
+
+    def test_list_field_complex(self):
+        """Ensure that the list fields can handle the complex types."""
+
+        class SettingBase(EmbeddedDocument):
+            pass
+
+        class StringSetting(SettingBase):
+            value = StringField()
+
+        class IntegerSetting(SettingBase):
+            value = IntField()
+
+        class Simple(Document):
+            mapping = ListField()
+
+        Simple.drop_collection()
+        e = Simple()
+        e.mapping.append(StringSetting(value='foo'))
+        e.mapping.append(IntegerSetting(value=42))
+        e.mapping.append({'number': 1, 'string': 'Hi!', 'float': 1.001,
+                          'complex': IntegerSetting(value=42), 'list':
+                          [IntegerSetting(value=42), StringSetting(value='foo')]})
+        e.save()
+
+        e2 = Simple.objects.get(id=e.id)
+        self.assertTrue(isinstance(e2.mapping[0], StringSetting))
+        self.assertTrue(isinstance(e2.mapping[1], IntegerSetting))
+
+        # Test querying
+        self.assertEquals(Simple.objects.filter(mapping__1__value=42).count(), 1)
+        self.assertEquals(Simple.objects.filter(mapping__2__number=1).count(), 1)
+        self.assertEquals(Simple.objects.filter(mapping__2__complex__value=42).count(), 1)
+        self.assertEquals(Simple.objects.filter(mapping__2__list__0__value=42).count(), 1)
+        self.assertEquals(Simple.objects.filter(mapping__2__list__1__value='foo').count(), 1)
+
+        # Confirm can update
+        Simple.objects().update(set__mapping__1=IntegerSetting(value=10))
+        self.assertEquals(Simple.objects.filter(mapping__1__value=10).count(), 1)
+
+        Simple.objects().update(
+            set__mapping__2__list__1=StringSetting(value='Boo'))
+        self.assertEquals(Simple.objects.filter(mapping__2__list__1__value='foo').count(), 0)
+        self.assertEquals(Simple.objects.filter(mapping__2__list__1__value='Boo').count(), 1)
+
+        Simple.drop_collection()
+
+    def test_dict_field(self):
         """Ensure that dict types work as expected.
         """
         class BlogPost(Document):
             info = DictField()
+
+        BlogPost.drop_collection()
+
         post = BlogPost()
         post.info = 'my post'
         self.assertRaises(ValidationError, post.validate)
@@ -281,7 +583,149 @@ class FieldTest(unittest.TestCase):
         self.assertRaises(ValidationError, post.validate)

         post.info = {'title': 'test'}
-        post.validate()
+        post.save()
+
+        post = BlogPost()
+        post.info = {'details': {'test': 'test'}}
+        post.save()
+
+        post = BlogPost()
+        post.info = {'details': {'test': 3}}
+        post.save()
+
+        self.assertEquals(BlogPost.objects.count(), 3)
+        self.assertEquals(BlogPost.objects.filter(info__title__exact='test').count(), 1)
+        self.assertEquals(BlogPost.objects.filter(info__details__test__exact='test').count(), 1)
+
+        # Confirm handles non strings or non existing keys
+        self.assertEquals(BlogPost.objects.filter(info__details__test__exact=5).count(), 0)
+        self.assertEquals(BlogPost.objects.filter(info__made_up__test__exact='test').count(), 0)
+        BlogPost.drop_collection()
+
+    def test_dictfield_strict(self):
+        """Ensure that dict field handles validation if provided a strict field type."""
+
+        class Simple(Document):
+            mapping = DictField(field=IntField())
+
+        Simple.drop_collection()
+
+        e = Simple()
+        e.mapping['someint'] = 1
+        e.save()
+
+        def create_invalid_mapping():
+            e.mapping['somestring'] = "abc"
+            e.save()
+
+        self.assertRaises(ValidationError, create_invalid_mapping)
+
+        Simple.drop_collection()
+
+    def test_dictfield_complex(self):
+        """Ensure that the dict field can handle the complex types."""
+
+        class SettingBase(EmbeddedDocument):
+            pass
+
+        class StringSetting(SettingBase):
+            value = StringField()
+
+        class IntegerSetting(SettingBase):
+            value = IntField()
+
+        class Simple(Document):
+            mapping = DictField()
+
+        Simple.drop_collection()
+        e = Simple()
+        e.mapping['somestring'] = StringSetting(value='foo')
+        e.mapping['someint'] = IntegerSetting(value=42)
+        e.mapping['nested_dict'] = {'number': 1, 'string': 'Hi!', 'float': 1.001,
+                                    'complex': IntegerSetting(value=42), 'list':
+                                    [IntegerSetting(value=42), StringSetting(value='foo')]}
+        e.save()
+
+        e2 = Simple.objects.get(id=e.id)
+        self.assertTrue(isinstance(e2.mapping['somestring'], StringSetting))
+        self.assertTrue(isinstance(e2.mapping['someint'], IntegerSetting))
+
+        # Test querying
+        self.assertEquals(Simple.objects.filter(mapping__someint__value=42).count(), 1)
+        self.assertEquals(Simple.objects.filter(mapping__nested_dict__number=1).count(), 1)
+        self.assertEquals(Simple.objects.filter(mapping__nested_dict__complex__value=42).count(), 1)
+        self.assertEquals(Simple.objects.filter(mapping__nested_dict__list__0__value=42).count(), 1)
+        self.assertEquals(Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 1)
+
+        # Confirm can update
+        Simple.objects().update(
+            set__mapping={"someint": IntegerSetting(value=10)})
+        Simple.objects().update(
+            set__mapping__nested_dict__list__1=StringSetting(value='Boo'))
+        self.assertEquals(Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 0)
+        self.assertEquals(Simple.objects.filter(mapping__nested_dict__list__1__value='Boo').count(), 1)
+
+        Simple.drop_collection()
+
+    def test_mapfield(self):
+        """Ensure that the MapField handles the declared type."""
+
+        class Simple(Document):
+            mapping = MapField(IntField())
+
+        Simple.drop_collection()
+
+        e = Simple()
+        e.mapping['someint'] = 1
+        e.save()
+
+        def create_invalid_mapping():
+            e.mapping['somestring'] = "abc"
+            e.save()
+
+        self.assertRaises(ValidationError, create_invalid_mapping)
+
+        def create_invalid_class():
+            class NoDeclaredType(Document):
+                mapping = MapField()
+
+        self.assertRaises(ValidationError, create_invalid_class)
+
+        Simple.drop_collection()
+
+    def test_complex_mapfield(self):
+        """Ensure that the MapField can handle complex declared types."""
+
+        class SettingBase(EmbeddedDocument):
+            pass
+
+        class StringSetting(SettingBase):
+            value = StringField()
+
+        class IntegerSetting(SettingBase):
+            value = IntField()
+
+        class Extensible(Document):
+            mapping = MapField(EmbeddedDocumentField(SettingBase))
+
+        Extensible.drop_collection()
+
+        e = Extensible()
+        e.mapping['somestring'] = StringSetting(value='foo')
+        e.mapping['someint'] = IntegerSetting(value=42)
+        e.save()
+
+        e2 = Extensible.objects.get(id=e.id)
+        self.assertTrue(isinstance(e2.mapping['somestring'], StringSetting))
+        self.assertTrue(isinstance(e2.mapping['someint'], IntegerSetting))
+
+        def create_invalid_mapping():
+            e.mapping['someint'] = 123
+            e.save()
+
+        self.assertRaises(ValidationError, create_invalid_mapping)
+
+        Extensible.drop_collection()
+
     def test_embedded_document_validation(self):
         """Ensure that invalid embedded documents cannot be assigned to
@@ -435,6 +879,7 @@ class FieldTest(unittest.TestCase):
             name = StringField()
             children = ListField(EmbeddedDocumentField('self'))

+        Tree.drop_collection()
         tree = Tree(name="Tree")

         first_child = TreeNode(name="Child 1")
@@ -442,18 +887,51 @@ class FieldTest(unittest.TestCase):

         second_child = TreeNode(name="Child 2")
         first_child.children.append(second_child)

-        third_child = TreeNode(name="Child 3")
-        first_child.children.append(third_child)
-
         tree.save()

-        tree_obj = Tree.objects.first()
+        tree = Tree.objects.first()
+        self.assertEqual(len(tree.children), 1)
+
+        self.assertEqual(len(tree.children[0].children), 1)
+
+        third_child = TreeNode(name="Child 3")
+        tree.children[0].children.append(third_child)
+        tree.save()
+
         self.assertEqual(len(tree.children), 1)
         self.assertEqual(tree.children[0].name, first_child.name)
         self.assertEqual(tree.children[0].children[0].name, second_child.name)
         self.assertEqual(tree.children[0].children[1].name, third_child.name)
+
+        # Test updating
+        tree.children[0].name = 'I am Child 1'
+        tree.children[0].children[0].name = 'I am Child 2'
+        tree.children[0].children[1].name = 'I am Child 3'
+        tree.save()
+
+        self.assertEqual(tree.children[0].name, 'I am Child 1')
+        self.assertEqual(tree.children[0].children[0].name, 'I am Child 2')
+        self.assertEqual(tree.children[0].children[1].name, 'I am Child 3')
+
+        # Test removal
+        self.assertEqual(len(tree.children[0].children), 2)
+        del(tree.children[0].children[1])
+
+        tree.save()
+        self.assertEqual(len(tree.children[0].children), 1)
+
+        tree.children[0].children.pop(0)
+        tree.save()
+        self.assertEqual(len(tree.children[0].children), 0)
+        self.assertEqual(tree.children[0].children, [])
+
+        tree.children[0].children.insert(0, third_child)
+        tree.children[0].children.insert(0, second_child)
+        tree.save()
+        self.assertEqual(len(tree.children[0].children), 2)
+        self.assertEqual(tree.children[0].children[0].name, second_child.name)
+        self.assertEqual(tree.children[0].children[1].name, third_child.name)

     def test_undefined_reference(self):
         """Ensure that ReferenceFields may reference undefined Documents.
         """
@@ -584,6 +1062,52 @@ class FieldTest(unittest.TestCase):
         Post.drop_collection()
         User.drop_collection()

+    def test_generic_reference_document_not_registered(self):
+        """Ensure dereferencing out of the document registry throws a
+        `NotRegistered` error.
+        """
+        class Link(Document):
+            title = StringField()
+
+        class User(Document):
+            bookmarks = ListField(GenericReferenceField())
+
+        Link.drop_collection()
+        User.drop_collection()
+
+        link_1 = Link(title="Pitchfork")
+        link_1.save()
+
+        user = User(bookmarks=[link_1])
+        user.save()
+
+        # Mimic User and Link definitions being in a different file
+        # and the Link model not being imported in the User file.
+        del(_document_registry["Link"])
+
+        user = User.objects.first()
+        try:
+            user.bookmarks
+            raise AssertionError, "Link was removed from the registry"
+        except NotRegistered:
+            pass
+
+        Link.drop_collection()
+        User.drop_collection()
+
+    def test_generic_reference_is_none(self):
+
+        class Person(Document):
+            name = StringField()
+            city = GenericReferenceField()
+
+        Person.drop_collection()
+        Person(name="Wilson Jr").save()
+
+        self.assertEquals(repr(Person.objects(city=None)),
+                          "[<Person: Person object>]")
+
     def test_binary_fields(self):
         """Ensure that binary fields can be stored and retrieved.
         """
@@ -644,7 +1168,8 @@ class FieldTest(unittest.TestCase):
         """Ensure that value is in a container of allowed values.
         """
         class Shirt(Document):
-            size = StringField(max_length=3, choices=('S','M','L','XL','XXL'))
+            size = StringField(max_length=3, choices=(('S', 'Small'), ('M', 'Medium'), ('L', 'Large'),
+                                                      ('XL', 'Extra Large'), ('XXL', 'Extra Extra Large')))

         Shirt.drop_collection()

@@ -659,6 +1184,35 @@ class FieldTest(unittest.TestCase):

         Shirt.drop_collection()

+    def test_choices_get_field_display(self):
+        """Test dynamic helper for returning the display value of a choices field.
+        """
+        class Shirt(Document):
+            size = StringField(max_length=3, choices=(('S', 'Small'), ('M', 'Medium'), ('L', 'Large'),
+                                                      ('XL', 'Extra Large'), ('XXL', 'Extra Extra Large')))
+            style = StringField(max_length=3, choices=(('S', 'Small'), ('B', 'Baggy'), ('W', 'wide')), default='S')
+
+        Shirt.drop_collection()
+
+        shirt = Shirt()
+
+        self.assertEqual(shirt.get_size_display(), None)
+        self.assertEqual(shirt.get_style_display(), 'Small')
+
+        shirt.size = "XXL"
+        shirt.style = "B"
+        self.assertEqual(shirt.get_size_display(), 'Extra Extra Large')
+        self.assertEqual(shirt.get_style_display(), 'Baggy')
+
+        # Set as Z - an invalid choice
+        shirt.size = "Z"
+        shirt.style = "Z"
+        self.assertEqual(shirt.get_size_display(), 'Z')
+        self.assertEqual(shirt.get_style_display(), 'Z')
+        self.assertRaises(ValidationError, shirt.validate)
+
+        Shirt.drop_collection()
+
     def test_file_fields(self):
         """Ensure that file fields can be written to and their data retrieved
         """
@@ -700,6 +1254,12 @@ class FieldTest(unittest.TestCase):
         self.assertTrue(streamfile == result)
         self.assertEquals(result.file.read(), text + more_text)
         self.assertEquals(result.file.content_type, content_type)
+        result.file.seek(0)
+        self.assertEquals(result.file.tell(), 0)
+        self.assertEquals(result.file.read(len(text)), text)
+        self.assertEquals(result.file.tell(), len(text))
+        self.assertEquals(result.file.read(len(more_text)), more_text)
+        self.assertEquals(result.file.tell(), len(text + more_text))
         result.file.delete()

         # Ensure deleted file returns None
@@ -753,6 +1313,21 @@ class FieldTest(unittest.TestCase):

         TestFile.drop_collection()

+    def test_file_boolean(self):
+        """Ensure that a boolean test of a FileField indicates its presence
+        """
+        class TestFile(Document):
+            file = FileField()
+
+        testfile = TestFile()
+        self.assertFalse(bool(testfile.file))
+        testfile.file = 'Hello, World!'
+        testfile.file.content_type = 'text/plain'
+        testfile.save()
+        self.assertTrue(bool(testfile.file))
+
+        TestFile.drop_collection()
+
     def test_geo_indexes(self):
         """Ensure that indexes are created automatically for GeoPointFields.
         """
@@ -771,6 +1346,27 @@ class FieldTest(unittest.TestCase):

         Event.drop_collection()

+    def test_geo_embedded_indexes(self):
+        """Ensure that indexes are created automatically for GeoPointFields on
+        embedded documents.
+        """
+        class Venue(EmbeddedDocument):
+            location = GeoPointField()
+            name = StringField()
+
+        class Event(Document):
+            title = StringField()
+            venue = EmbeddedDocumentField(Venue)
+
+        Event.drop_collection()
+        venue = Venue(name="Double Door", location=[41.909889, -87.677137])
+        event = Event(title="Coltrane Motion", venue=venue)
+        event.save()
+
+        info = Event.objects._collection.index_information()
+        self.assertTrue(u'location_2d' in info)
+        self.assertTrue(info[u'location_2d']['key'] == [(u'location', u'2d')])
+
     def test_ensure_unique_default_instances(self):
         """Ensure that every field has its own unique default instance."""
         class D(Document):
@@ -784,5 +1380,139 @@ class FieldTest(unittest.TestCase):
         self.assertEqual(d2.data, {})
         self.assertEqual(d2.data2, {})

+    def test_sequence_field(self):
+        class Person(Document):
+            id = SequenceField(primary_key=True)
+            name = StringField()
+
+        self.db['mongoengine.counters'].drop()
+        Person.drop_collection()
+
+        for x in xrange(10):
+            p = Person(name="Person %s" % x)
+            p.save()
+
+        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
+        self.assertEqual(c['next'], 10)
+
+        ids = [i.id for i in Person.objects]
+        self.assertEqual(ids, range(1, 11))
+
+        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
+        self.assertEqual(c['next'], 10)
+
+    def test_multiple_sequence_fields(self):
+        class Person(Document):
+            id = SequenceField(primary_key=True)
+            counter = SequenceField()
+            name = StringField()
+
+        self.db['mongoengine.counters'].drop()
+        Person.drop_collection()
+
+        for x in xrange(10):
+            p = Person(name="Person %s" % x)
+            p.save()
+
+        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
+        self.assertEqual(c['next'], 10)
+
+        ids = [i.id for i in Person.objects]
+        self.assertEqual(ids, range(1, 11))
+
+        counters = [i.counter for i in Person.objects]
+        self.assertEqual(counters, range(1, 11))
+
+        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
+        self.assertEqual(c['next'], 10)
+
+    def test_sequence_fields_reload(self):
+        class Animal(Document):
+            counter = SequenceField()
+            type = StringField()
+
+        self.db['mongoengine.counters'].drop()
+        Animal.drop_collection()
+
+        a = Animal(type="Boi")
+        a.save()
+
+        self.assertEqual(a.counter, 1)
+        a.reload()
+        self.assertEqual(a.counter, 1)
+
+        a.counter = None
+        self.assertEqual(a.counter, 2)
+        a.save()
+
+        self.assertEqual(a.counter, 2)
+
+        a = Animal.objects.first()
+        self.assertEqual(a.counter, 2)
+        a.reload()
+        self.assertEqual(a.counter, 2)
+
+    def test_multiple_sequence_fields_on_docs(self):
+
+        class Animal(Document):
+            id = SequenceField(primary_key=True)
+
+        class Person(Document):
+            id = SequenceField(primary_key=True)
+
+        self.db['mongoengine.counters'].drop()
+        Animal.drop_collection()
+        Person.drop_collection()
+
+        for x in xrange(10):
+            a = Animal(name="Animal %s" % x)
+            a.save()
+            p = Person(name="Person %s" % x)
+            p.save()
+
+        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
+        self.assertEqual(c['next'], 10)
+
+        c = self.db['mongoengine.counters'].find_one({'_id': 'animal.id'})
+        self.assertEqual(c['next'], 10)
+
+        ids = [i.id for i in Person.objects]
+        self.assertEqual(ids, range(1, 11))
+
+        id = [i.id for i in Animal.objects]
+        self.assertEqual(id, range(1, 11))
+
+        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
+        self.assertEqual(c['next'], 10)
+
+        c = self.db['mongoengine.counters'].find_one({'_id': 'animal.id'})
+        self.assertEqual(c['next'], 10)
+
+    def test_generic_embedded_document(self):
+        class Car(EmbeddedDocument):
+            name = StringField()
+
+        class Dish(EmbeddedDocument):
+            food = StringField(required=True)
+            number = IntField()
+
+        class Person(Document):
+            name = StringField()
+            like = GenericEmbeddedDocumentField()
+
+        person = Person(name='Test User')
+        person.like = Car(name='Fiat')
+        person.save()
+
+        person = Person.objects.first()
+        self.assertTrue(isinstance(person.like, Car))
+
+        person.like = Dish(food="arroz", number=15)
+        person.save()
+
+        person = Person.objects.first()
+        self.assertTrue(isinstance(person.like, Dish))
+
 if __name__ == '__main__':
     unittest.main()
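(Editorial aside on the SequenceField tests above: the assertions against the `mongoengine.counters` collection suggest each allocation atomically increments a per-sequence counter document, which is why ten saves leave `next` at 10 and the ids run 1..10. An illustrative pymongo-level sketch, not the field's actual code:)

    def next_in_sequence(db, sequence_name):
        # Atomic increment-and-fetch; upsert creates the counter on first use.
        # find_and_modify is the pymongo call of this era.
        counter = db['mongoengine.counters'].find_and_modify(
            query={'_id': sequence_name},   # e.g. 'person.id'
            update={'$inc': {'next': 1}},
            new=True,
            upsert=True)
        return counter['next']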
tests/fixtures.py (new file, 25 lines)
@@ -0,0 +1,25 @@
from datetime import datetime
import pymongo

from mongoengine import *
from mongoengine.base import BaseField
from mongoengine.connection import _get_db


class PickleEmbedded(EmbeddedDocument):
    date = DateTimeField(default=datetime.now)


class PickleTest(Document):
    number = IntField()
    string = StringField(choices=(('One', '1'), ('Two', '2')))
    embedded = EmbeddedDocumentField(PickleEmbedded)
    lists = ListField(StringField())


class Mixin(object):
    name = StringField()


class Base(Document):
    pass
tests/queryset.py (1267 lines changed; diff suppressed because it is too large)
tests/signals.py (new file, 181 lines)
@@ -0,0 +1,181 @@
# -*- coding: utf-8 -*-
import unittest

from mongoengine import *
from mongoengine import signals

signal_output = []


class SignalTests(unittest.TestCase):
    """
    Testing signals before/after saving and deleting.
    """

    def get_signal_output(self, fn, *args, **kwargs):
        # Flush any existing signal output
        global signal_output
        signal_output = []
        fn(*args, **kwargs)
        return signal_output

    def setUp(self):
        connect(db='mongoenginetest')

        class Author(Document):
            name = StringField()

            def __unicode__(self):
                return self.name

            @classmethod
            def pre_init(cls, sender, document, *args, **kwargs):
                signal_output.append('pre_init signal, %s' % cls.__name__)
                signal_output.append(str(kwargs['values']))

            @classmethod
            def post_init(cls, sender, document, **kwargs):
                signal_output.append('post_init signal, %s' % document)

            @classmethod
            def pre_save(cls, sender, document, **kwargs):
                signal_output.append('pre_save signal, %s' % document)

            @classmethod
            def post_save(cls, sender, document, **kwargs):
                signal_output.append('post_save signal, %s' % document)
                if 'created' in kwargs:
                    if kwargs['created']:
                        signal_output.append('Is created')
                    else:
                        signal_output.append('Is updated')

            @classmethod
            def pre_delete(cls, sender, document, **kwargs):
                signal_output.append('pre_delete signal, %s' % document)

            @classmethod
            def post_delete(cls, sender, document, **kwargs):
                signal_output.append('post_delete signal, %s' % document)

        self.Author = Author

        class Another(Document):
            name = StringField()

            def __unicode__(self):
                return self.name

            @classmethod
            def pre_init(cls, sender, document, **kwargs):
                signal_output.append('pre_init Another signal, %s' % cls.__name__)
                signal_output.append(str(kwargs['values']))

            @classmethod
            def post_init(cls, sender, document, **kwargs):
                signal_output.append('post_init Another signal, %s' % document)

            @classmethod
            def pre_save(cls, sender, document, **kwargs):
                signal_output.append('pre_save Another signal, %s' % document)

            @classmethod
            def post_save(cls, sender, document, **kwargs):
                signal_output.append('post_save Another signal, %s' % document)
                if 'created' in kwargs:
                    if kwargs['created']:
                        signal_output.append('Is created')
                    else:
                        signal_output.append('Is updated')

            @classmethod
            def pre_delete(cls, sender, document, **kwargs):
                signal_output.append('pre_delete Another signal, %s' % document)

            @classmethod
            def post_delete(cls, sender, document, **kwargs):
                signal_output.append('post_delete Another signal, %s' % document)

        self.Another = Another
        # Save up the number of connected signals so that we can check at the end
        # that all the signals we register get properly unregistered
        self.pre_signals = (
            len(signals.pre_init.receivers),
            len(signals.post_init.receivers),
            len(signals.pre_save.receivers),
            len(signals.post_save.receivers),
            len(signals.pre_delete.receivers),
            len(signals.post_delete.receivers)
        )

        signals.pre_init.connect(Author.pre_init, sender=Author)
        signals.post_init.connect(Author.post_init, sender=Author)
        signals.pre_save.connect(Author.pre_save, sender=Author)
        signals.post_save.connect(Author.post_save, sender=Author)
        signals.pre_delete.connect(Author.pre_delete, sender=Author)
        signals.post_delete.connect(Author.post_delete, sender=Author)

        signals.pre_init.connect(Another.pre_init, sender=Another)
        signals.post_init.connect(Another.post_init, sender=Another)
        signals.pre_save.connect(Another.pre_save, sender=Another)
        signals.post_save.connect(Another.post_save, sender=Another)
        signals.pre_delete.connect(Another.pre_delete, sender=Another)
        signals.post_delete.connect(Another.post_delete, sender=Another)

    def tearDown(self):
        signals.pre_init.disconnect(self.Author.pre_init)
        signals.post_init.disconnect(self.Author.post_init)
        signals.post_delete.disconnect(self.Author.post_delete)
        signals.pre_delete.disconnect(self.Author.pre_delete)
        signals.post_save.disconnect(self.Author.post_save)
        signals.pre_save.disconnect(self.Author.pre_save)

        signals.pre_init.disconnect(self.Another.pre_init)
        signals.post_init.disconnect(self.Another.post_init)
        signals.post_delete.disconnect(self.Another.post_delete)
        signals.pre_delete.disconnect(self.Another.pre_delete)
        signals.post_save.disconnect(self.Another.post_save)
        signals.pre_save.disconnect(self.Another.pre_save)

        # Check that all our signals got disconnected properly.
        post_signals = (
            len(signals.pre_init.receivers),
            len(signals.post_init.receivers),
            len(signals.pre_save.receivers),
            len(signals.post_save.receivers),
            len(signals.pre_delete.receivers),
            len(signals.post_delete.receivers)
        )

        self.assertEqual(self.pre_signals, post_signals)

    def test_model_signals(self):
        """ Model saves should throw some signals. """

        def create_author():
            a1 = self.Author(name='Bill Shakespeare')

        self.assertEqual(self.get_signal_output(create_author), [
            "pre_init signal, Author",
            "{'name': 'Bill Shakespeare'}",
            "post_init signal, Bill Shakespeare",
        ])

        a1 = self.Author(name='Bill Shakespeare')
        self.assertEqual(self.get_signal_output(a1.save), [
            "pre_save signal, Bill Shakespeare",
            "post_save signal, Bill Shakespeare",
            "Is created"
        ])

        a1.reload()
        a1.name = 'William Shakespeare'
        self.assertEqual(self.get_signal_output(a1.save), [
            "pre_save signal, William Shakespeare",
            "post_save signal, William Shakespeare",
            "Is updated"
        ])

        self.assertEqual(self.get_signal_output(a1.delete), [
            'pre_delete signal, William Shakespeare',
            'post_delete signal, William Shakespeare',
        ])
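(Editorial aside: outside of tests, a plain function receiver works just as well as the classmethods above; the `sender` argument scopes the receiver to a single document class. A minimal sketch, assuming the `Author` document defined above:)

    from mongoengine import signals

    def log_save(sender, document, **kwargs):
        print 'saved %s' % document

    signals.post_save.connect(log_save, sender=Author)
    # ... later, to clean up:
    signals.post_save.disconnect(log_save)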