Merge branch 'dev'

This commit is contained in commit fdc34869ca.

.gitignore (vendored): 8 changes
@@ -1,3 +1,5 @@
.*
!.gitignore
*.pyc
.*.swp
*.egg
@@ -6,4 +8,8 @@ docs/_build
build/
dist/
mongoengine.egg-info/
env/
.settings
.project
.pydevproject
tests/bugfix.py
AUTHORS: 64 changes
@@ -1,5 +1,69 @@
The PRIMARY AUTHORS are (and/or have been):

Harry Marr <harry@hmarr.com>
Matt Dennewitz <mattdennewitz@gmail.com>
Deepak Thukral <iapain@yahoo.com>
Florian Schlachter <flori@n-schlachter.de>
Steve Challis <steve@stevechallis.com>
Ross Lawley <ross.lawley@gmail.com>
Wilson Júnior <wilsonpjunior@gmail.com>
Dan Crosta https://github.com/dcrosta

CONTRIBUTORS

Derived from the git logs; inevitably incomplete, but these people and others
have submitted patches, reported bugs and generally helped make MongoEngine
that much better:

* Harry Marr
* Ross Lawley
* blackbrrr
* Florian Schlachter
* Vincent Driessen
* Steve Challis
* flosch
* Deepak Thukral
* Colin Howe
* Wilson Júnior
* Alistair Roche
* Dan Crosta
* Viktor Kerkez
* Stephan Jaekel
* Rached Ben Mustapha
* Greg Turner
* Daniel Hasselrot
* Mircea Pasoi
* Matt Chisholm
* James Punteney
* Timothée Peignier
* Stuart Rackham
* Serge Matveenko
* Matt Dennewitz
* Don Spaulding
* Ales Zoulek
* sshwsfc
* sib
* Samuel Clay
* Nick Vlku
* martin
* Flavio Amieiro
* Анхбаяр Лхагвадорж
* Zak Johnson
* Victor Farazdagi
* vandersonmota
* Theo Julienne
* sp
* Slavi Pantaleev
* Richard Henry
* Nicolas Perriault
* Nick Vlku Jr
* Michael Henson
* Leo Honkanen
* kuno
* Josh Ourisman
* Jaime
* Igor Ivanov
* Gregg Lind
* Gareth Lloyd
* Albert Choi
* John Arnfield

@@ -41,6 +41,8 @@ Fields

.. autoclass:: mongoengine.URLField

.. autoclass:: mongoengine.EmailField

.. autoclass:: mongoengine.IntField

.. autoclass:: mongoengine.FloatField

@@ -51,12 +53,16 @@ Fields

.. autoclass:: mongoengine.DateTimeField

.. autoclass:: mongoengine.ComplexDateTimeField

.. autoclass:: mongoengine.EmbeddedDocumentField

.. autoclass:: mongoengine.DictField

.. autoclass:: mongoengine.ListField

.. autoclass:: mongoengine.SortedListField

.. autoclass:: mongoengine.BinaryField

.. autoclass:: mongoengine.ObjectIdField

@@ -2,6 +2,72 @@
Changelog
=========

Changes in dev
==============

- Added InvalidDocumentError - so Document core methods can't be overwritten
- Added GenericEmbeddedDocument - so you can embed any type of embeddable document
- Added within_polygon support - for those with mongodb 1.9
- Updated sum / average to use map_reduce as db.eval doesn't work in sharded environments
- Added where() - filter allowing users to specify query expressions as Javascript
- Added SequenceField - for creating sequential counters
- Added update() convenience method to a document
- Added cascading saves - so changes to referenced documents are saved on .save()
- Added select_related() support
- Added support for the positional operator
- Updated geo index checking to be recursive and check in embedded documents
- Updated default collection naming convention
- Added Document Mixin support
- Fixed queryset __repr__ mid iteration
- Added hint() support, so you can tell Mongo the proper index to use for the query
- Fixed issue with inconsistent setting of _cls breaking inherited referencing
- Added help_text and verbose_name to fields to help with some form libs
- Updated item_frequencies to handle embedded document lookups
- Added delta tracking - now only sets / unsets explicitly changed fields
- Fixed saving so it sets updated values rather than overwriting the whole document
- Added ComplexDateTimeField - handles datetimes correctly with microseconds
- Added ComplexBaseField - for improved flexibility and performance
- Added get_FIELD_display() method for easy choice field displaying
- Added queryset.slave_okay(enabled) method
- Updated queryset.timeout(enabled) and queryset.snapshot(enabled) to be chainable
- Added insert method for bulk inserts
- Added blinker signal support
- Added query_counter context manager for tests
- Added map_reduce version of item_frequencies and set it as the default (as db.eval doesn't work in sharded environments)
- Added inline_map_reduce option to map_reduce
- Updated connection exception so it provides more info on the cause
- Added searching multiple levels deep in ``DictField``
- Added ``DictField`` entries containing strings to use matching operators
- Added ``MapField``, similar to ``DictField``
- Added Abstract Base Classes
- Added Custom Objects Managers
- Added sliced subfields updating
- Added ``NotRegistered`` exception if dereferencing ``Document`` not in the registry
- Added a write concern for ``save``, ``update``, ``update_one`` and ``get_or_create``
- Added slicing / subarray fetching controls
- Fixed various unique index and other index issues
- Fixed threaded connection issues
- Added spherical geospatial query operators
- Updated queryset to handle latest version of pymongo -
  map_reduce now requires an output
- Added ``Document`` __hash__, __ne__ for pickling
- Added ``FileField`` optional size arg for read method
- Fixed ``FileField`` seek and tell methods for reading files
- Added ``QuerySet.clone`` to support copying querysets
- Fixed item_frequencies when using a name that's the same as a native js function
- Added reverse delete rules
- Fixed issue with unset operation
- Fixed Q-object bug
- Added ``QuerySet.all_fields`` - resets previous .only() and .exclude()
- Added ``QuerySet.exclude``
- Added django style choices
- Fixed order and filter issue
- Added ``QuerySet.only`` subfield support
- Added creation_counter to ``BaseField`` allowing fields to be sorted in the
  way the user has specified them
- Fixed various errors
- Added many tests

Changes in v0.4
===============
- Added ``GridFSStorage`` Django storage backend
@@ -32,7 +98,7 @@ Changes in v0.3
===============
- Added MapReduce support
- Added ``contains``, ``startswith`` and ``endswith`` query operators (and
  case-insensitive versions that are prefixed with 'i')
- Deprecated fields' ``name`` parameter, replaced with ``db_field``
- Added ``QuerySet.only`` for only retrieving specific fields
- Added ``QuerySet.in_bulk()`` for bulk querying using ids
@@ -79,7 +145,7 @@ Changes in v0.2
===============
- Added ``Q`` class for building advanced queries
- Added ``QuerySet`` methods for atomic updates to documents
- Fields may now specify ``unique=True`` to enforce uniqueness across a
  collection
- Added option for default document ordering
- Fixed bug in index definitions
@@ -87,7 +153,7 @@ Changes in v0.2
Changes in v0.1.3
=================
- Added Django authentication backend
- Added ``Document.meta`` support for indexes, which are ensured just before
  querying takes place
- A few minor bugfixes

@@ -38,7 +38,7 @@ master_doc = 'index'

# General information about the project.
project = u'MongoEngine'
-copyright = u'2009-2010, Harry Marr'
+copyright = u'2009-2011, Harry Marr'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
|
@ -4,14 +4,14 @@ Defining documents
|
||||
In MongoDB, a **document** is roughly equivalent to a **row** in an RDBMS. When
|
||||
working with relational databases, rows are stored in **tables**, which have a
|
||||
strict **schema** that the rows follow. MongoDB stores documents in
|
||||
**collections** rather than tables - the principle difference is that no schema
|
||||
is enforced at a database level.
|
||||
**collections** rather than tables - the principle difference is that no schema
|
||||
is enforced at a database level.
|
||||
|
||||
Defining a document's schema
|
||||
============================
|
||||
MongoEngine allows you to define schemata for documents as this helps to reduce
|
||||
coding errors, and allows for utility methods to be defined on fields which may
|
||||
be present.
|
||||
be present.
|
||||
|
||||
To define a schema for a document, create a class that inherits from
|
||||
:class:`~mongoengine.Document`. Fields are specified by adding **field
|
||||
@@ -19,7 +19,7 @@ objects** as class attributes to the document class::

    from mongoengine import *
    import datetime

    class Page(Document):
        title = StringField(max_length=200, required=True)
        date_modified = DateTimeField(default=datetime.datetime.now)

@@ -31,31 +31,35 @@ By default, fields are not required. To make a field mandatory, set the
validation constraints available (such as :attr:`max_length` in the example
above). Fields may also take default values, which will be used if a value is
not provided. Default values may optionally be a callable, which will be called
to retrieve the value (such as in the above example). The field types available
are as follows:

* :class:`~mongoengine.StringField`
* :class:`~mongoengine.URLField`
* :class:`~mongoengine.EmailField`
* :class:`~mongoengine.IntField`
* :class:`~mongoengine.FloatField`
* :class:`~mongoengine.DecimalField`
* :class:`~mongoengine.DateTimeField`
* :class:`~mongoengine.ComplexDateTimeField`
* :class:`~mongoengine.ListField`
* :class:`~mongoengine.SortedListField`
* :class:`~mongoengine.DictField`
* :class:`~mongoengine.MapField`
* :class:`~mongoengine.ObjectIdField`
* :class:`~mongoengine.EmbeddedDocumentField`
* :class:`~mongoengine.ReferenceField`
* :class:`~mongoengine.GenericReferenceField`
* :class:`~mongoengine.GenericEmbeddedDocumentField`
* :class:`~mongoengine.BooleanField`
* :class:`~mongoengine.FileField`
* :class:`~mongoengine.BinaryField`
* :class:`~mongoengine.GeoPointField`
* :class:`~mongoengine.SequenceField`

Field arguments
---------------
Each field type can be customized by keyword arguments. The following keyword
arguments can be set on all fields:

:attr:`db_field` (Default: None)
@@ -74,7 +78,7 @@ arguments can be set on all fields:

    The definition of default parameters follows `the general rules on Python
    <http://docs.python.org/reference/compound_stmts.html#function-definitions>`__,
    which means that some care should be taken when dealing with default mutable objects
    (like in :class:`~mongoengine.ListField` or :class:`~mongoengine.DictField`)::

        class ExampleFirst(Document):
@@ -89,7 +93,7 @@ arguments can be set on all fields:
            # This can make an .append call add values to the default (and to
            # all the following objects), instead of to just one object
            values = ListField(IntField(), default=[1,2,3])


:attr:`unique` (Default: False)
    When True, no documents in the collection will have the same value for this
@@ -104,7 +108,13 @@ arguments can be set on all fields:

:attr:`choices` (Default: None)
    An iterable of choices to which the value of this field should be limited.

:attr:`help_text` (Default: None)
    Optional help text to output with the field - used by form libraries

:attr:`verbose_name` (Default: None)
    Optional human-readable name for the field - used by form libraries

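As a quick illustration (a sketch; the field names are hypothetical, and
``choices`` uses the 0.5 tuple form described in the changelog)::

    class Employee(Document):
        # required, and stored under the shorter key 'n' in MongoDB
        name = StringField(required=True, db_field='n')
        # help_text / verbose_name are metadata picked up by form libraries
        role = StringField(choices=[('D', 'Developer'), ('M', 'Manager')],
                           help_text='Job role', verbose_name='Role')
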
List fields
-----------
@@ -121,7 +131,7 @@ Embedded documents
MongoDB has the ability to embed documents within other documents. Schemata may
be defined for these embedded documents, just as they may be for regular
documents. To create an embedded document, just define a document as usual, but
inherit from :class:`~mongoengine.EmbeddedDocument` rather than
:class:`~mongoengine.Document`::

    class Comment(EmbeddedDocument):
@@ -144,7 +154,7 @@ Often, an embedded document may be used instead of a dictionary -- generally
this is recommended as dictionaries don't support validation or custom field
types. However, sometimes you will not know the structure of what you want to
store; in this situation a :class:`~mongoengine.DictField` is appropriate::

    class SurveyResponse(Document):
        date = DateTimeField()
        user = ReferenceField(User)
@@ -152,16 +162,19 @@ store; in this situation a :class:`~mongoengine.DictField` is appropriate::

    survey_response = SurveyResponse(date=datetime.now(), user=request.user)
    response_form = ResponseForm(request.POST)
    survey_response.answers = response_form.cleaned_data()
    survey_response.save()

Dictionaries can store complex data, other dictionaries, lists, references to
other objects, and so are the most flexible field type available.

Reference fields
----------------
References may be stored to other documents in the database using the
:class:`~mongoengine.ReferenceField`. Pass in another document class as the
first argument to the constructor, then simply assign document objects to the
field::

    class User(Document):
        name = StringField()

@@ -193,19 +206,72 @@ as the constructor's argument::

    class ProfilePage(Document):
        content = StringField()

Dealing with deletion of referred documents
'''''''''''''''''''''''''''''''''''''''''''
By default, MongoDB doesn't check the integrity of your data, so deleting
documents that other documents still hold references to will lead to consistency
issues. MongoEngine's :class:`ReferenceField` adds some functionality to
safeguard against these kinds of database integrity problems, providing each
reference with a delete rule specification. A delete rule is specified by
supplying the :attr:`reverse_delete_rule` attribute on the
:class:`ReferenceField` definition, like this::

    class Employee(Document):
        ...
        profile_page = ReferenceField('ProfilePage', reverse_delete_rule=mongoengine.NULLIFY)

The declaration in this example means that when a :class:`ProfilePage` object
is removed, any :class:`Employee` still referencing it has its
:attr:`profile_page` field nullified. If a whole batch of profile pages is
removed, the references on all linked employees are cleared as well.

Its value can take any of the following constants:

:const:`mongoengine.DO_NOTHING`
    This is the default and won't do anything. Deletes are fast, but may cause
    database inconsistency or dangling references.
:const:`mongoengine.DENY`
    Deletion is denied if there still exist references to the object being
    deleted.
:const:`mongoengine.NULLIFY`
    Any object's fields still referring to the object being deleted are removed
    (using MongoDB's "unset" operation), effectively nullifying the relationship.
:const:`mongoengine.CASCADE`
    Any object containing fields that are referring to the object being deleted
    are deleted first.

.. warning::
    A safety note on setting up these delete rules! Since the delete rules are
    not recorded on the database level by MongoDB itself, but instead at runtime,
    in-memory, by the MongoEngine module, it is of the utmost importance
    that the module that declares the relationship is loaded **BEFORE** the
    delete is invoked.

    If, for example, the :class:`Employee` object lives in the
    :mod:`payroll` app, and the :class:`ProfilePage` in the :mod:`people`
    app, it is extremely important that the :mod:`people` app is loaded
    before any employee is removed, because otherwise, MongoEngine could
    never know this relationship exists.

    In Django, be sure to put all apps that have such delete rule declarations in
    their :file:`models.py` in the :const:`INSTALLED_APPS` tuple.

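A minimal sketch of how this rule plays out at runtime (hypothetical data,
assuming the API above)::

    page = ProfilePage(content='...')
    page.save()
    emp = Employee(profile_page=page)
    emp.save()

    page.delete()    # triggers the NULLIFY rule declared above
    emp.reload()
    assert emp.profile_page is None   # the reference was unset
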
Generic reference fields
''''''''''''''''''''''''
A second kind of reference field also exists,
:class:`~mongoengine.GenericReferenceField`. This allows you to reference any
kind of :class:`~mongoengine.Document`, and hence doesn't take a
:class:`~mongoengine.Document` subclass as a constructor argument::

    class Link(Document):
        url = StringField()

    class Post(Document):
        title = StringField()

    class Bookmark(Document):
        bookmark_object = GenericReferenceField()

@@ -219,9 +285,10 @@ kind of :class:`~mongoengine.Document`, and hence doesn't take a

    Bookmark(bookmark_object=post).save()

.. note::

    Using :class:`~mongoengine.GenericReferenceField`\ s is slightly less
    efficient than the standard :class:`~mongoengine.ReferenceField`\ s, so if
    you will only be referencing one document type, prefer the standard
    :class:`~mongoengine.ReferenceField`.

Uniqueness constraints
@@ -229,7 +296,7 @@ Uniqueness constraints
MongoEngine allows you to specify that a field should be unique across a
collection by providing ``unique=True`` to a :class:`~mongoengine.Field`\ 's
constructor. If you try to save a document that has the same value for a unique
field as a document that is already in the database, an
:class:`~mongoengine.OperationError` will be raised. You may also specify
multi-field uniqueness constraints by using :attr:`unique_with`, which may be
either a single field name, or a list or tuple of field names::
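The example that followed is cut by the diff; presumably along these lines
(a sketch)::

    class User(Document):
        username = StringField(unique=True)
        first_name = StringField()
        # no two users may share the same first and last name
        last_name = StringField(unique_with='first_name')
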
@@ -241,14 +308,14 @@ either a single field name, or a list or tuple of field names::

Skipping Document validation on save
------------------------------------
You can also skip the whole document validation process by setting
``validate=False`` when calling the :meth:`~mongoengine.document.Document.save`
method::

    class Recipient(Document):
        name = StringField()
        email = EmailField()

    recipient = Recipient(name='admin', email='root@localhost')
    recipient.save()                # will raise a ValidationError while
    recipient.save(validate=False)  # won't
@@ -276,7 +343,7 @@ A :class:`~mongoengine.Document` may use a **Capped Collection** by specifying
stored in the collection, and :attr:`max_size` is the maximum size of the
collection in bytes. If :attr:`max_size` is not specified and
:attr:`max_documents` is, :attr:`max_size` defaults to 10000000 bytes (10MB).
The following example shows a :class:`Log` document that will be limited to
1000 entries and 2MB of disk space::

    class Log(Document):
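The example is cut short by the diff; based on the options just described, the
body presumably continues along these lines (a sketch)::

    class Log(Document):
        ip_address = StringField()
        meta = {'max_documents': 1000, 'max_size': 2000000}
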
@@ -288,9 +355,10 @@ Indexes
You can specify indexes on collections to make querying faster. This is done
by creating a list of index specifications called :attr:`indexes` in the
:attr:`~mongoengine.Document.meta` dictionary, where an index specification may
-either be a single field name, or a tuple containing multiple field names. A
-direction may be specified on fields by prefixing the field name with a **+**
-or a **-** sign. Note that direction only matters on multi-field indexes. ::
+either be a single field name, a tuple containing multiple field names, or a
+dictionary containing a full index definition. A direction may be specified on
+fields by prefixing the field name with a **+** or a **-** sign. Note that
+direction only matters on multi-field indexes. ::

    class Page(Document):
        title = StringField()
@@ -299,10 +367,26 @@ or a **-** sign. Note that direction only matters on multi-field indexes. ::
            'indexes': ['title', ('title', '-rating')]
        }

If a dictionary is passed then the following options are available:

:attr:`fields` (Default: None)
    The fields to index. Specified in the same format as described above.

:attr:`types` (Default: True)
    Whether the index should have the :attr:`_types` field added automatically
    to the start of the index.

:attr:`sparse` (Default: False)
    Whether the index should be sparse.

:attr:`unique` (Default: False)
    Whether the index should be unique.

.. note::
    Geospatial indexes will be automatically created for all
    :class:`~mongoengine.GeoPointField`\ s

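A sketch of the dictionary form, combining the options above (hypothetical
index)::

    class Page(Document):
        title = StringField()
        meta = {
            'indexes': [
                {'fields': ['title'], 'unique': True,
                 'sparse': True, 'types': False},
            ],
        }
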
Ordering
========
A default ordering can be specified for your
@@ -324,7 +408,7 @@ subsequent calls to :meth:`~mongoengine.queryset.QuerySet.order_by`. ::

    blog_post_1 = BlogPost(title="Blog Post #1")
    blog_post_1.published_date = datetime(2010, 1, 5, 0, 0, 0)

    blog_post_2 = BlogPost(title="Blog Post #2")
    blog_post_2.published_date = datetime(2010, 1, 6, 0, 0, 0)

    blog_post_3 = BlogPost(title="Blog Post #3")
@@ -336,7 +420,7 @@ subsequent calls to :meth:`~mongoengine.queryset.QuerySet.order_by`. ::

    # get the "first" BlogPost using default ordering
    # from BlogPost.meta.ordering
    latest_post = BlogPost.objects.first()
    assert latest_post.title == "Blog Post #3"

    # override default ordering, order BlogPosts by "published_date"
@@ -365,7 +449,7 @@ Working with existing data
To enable correct retrieval of documents involved in this kind of hierarchy,
two extra attributes are stored on each document in the database: :attr:`_cls`
and :attr:`_types`. These are hidden from the user through the MongoEngine
interface, but may not be present if you are trying to use MongoEngine with
an existing database. For this reason, you may disable this inheritance
mechanism, removing the dependency on :attr:`_cls` and :attr:`_types`, enabling
you to work with existing databases. To disable inheritance on a document
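The hunk ends mid-sentence here; the mechanism it is introducing is the
:attr:`allow_inheritance` meta flag, presumably along these lines (a sketch)::

    class Page(Document):
        title = StringField()
        meta = {'allow_inheritance': False}
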
@@ -4,12 +4,12 @@ Documents instances
To create a new document object, create an instance of the relevant document
class, providing values for its fields as its constructor keyword arguments.
You may provide values for any of the fields on the document::

    >>> page = Page(title="Test Page")
    >>> page.title
    'Test Page'

You may also assign values to the document's fields using standard object
attribute syntax::

    >>> page.title = "Example Page"
@@ -18,10 +18,22 @@ attribute syntax::

Saving and deleting documents
=============================
-To save the document to the database, call the
-:meth:`~mongoengine.Document.save` method. If the document does not exist in
-the database, it will be created. If it does already exist, it will be
-updated.
+MongoEngine tracks changes to documents to provide efficient saving. To save
+the document to the database, call the :meth:`~mongoengine.Document.save` method.
+If the document does not exist in the database, it will be created. If it does
+already exist, then any changes will be updated atomically. For example::
+
+    >>> page = Page(title="Test Page")
+    >>> page.save()  # Performs an insert
+    >>> page.title = "My Page"
+    >>> page.save()  # Performs an atomic set on the title field.
+
+.. note::
+
+    Changes to documents are tracked and on the whole perform ``set`` operations.
+
+    * ``list_field.pop(0)`` - *sets* the resulting list
+    * ``del(list_field)`` - *unsets* the whole list

To delete a document, call the :meth:`~mongoengine.Document.delete` method.
Note that this will only work if the document exists in the database and has a
@@ -67,6 +79,7 @@ is an alias to :attr:`id`::

    >>> page.id == page.pk

.. note::

    If you define your own primary key field, the field implicitly becomes
    required, so a :class:`ValidationError` will be thrown if you don't provide
    it.

@@ -66,6 +66,7 @@ Deleting stored files is achieved with the :func:`delete` method::

    marmot.photo.delete()

.. note::

    The FileField in a Document actually only stores the ID of a file in a
    separate GridFS collection. This means that deleting a document
    with a defined FileField does not actually delete the file. You must be

@@ -11,3 +11,4 @@ User Guide
   document-instances
   querying
   gridfs
   signals

@@ -1,31 +1,31 @@
======================
Installing MongoEngine
======================

To use MongoEngine, you will need to download `MongoDB <http://mongodb.org/>`_
and ensure it is running in an accessible location. You will also need
`PyMongo <http://api.mongodb.org/python>`_ to use MongoEngine, but if you
install MongoEngine using setuptools, then the dependencies will be handled for
you.

-MongoEngine is available on PyPI, so to use it you can use
-:program:`easy_install`:
+MongoEngine is available on PyPI, so to use it you can use :program:`pip`:

.. code-block:: console

-    # easy_install mongoengine
+    $ pip install mongoengine

Alternatively, if you don't have setuptools installed, `download it from PyPi
<http://pypi.python.org/pypi/mongoengine/>`_ and run

.. code-block:: console

-    # python setup.py install
+    $ python setup.py install

To use the bleeding-edge version of MongoEngine, you can get the source from
`GitHub <http://github.com/hmarr/mongoengine/>`_ and install it as above:

.. code-block:: console

-    # git clone git://github.com/hmarr/mongoengine
-    # cd mongoengine
-    # python setup.py install
+    $ git clone git://github.com/hmarr/mongoengine
+    $ cd mongoengine
+    $ python setup.py install
@@ -5,8 +5,8 @@ Querying the database
is used for accessing the objects in the database associated with the class.
The :attr:`objects` attribute is actually a
:class:`~mongoengine.queryset.QuerySetManager`, which creates and returns a new
-a new :class:`~mongoengine.queryset.QuerySet` object on access. The
-:class:`~mongoengine.queryset.QuerySet` object may may be iterated over to
+:class:`~mongoengine.queryset.QuerySet` object on access. The
+:class:`~mongoengine.queryset.QuerySet` object may be iterated over to
fetch documents from the database::

    # Prints out the names of all the users in the database
@@ -14,6 +14,7 @@ fetch documents from the database::
        print user.name

.. note::

    Once the iteration finishes (when :class:`StopIteration` is raised),
    :meth:`~mongoengine.queryset.QuerySet.rewind` will be called so that the
    :class:`~mongoengine.queryset.QuerySet` may be iterated over again. The
@@ -23,7 +24,7 @@ fetch documents from the database::
Filtering queries
=================
The query may be filtered by calling the
:class:`~mongoengine.queryset.QuerySet` object with field lookup keyword
arguments. The keys in the keyword arguments correspond to fields on the
:class:`~mongoengine.Document` you are querying::

@@ -39,29 +40,6 @@ syntax::
    # been written by a user whose 'country' field is set to 'uk'
    uk_pages = Page.objects(author__country='uk')

-Querying lists
---------------
-On most fields, this syntax will look up documents where the field specified
-matches the given value exactly, but when the field refers to a
-:class:`~mongoengine.ListField`, a single item may be provided, in which case
-lists that contain that item will be matched::
-
-    class Page(Document):
-        tags = ListField(StringField())
-
-    # This will match all pages that have the word 'coding' as an item in the
-    # 'tags' list
-    Page.objects(tags='coding')
-
-Raw queries
------------
-It is possible to provide a raw PyMongo query as a query parameter, which will
-be integrated directly into the query. This is done using the ``__raw__``
-keyword argument::
-
-    Page.objects(__raw__={'tags': 'coding'})
-
-.. versionadded:: 0.4

Query operators
===============
@@ -84,7 +62,7 @@ Available operators are as follows:
* ``nin`` -- value is not in list (a list of values should be provided)
* ``mod`` -- ``value % x == y``, where ``x`` and ``y`` are two provided values
* ``all`` -- every item in list of values provided is in array
* ``size`` -- the size of the array is the same as the value provided
* ``exists`` -- value for field exists

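For example (a sketch reusing the hypothetical ``Page`` document from earlier)::

    # Pages whose tags list contains exactly two items
    Page.objects(tags__size=2)

    # Pages that have a rating field set at all
    Page.objects(rating__exists=True)
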
The following operators are available as shortcuts to querying with regular
@@ -99,26 +77,67 @@ expressions:
* ``endswith`` -- string field ends with value
* ``iendswith`` -- string field ends with value (case insensitive)

.. versionadded:: 0.3

There are a few special operators for performing geographical queries, that
may be used with :class:`~mongoengine.GeoPointField`\ s:

* ``within_distance`` -- provide a list containing a point and a maximum
  distance (e.g. [(41.342, -87.653), 5])
* ``within_spherical_distance`` -- same as above but using the spherical geo model
  (e.g. [(41.342, -87.653), 5/earth_radius])
* ``near`` -- order the documents by how close they are to a given point
* ``near_sphere`` -- same as above but using the spherical geo model
* ``within_box`` -- filter documents to those within a given bounding box (e.g.
  [(35.0, -125.0), (40.0, -100.0)])
* ``within_polygon`` -- filter documents to those within a given polygon (e.g.
  [(41.91,-87.69), (41.92,-87.68), (41.91,-87.65), (41.89,-87.65)]).

  .. note:: Requires Mongo Server 2.0

.. versionadded:: 0.4
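A quick sketch of the geo operators (assuming a hypothetical ``Venue`` document
with a :class:`~mongoengine.GeoPointField` named ``location``)::

    class Venue(Document):
        name = StringField()
        location = GeoPointField()

    # Venues within 5 degrees of the given point
    Venue.objects(location__within_distance=[(41.342, -87.653), 5])

    # Venues ordered by proximity to the point
    Venue.objects(location__near=(41.342, -87.653))
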
Querying by position
====================
Querying lists
--------------
On most fields, this syntax will look up documents where the field specified
matches the given value exactly, but when the field refers to a
:class:`~mongoengine.ListField`, a single item may be provided, in which case
lists that contain that item will be matched::

    class Page(Document):
        tags = ListField(StringField())

    # This will match all pages that have the word 'coding' as an item in the
    # 'tags' list
    Page.objects(tags='coding')

It is possible to query by position in a list by using a numerical value as a
query operator. So if you wanted to find all pages whose first tag was ``db``,
you could use the following query::

-    BlogPost.objects(tags__0='db')
+    Page.objects(tags__0='db')

If you only want to fetch part of a list, e.g. you want to paginate a list, then
the `slice` operator is required::

    # comments - skip 5, limit 10
    Page.objects.fields(slice__comments=[5, 10])

For updating documents, if you don't know the position in a list, you can use
the $ positional operator ::

    Post.objects(comments__by="joe").update(**{'inc__comments__$__votes': 1})

However, this doesn't map well to the syntax, so you can also use a capital S instead ::

    Post.objects(comments__by="joe").update(inc__comments__S__votes=1)

.. note:: Due to Mongo, the $ operator currently only applies to the first matched item in the query.

Raw queries
-----------
It is possible to provide a raw PyMongo query as a query parameter, which will
be integrated directly into the query. This is done using the ``__raw__``
keyword argument::

    Page.objects(__raw__={'tags': 'coding'})

.. versionadded:: 0.4

@@ -163,9 +182,9 @@ To retrieve a result that should be unique in the collection, use
and :class:`~mongoengine.queryset.MultipleObjectsReturned` if more than one
document matched the query.

A variation of this method exists,
:meth:`~mongoengine.queryset.Queryset.get_or_create`, that will create a new
document with the query arguments if no documents match the query. An
additional keyword argument, :attr:`defaults`, may be provided, which will be
used as default values for the new document, in the case that it should need
to be created::

@@ -175,6 +194,22 @@ to be created::
    >>> a.name == b.name and a.age == b.age
    True

Dereferencing results
---------------------
When iterating the results of a :class:`~mongoengine.ListField` or
:class:`~mongoengine.DictField`, we automatically dereference any
:class:`~pymongo.dbref.DBRef` objects as efficiently as possible, reducing the
number of queries to mongo.

There are times when that efficiency is not enough: documents that have
:class:`~mongoengine.ReferenceField` objects or
:class:`~mongoengine.GenericReferenceField` objects at the top level are
expensive, as the number of queries to MongoDB can quickly rise.

To limit the number of queries, use
:func:`~mongoengine.queryset.QuerySet.select_related`, which converts the
QuerySet to a list and dereferences as efficiently as possible.

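A sketch of the difference (hypothetical documents, assuming the 0.5 API
described here)::

    # Without select_related, accessing post.author may trigger one query
    # per post; select_related() dereferences the whole batch up front.
    posts = Post.objects(tags='mongodb').select_related()
    for post in posts:
        print post.author.name
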
Default Document queries
========================
By default, the objects :attr:`~mongoengine.Document.objects` attribute on a
@@ -240,7 +275,7 @@ Javascript code that is executed on the database server.
Counting results
----------------
Just as with limiting and skipping results, there is a method on
:class:`~mongoengine.queryset.QuerySet` objects --
:meth:`~mongoengine.queryset.QuerySet.count`, but there is also a more Pythonic
way of achieving this::

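The diff cuts the snippet short; the Pythonic form is presumably just ``len()``
over the queryset (a sketch)::

    # QuerySet implements __len__, so the Pythonic count is:
    num_users = len(User.objects)
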
@@ -254,6 +289,7 @@ You may sum over the values of a specific field on documents using

    yearly_expense = Employee.objects.sum('salary')

.. note::

    If the field isn't present on a document, that document will be ignored
    when computing the sum.

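Averaging works the same way (a sketch, assuming the same ``Employee``
document)::

    mean_salary = Employee.objects.average('salary')
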
@@ -302,6 +338,11 @@ will be given::

    >>> f.rating  # default value
    3

.. note::

    The :meth:`~mongoengine.queryset.QuerySet.exclude` method is the opposite of
    :meth:`~mongoengine.queryset.QuerySet.only`, if you want to exclude a field.

    If you later need the missing fields, just call
    :meth:`~mongoengine.Document.reload` on your document.

@@ -309,11 +350,11 @@ Advanced queries
================
Sometimes calling a :class:`~mongoengine.queryset.QuerySet` object with keyword
arguments can't fully express the query you want to use -- for example if you
need to combine a number of constraints using *and* and *or*. This is made
possible in MongoEngine through the :class:`~mongoengine.queryset.Q` class.
A :class:`~mongoengine.queryset.Q` object represents part of a query, and
can be initialised using the same keyword-argument syntax you use to query
documents. To build a complex query, you may combine
:class:`~mongoengine.queryset.Q` objects using the ``&`` (and) and ``|`` (or)
operators. To use a :class:`~mongoengine.queryset.Q` object, pass it in as the
first positional argument to :attr:`Document.objects` when you filter it by
@@ -325,11 +366,66 @@ calling it with keyword arguments::

    # Get top posts
    Post.objects((Q(featured=True) & Q(hits__gte=1000)) | Q(hits__gte=5000))

-.. warning::
-    Only use these advanced queries if absolutely necessary as they will execute
-    significantly slower than regular queries. This is because they are not
-    natively supported by MongoDB -- they are compiled to Javascript and sent
-    to the server for execution.

.. _guide-atomic-updates:

Atomic updates
==============
Documents may be updated atomically by using the
:meth:`~mongoengine.queryset.QuerySet.update_one` and
:meth:`~mongoengine.queryset.QuerySet.update` methods on a
:meth:`~mongoengine.queryset.QuerySet`. There are several different "modifiers"
that you may use with these methods:

* ``set`` -- set a particular value
* ``unset`` -- delete a particular value (since MongoDB v1.3+)
* ``inc`` -- increment a value by a given amount
* ``dec`` -- decrement a value by a given amount
* ``pop`` -- remove the first or last element of a list
* ``push`` -- append a value to a list
* ``push_all`` -- append several values to a list
* ``pull`` -- remove a value from a list
* ``pull_all`` -- remove several values from a list
* ``add_to_set`` -- add a value to a list only if it's not in the list already

The syntax for atomic updates is similar to the querying syntax, but the
modifier comes before the field, not after it::

    >>> post = BlogPost(title='Test', page_views=0, tags=['database'])
    >>> post.save()
    >>> BlogPost.objects(id=post.id).update_one(inc__page_views=1)
    >>> post.reload()  # the document has been changed, so we need to reload it
    >>> post.page_views
    1
    >>> BlogPost.objects(id=post.id).update_one(set__title='Example Post')
    >>> post.reload()
    >>> post.title
    'Example Post'
    >>> BlogPost.objects(id=post.id).update_one(push__tags='nosql')
    >>> post.reload()
    >>> post.tags
    ['database', 'nosql']

.. note::

    In version 0.5 the :meth:`~mongoengine.Document.save` runs atomic updates
    on changed documents by tracking changes to that document.

The positional operator allows you to update list items without knowing the
index position, therefore making the update a single atomic operation. As we
cannot use the `$` syntax in keyword arguments it has been mapped to `S`::

    >>> post = BlogPost(title='Test', page_views=0, tags=['database', 'mongo'])
    >>> post.save()
    >>> BlogPost.objects(id=post.id, tags='mongo').update(set__tags__S='mongodb')
    >>> post.reload()
    >>> post.tags
    ['database', 'mongodb']

.. note::
    Currently only top level lists are handled; future versions of mongodb /
    pymongo plan to support nested positional operators. See `The $ positional
    operator <http://www.mongodb.org/display/DOCS/Updating#Updating-The%24positionaloperator>`_.

Server-side javascript execution
================================
@@ -433,43 +529,3 @@ following example shows how the substitutions are made::

        return comments;
    }
    """)

-.. _guide-atomic-updates:
-
-Atomic updates
-==============
-Documents may be updated atomically by using the
-:meth:`~mongoengine.queryset.QuerySet.update_one` and
-:meth:`~mongoengine.queryset.QuerySet.update` methods on a
-:meth:`~mongoengine.queryset.QuerySet`. There are several different "modifiers"
-that you may use with these methods:
-
-* ``set`` -- set a particular value
-* ``unset`` -- delete a particular value (since MongoDB v1.3+)
-* ``inc`` -- increment a value by a given amount
-* ``dec`` -- decrement a value by a given amount
-* ``pop`` -- remove the first or last element of a list
-* ``push`` -- append a value to a list
-* ``push_all`` -- append several values to a list
-* ``pull`` -- remove a value from a list
-* ``pull_all`` -- remove several values from a list
-* ``add_to_set`` -- add a value to a list only if it's not in the list already
-
-The syntax for atomic updates is similar to the querying syntax, but the
-modifier comes before the field, not after it::
-
-    >>> post = BlogPost(title='Test', page_views=0, tags=['database'])
-    >>> post.save()
-    >>> BlogPost.objects(id=post.id).update_one(inc__page_views=1)
-    >>> post.reload()  # the document has been changed, so we need to reload it
-    >>> post.page_views
-    1
-    >>> BlogPost.objects(id=post.id).update_one(set__title='Example Post')
-    >>> post.reload()
-    >>> post.title
-    'Example Post'
-    >>> BlogPost.objects(id=post.id).update_one(push__tags='nosql')
-    >>> post.reload()
-    >>> post.tags
-    ['database', 'nosql']

docs/guide/signals.rst (new file): 49 lines
@@ -0,0 +1,49 @@
.. _signals:

Signals
=======

.. versionadded:: 0.5

Signal support is provided by the excellent `blinker`_ library and
will gracefully fall back if it is not available.

The following document signals exist in MongoEngine and are pretty self-explanatory:

* `mongoengine.signals.pre_init`
* `mongoengine.signals.post_init`
* `mongoengine.signals.pre_save`
* `mongoengine.signals.post_save`
* `mongoengine.signals.pre_delete`
* `mongoengine.signals.post_delete`

Example usage::

    import logging

    from mongoengine import *
    from mongoengine import signals

    class Author(Document):
        name = StringField()

        def __unicode__(self):
            return self.name

        @classmethod
        def pre_save(cls, sender, document, **kwargs):
            logging.debug("Pre Save: %s" % document.name)

        @classmethod
        def post_save(cls, sender, document, **kwargs):
            logging.debug("Post Save: %s" % document.name)
            if 'created' in kwargs:
                if kwargs['created']:
                    logging.debug("Created")
                else:
                    logging.debug("Updated")

    signals.pre_save.connect(Author.pre_save, sender=Author)
    signals.post_save.connect(Author.post_save, sender=Author)


.. _blinker: http://pypi.python.org/pypi/blinker
@@ -2,34 +2,62 @@
MongoEngine User Documentation
==============================

-MongoEngine is an Object-Document Mapper, written in Python for working with
+**MongoEngine** is an Object-Document Mapper, written in Python for working with
MongoDB. To install it, simply run

.. code-block:: console

    # pip install -U mongoengine

-The source is available on `GitHub <http://github.com/hmarr/mongoengine>`_.

:doc:`tutorial`
    Start here for a quick overview.

:doc:`guide/index`
    The full guide to MongoEngine

:doc:`apireference`
    The complete API documentation.

:doc:`django`
    Using MongoEngine and Django

Community
---------

To get help with using MongoEngine, use the `MongoEngine Users mailing list
<http://groups.google.com/group/mongoengine-users>`_ or come chat on the
`#mongoengine IRC channel <irc://irc.freenode.net/mongoengine>`_.

-If you are interested in contributing, join the developers' `mailing list

Contributing
------------

The source is available on `GitHub <http://github.com/hmarr/mongoengine>`_ and
contributions are always encouraged. Contributions can be as simple as
minor tweaks to this documentation. To contribute, fork the project on
`GitHub <http://github.com/hmarr/mongoengine>`_ and send a
pull request.

Also, you can join the developers' `mailing list
<http://groups.google.com/group/mongoengine-dev>`_.

Changes
-------
See the :doc:`changelog` for a full list of changes to MongoEngine.

.. toctree::
    :maxdepth: 2
    :hidden:

    tutorial
    guide/index
    apireference
    django
    changelog
    upgrade

Indices and tables
-==================
+------------------

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`

@@ -22,7 +22,7 @@ function. The only argument we need to provide is the name of the MongoDB
database to use::

    from mongoengine import *

    connect('tumblelog')

For more information about connecting to MongoDB see :ref:`guide-connecting`.
@@ -112,7 +112,7 @@ link table, we can just store a list of tags in each post. So, for both
efficiency and simplicity's sake, we'll store the tags as strings directly
within the post, rather than storing references to tags in a separate
collection. Especially as tags are generally very short (often even shorter
than a document's id), this denormalisation won't impact the size of our
database very strongly. So let's take a look at the code of our modified
:class:`Post` class::

@@ -152,6 +152,21 @@ We can then store a list of comment documents in our post document::
        tags = ListField(StringField(max_length=30))
        comments = ListField(EmbeddedDocumentField(Comment))

Handling deletions of references
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

The :class:`~mongoengine.ReferenceField` object takes a keyword
`reverse_delete_rule` for handling deletion rules if the reference is deleted.
To delete all the posts if a user is deleted, set the rule::

    class Post(Document):
        title = StringField(max_length=120, required=True)
        author = ReferenceField(User, reverse_delete_rule=CASCADE)
        tags = ListField(StringField(max_length=30))
        comments = ListField(EmbeddedDocumentField(Comment))

See :class:`~mongoengine.ReferenceField` for more information.

Adding data to our Tumblelog
============================
Now that we've defined how our documents will be structured, let's start adding
@@ -250,5 +265,5 @@ the first matched by the query you provide. Aggregation functions may also be
used on :class:`~mongoengine.queryset.QuerySet` objects::

    num_posts = Post.objects(tags='mongodb').count()
-    print 'Found % posts with tag "mongodb"' % num_posts
+    print 'Found %d posts with tag "mongodb"' % num_posts

docs/upgrade.rst (new file): 97 lines
@@ -0,0 +1,97 @@
=========
Upgrading
=========

0.4 to 0.5
==========

There have been the following backwards incompatibilities from 0.4 to 0.5. The
main areas of change are: choices in fields, map_reduce and collection names.

Choice options
--------------

Choices are now expected to be an iterable of tuples, with the first element in
each tuple being the actual value to be stored. The second element is the
human-readable name for the option.
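For example (a sketch; the field is hypothetical)::

    # 0.4 style -- a flat iterable of stored values
    size = StringField(choices=('S', 'M', 'L'))

    # 0.5 style -- (stored value, human-readable name) tuples
    size = StringField(choices=[('S', 'Small'), ('M', 'Medium'), ('L', 'Large')])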

PyMongo / MongoDB
-----------------

map_reduce now requires PyMongo 1.11+. The pymongo ``merge_output`` and
``reduce_output`` parameters have been deprecated.

More methods now use map_reduce, as db.eval is not supported for sharding; as
such the following have been changed:

* :meth:`~mongoengine.queryset.QuerySet.sum`
* :meth:`~mongoengine.queryset.QuerySet.average`
* :meth:`~mongoengine.queryset.QuerySet.item_frequencies`


Default collection naming
-------------------------

Previously it was just lowercase; it's now much more pythonic and readable, as
it's lowercase with underscores. Previously::

    class MyAceDocument(Document):
        pass

    MyAceDocument._meta['collection'] == 'myacedocument'

In 0.5 this will change to::

    class MyAceDocument(Document):
        pass

    MyAceDocument._get_collection_name() == 'my_ace_document'

To upgrade, use a Mixin class to set meta like so::

    class BaseMixin(object):
        meta = {
            'collection': lambda c: c.__name__.lower()
        }

    class MyAceDocument(Document, BaseMixin):
        pass

    MyAceDocument._get_collection_name() == 'myacedocument'

Alternatively, you can rename your collections, e.g.::

    from mongoengine.connection import _get_db
    from mongoengine.base import _document_registry

    def rename_collections():
        db = _get_db()

        failure = False

        collection_names = [d._get_collection_name() for d in _document_registry.values()]

        for new_style_name in collection_names:
            if not new_style_name:  # embedded documents don't have collections
                continue
            old_style_name = new_style_name.replace('_', '')

            if old_style_name == new_style_name:
                continue  # Nothing to do

            existing = db.collection_names()
            if old_style_name in existing:
                if new_style_name in existing:
                    failure = True
                    print "FAILED to rename: %s to %s (already exists)" % (
                        old_style_name, new_style_name)
                else:
                    db[old_style_name].rename(new_style_name)
                    print "Renamed: %s to %s" % (old_style_name, new_style_name)

        if failure:
            print "Upgrading collection names failed"
        else:
            print "Upgraded collection names"

@@ -6,13 +6,16 @@ import connection
from connection import *
import queryset
from queryset import *
+import signals
+from signals import *

__all__ = (document.__all__ + fields.__all__ + connection.__all__ +
-           queryset.__all__)
+           queryset.__all__ + signals.__all__)

__author__ = 'Harry Marr'

-VERSION = (0, 4, 0)
+VERSION = (0, 4, 1)


def get_version():
    version = '%s.%s' % (VERSION[0], VERSION[1])
@@ -21,4 +24,3 @@ def get_version():
    return version

__version__ = get_version()

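The version bump above feeds :func:`get_version`; presumably (a sketch, not
output taken from the diff)::

    >>> import mongoengine
    >>> mongoengine.get_version()
    '0.4.1'
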
@ -1,33 +1,68 @@
|
||||
from queryset import QuerySet, QuerySetManager
|
||||
from queryset import DoesNotExist, MultipleObjectsReturned
|
||||
from queryset import DO_NOTHING
|
||||
|
||||
from mongoengine import signals
|
||||
|
||||
import weakref
|
||||
import sys
|
||||
import pymongo
|
||||
import pymongo.objectid
|
||||
import operator
|
||||
from functools import partial
|
||||
|
||||
|
||||
_document_registry = {}
|
||||
class NotRegistered(Exception):
|
||||
pass
|
||||
|
||||
def get_document(name):
|
||||
return _document_registry[name]
|
||||
|
||||
class InvalidDocumentError(Exception):
|
||||
pass
|
||||
|
||||
class ValidationError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
_document_registry = {}
|
||||
|
||||
|
||||
def get_document(name):
|
||||
doc = _document_registry.get(name, None)
|
||||
if not doc:
|
||||
# Possible old style names
|
||||
end = ".%s" % name
|
||||
possible_match = [k for k in _document_registry.keys() if k.endswith(end)]
|
||||
if len(possible_match) == 1:
|
||||
doc = _document_registry.get(possible_match.pop(), None)
|
||||
if not doc:
|
||||
raise NotRegistered("""
|
||||
`%s` has not been registered in the document registry.
|
||||
Importing the document class automatically registers it, has it
|
||||
been imported?
|
||||
""".strip() % name)
|
||||
return doc
|
||||
|
||||
|
||||
class BaseField(object):
    """A base class for fields in a MongoDB document. Instances of this class
    may be added to subclasses of `Document` to define a document's schema.

    .. versionchanged:: 0.5 - added verbose and help text
    """

    # Fields may have _types inserted into indexes by default
    _index_with_types = True
    _geo_index = False

-    def __init__(self, db_field=None, name=None, required=False, default=None,
    # These track each time a Field instance is created. Used to retain order.
    # The auto_creation_counter is used for fields that MongoEngine implicitly
    # creates, creation_counter is used for all user-specified fields.
    creation_counter = 0
    auto_creation_counter = -1

    def __init__(self, db_field=None, name=None, required=False, default=None,
                 unique=False, unique_with=None, primary_key=False,
-                 validation=None, choices=None):
+                 validation=None, choices=None, verbose_name=None, help_text=None):
        self.db_field = (db_field or name) if not primary_key else '_id'
        if name:
            import warnings
@@ -41,9 +76,19 @@ class BaseField(object):
        self.primary_key = primary_key
        self.validation = validation
        self.choices = choices
+        self.verbose_name = verbose_name
+        self.help_text = help_text

        # Adjust the appropriate creation counter, and save our local copy.
        if self.db_field == '_id':
            self.creation_counter = BaseField.auto_creation_counter
            BaseField.auto_creation_counter -= 1
        else:
            self.creation_counter = BaseField.creation_counter
            BaseField.creation_counter += 1

    def __get__(self, instance, owner):
        """Descriptor for retrieving a value from a field in a document. Do
        any necessary conversion between Python and MongoDB types.
        """
        if instance is None:
@@ -57,12 +102,19 @@ class BaseField(object):
        # Allow callable default values
        if callable(value):
            value = value()

+        # Convert lists / values so we can watch for any changes on them
+        if isinstance(value, (list, tuple)) and not isinstance(value, BaseList):
+            value = BaseList(value, instance=instance, name=self.name)
+        elif isinstance(value, dict) and not isinstance(value, BaseDict):
+            value = BaseDict(value, instance=instance, name=self.name)
        return value

    def __set__(self, instance, value):
        """Descriptor for assigning a value to a field in a document.
        """
        instance._data[self.name] = value
+        instance._mark_as_changed(self.name)

    def to_python(self, value):
        """Convert a MongoDB-compatible type to a Python type.
@@ -87,9 +139,9 @@ class BaseField(object):
    def _validate(self, value):
        # check choices
        if self.choices is not None:
-            if value not in self.choices:
-                raise ValidationError("Value must be one of %s."
-                                      % unicode(self.choices))
+            option_keys = [option_key for option_key, option_value in self.choices]
+            if value not in option_keys:
+                raise ValidationError("Value must be one of %s." % unicode(option_keys))

        # check validation argument
        if self.validation is not None:
@@ -102,13 +154,159 @@ class BaseField(object):

        self.validate(value)
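The 0.5 branch adds `verbose_name` and `help_text` to every field and tightens `choices` validation to expect (value, label) pairs. A minimal sketch of how a schema might use the new keyword arguments (the `Shirt` class and its values are hypothetical):

    from mongoengine import Document, StringField

    SIZES = (('S', 'Small'), ('M', 'Medium'), ('L', 'Large'))  # (value, label) pairs

    class Shirt(Document):
        size = StringField(max_length=3, choices=SIZES,
                           verbose_name='shirt size',
                           help_text='Stocked sizes only')

    shirt = Shirt(size='M')
    shirt.validate()   # passes: 'M' is one of the option keys
    # shirt.get_size_display() == 'Medium' -- added dynamically for choice fields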
class ComplexBaseField(BaseField):
    """Handles complex fields, such as lists / dictionaries.

    Allows for nesting of embedded documents inside complex types.
    Handles the lazy dereferencing of a queryset by lazily dereferencing all
    items in a list / dict rather than one at a time.

    .. versionadded:: 0.5
    """

    field = None

    def __get__(self, instance, owner):
        """Descriptor to automatically dereference references.
        """
        if instance is None:
            # Document class being used rather than a document object
            return self

        from dereference import dereference
        instance._data[self.name] = dereference(
            instance._data.get(self.name), max_depth=1, instance=instance, name=self.name, get=True
        )
        return super(ComplexBaseField, self).__get__(instance, owner)

    def to_python(self, value):
        """Convert a MongoDB-compatible type to a Python type.
        """
        from mongoengine import Document

        if isinstance(value, basestring):
            return value

        if hasattr(value, 'to_python'):
            return value.to_python()

        is_list = False
        if not hasattr(value, 'items'):
            try:
                is_list = True
                value = dict([(k, v) for k, v in enumerate(value)])
            except TypeError:  # Not iterable: return the value
                return value

        if self.field:
            value_dict = dict([(key, self.field.to_python(item)) for key, item in value.items()])
        else:
            value_dict = {}
            for k, v in value.items():
                if isinstance(v, Document):
                    # We need the id from the saved object to create the DBRef
                    if v.pk is None:
                        raise ValidationError('You can only reference documents once '
                                              'they have been saved to the database')
                    collection = v._get_collection_name()
                    value_dict[k] = pymongo.dbref.DBRef(collection, v.pk)
                elif hasattr(v, 'to_python'):
                    value_dict[k] = v.to_python()
                else:
                    value_dict[k] = self.to_python(v)

        if is_list:  # Convert back to a list
            return [v for k, v in sorted(value_dict.items(), key=operator.itemgetter(0))]
        return value_dict

    def to_mongo(self, value):
        """Convert a Python type to a MongoDB-compatible type.
        """
        from mongoengine import Document

        if isinstance(value, basestring):
            return value

        if hasattr(value, 'to_mongo'):
            return value.to_mongo()

        is_list = False
        if not hasattr(value, 'items'):
            try:
                is_list = True
                value = dict([(k, v) for k, v in enumerate(value)])
            except TypeError:  # Not iterable: return the value
                return value

        if self.field:
            value_dict = dict([(key, self.field.to_mongo(item)) for key, item in value.items()])
        else:
            value_dict = {}
            for k, v in value.items():
                if isinstance(v, Document):
                    # We need the id from the saved object to create the DBRef
                    if v.pk is None:
                        raise ValidationError('You can only reference documents once '
                                              'they have been saved to the database')

                    # If it's a document that is not inheritable it won't have
                    # _types / _cls data, so making it a generic reference allows
                    # us to dereference
                    meta = getattr(v, 'meta', getattr(v, '_meta', {}))
                    if meta and not meta['allow_inheritance'] and not self.field:
                        from fields import GenericReferenceField
                        value_dict[k] = GenericReferenceField().to_mongo(v)
                    else:
                        collection = v._get_collection_name()
                        value_dict[k] = pymongo.dbref.DBRef(collection, v.pk)
                elif hasattr(v, 'to_mongo'):
                    value_dict[k] = v.to_mongo()
                else:
                    value_dict[k] = self.to_mongo(v)

        if is_list:  # Convert back to a list
            return [v for k, v in sorted(value_dict.items(), key=operator.itemgetter(0))]
        return value_dict

    def validate(self, value):
        """If a field is provided ensure the value is valid.
        """
        if self.field:
            try:
                if hasattr(value, 'iteritems'):
                    [self.field.validate(v) for k, v in value.iteritems()]
                else:
                    [self.field.validate(v) for v in value]
            except Exception, err:
                raise ValidationError('Invalid %s item (%s)' % (
                    self.field.__class__.__name__, str(v)))

    def prepare_query_value(self, op, value):
        return self.to_mongo(value)

    def lookup_member(self, member_name):
        if self.field:
            return self.field.lookup_member(member_name)
        return None

    def _set_owner_document(self, owner_document):
        if self.field:
            self.field.owner_document = owner_document
        self._owner_document = owner_document

    def _get_owner_document(self, owner_document):
        self._owner_document = owner_document

    owner_document = property(_get_owner_document, _set_owner_document)
class ObjectIdField(BaseField):
    """A field wrapper around MongoDB's ObjectIds.
    """

    def to_python(self, value):
        return value
        # return unicode(value)

    def to_mongo(self, value):
        if not isinstance(value, pymongo.objectid.ObjectId):
@@ -143,25 +341,30 @@ class DocumentMetaclass(type):
        class_name = [name]
        superclasses = {}
        simple_class = True

        for base in bases:
            # Include all fields present in superclasses
            if hasattr(base, '_fields'):
                doc_fields.update(base._fields)
-                class_name.append(base._class_name)
                # Get superclasses from superclass
                superclasses[base._class_name] = base
                superclasses.update(base._superclasses)
            else:  # Add any mixin fields
                attrs.update(dict([(k, v) for k, v in base.__dict__.items()
                                   if issubclass(v.__class__, BaseField)]))

-            if hasattr(base, '_meta'):
+            if hasattr(base, '_meta') and not base._meta.get('abstract'):
                # Ensure that the Document class may be subclassed -
                # inheritance may be disabled to remove dependency on
                # additional fields _cls and _types
+                class_name.append(base._class_name)
                if base._meta.get('allow_inheritance', True) == False:
                    raise ValueError('Document %s may not be subclassed' %
                                     base.__name__)
                else:
                    simple_class = False

        doc_class_name = '.'.join(reversed(class_name))
        meta = attrs.get('_meta', attrs.get('meta', {}))

        if 'allow_inheritance' not in meta:
@@ -169,12 +372,11 @@ class DocumentMetaclass(type):

        # Only simple classes - direct subclasses of Document - may set
        # allow_inheritance to False
-        if not simple_class and not meta['allow_inheritance']:
+        if not simple_class and not meta['allow_inheritance'] and not meta['abstract']:
            raise ValueError('Only direct subclasses of Document may set '
                             '"allow_inheritance" to False')
        attrs['_meta'] = meta

-        attrs['_class_name'] = '.'.join(reversed(class_name))
+        attrs['_class_name'] = doc_class_name
        attrs['_superclasses'] = superclasses

        # Add the document's fields to the _fields attribute
@@ -186,26 +388,37 @@ class DocumentMetaclass(type):
            attr_value.db_field = attr_name
            doc_fields[attr_name] = attr_value
        attrs['_fields'] = doc_fields
+        attrs['_db_field_map'] = dict([(k, v.db_field) for k, v in doc_fields.items() if k != v.db_field])
+        attrs['_reverse_db_field_map'] = dict([(v, k) for k, v in attrs['_db_field_map'].items()])

+        from mongoengine import Document

        new_class = super_new(cls, name, bases, attrs)
        for field in new_class._fields.values():
            field.owner_document = new_class
+            delete_rule = getattr(field, 'reverse_delete_rule', DO_NOTHING)
+            if delete_rule != DO_NOTHING:
+                field.document_type.register_delete_rule(new_class, field.name,
+                                                         delete_rule)

+            if field.name and hasattr(Document, field.name):
+                raise InvalidDocumentError("%s is a document method and not a valid field name" % field.name)

        module = attrs.get('__module__')

        base_excs = tuple(base.DoesNotExist for base in bases
                          if hasattr(base, 'DoesNotExist')) or (DoesNotExist,)
        exc = subclass_exception('DoesNotExist', base_excs, module)
        new_class.add_to_class('DoesNotExist', exc)

        base_excs = tuple(base.MultipleObjectsReturned for base in bases
                          if hasattr(base, 'MultipleObjectsReturned'))
        base_excs = base_excs or (MultipleObjectsReturned,)
        exc = subclass_exception('MultipleObjectsReturned', base_excs, module)
        new_class.add_to_class('MultipleObjectsReturned', exc)

        global _document_registry
-        _document_registry[name] = new_class
+        _document_registry[doc_class_name] = new_class

        return new_class
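The new `InvalidDocumentError` stops a schema from shadowing a core `Document` method, which this metaclass now checks at class-definition time. A minimal sketch (the `Broken` class is hypothetical):

    from mongoengine import Document, StringField
    from mongoengine.base import InvalidDocumentError

    try:
        class Broken(Document):
            save = StringField()   # clashes with Document.save()
    except InvalidDocumentError:
        pass  # raised by the metaclass before the class is ever used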
@@ -220,15 +433,24 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):

    def __new__(cls, name, bases, attrs):
        super_new = super(TopLevelDocumentMetaclass, cls).__new__
        # Classes defined in this package are abstract and should not have
        # their own metadata with DB collection, etc.
        # __metaclass__ is only set on the class with the __metaclass__
        # attribute (i.e. it is not set on subclasses). This differentiates
        # 'real' documents from the 'Document' class
-        if attrs.get('__metaclass__') == TopLevelDocumentMetaclass:
+        #
+        # Also assume a class is abstract if it has abstract set to True in
+        # its meta dictionary. This allows custom Document superclasses.
+        if (attrs.get('__metaclass__') == TopLevelDocumentMetaclass or
+                ('meta' in attrs and attrs['meta'].get('abstract', False))):
+            # Make sure no base class was non-abstract
+            non_abstract_bases = [b for b in bases
+                                  if hasattr(b, '_meta') and not b._meta.get('abstract', False)]
+            if non_abstract_bases:
+                raise ValueError("Abstract document cannot have non-abstract base")
            return super_new(cls, name, bases, attrs)

-        collection = name.lower()
+        collection = ''.join('_%s' % c if c.isupper() else c for c in name).strip('_').lower()

        id_field = None
        base_indexes = []
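Two behavioural changes land here: a document whose meta sets abstract to True is skipped when collections are assigned, and default collection names are now derived by converting the CamelCase class name to snake_case rather than simply lower-casing it. A hedged sketch (class names hypothetical):

    from mongoengine import Document, StringField

    class BaseItem(Document):
        meta = {'abstract': True}   # no collection is created for this class
        name = StringField()

    class OnlineResource(BaseItem):
        pass

    # Old rule would have produced 'onlineresource'; the new rule gives:
    OnlineResource._get_collection_name()   # -> 'online_resource'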
@@ -236,28 +458,45 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):

        # Subclassed documents inherit collection from superclass
        for base in bases:
-            if hasattr(base, '_meta') and 'collection' in base._meta:
-                collection = base._meta['collection']

+            if hasattr(base, '_meta'):
+                if 'collection' in attrs.get('meta', {}) and not base._meta.get('abstract', False):
+                    import warnings
+                    msg = "Trying to set a collection on a subclass (%s)" % name
+                    warnings.warn(msg, SyntaxWarning)
+                    del(attrs['meta']['collection'])
+                if base._get_collection_name():
+                    collection = base._get_collection_name()
                # Propagate index options.
                for key in ('index_background', 'index_drop_dups', 'index_opts'):
                    if key in base._meta:
                        base_meta[key] = base._meta[key]

                id_field = id_field or base._meta.get('id_field')
                base_indexes += base._meta.get('indexes', [])
+                # Propagate 'allow_inheritance'
+                if 'allow_inheritance' in base._meta:
+                    base_meta['allow_inheritance'] = base._meta['allow_inheritance']
+                if 'queryset_class' in base._meta:
+                    base_meta['queryset_class'] = base._meta['queryset_class']
                try:
                    base_meta['objects'] = base.__getattribute__(base, 'objects')
                except AttributeError:
                    pass

        meta = {
+            'abstract': False,
            'collection': collection,
            'max_documents': None,
            'max_size': None,
            'ordering': [],  # default ordering applied at runtime
            'indexes': [],  # indexes to be ensured at runtime
            'id_field': id_field,
            'index_background': False,
            'index_drop_dups': False,
            'index_opts': {},
            'queryset_class': QuerySet,
            'delete_rules': {},
            'allow_inheritance': True
        }
        meta.update(base_meta)

@@ -269,14 +508,44 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
        # DocumentMetaclass before instantiating CollectionManager object
        new_class = super_new(cls, name, bases, attrs)

        collection = attrs['_meta'].get('collection', None)
        if callable(collection):
            new_class._meta['collection'] = collection(new_class)

-        # Provide a default queryset unless one has been manually provided
-        if not hasattr(new_class, 'objects'):
-            new_class.objects = QuerySetManager()
+        manager = attrs.get('objects', meta.get('objects', QuerySetManager()))
+        if hasattr(manager, 'queryset_class'):
+            meta['queryset_class'] = manager.queryset_class
+        new_class.objects = manager

+        user_indexes = [QuerySet._build_index_spec(new_class, spec)
+                        for spec in meta['indexes']] + base_indexes
+        new_class._meta['indexes'] = user_indexes

+        unique_indexes = cls._unique_with_indexes(new_class)
+        new_class._meta['unique_indexes'] = unique_indexes

        for field_name, field in new_class._fields.items():
            # Check for custom primary key
            if field.primary_key:
                current_pk = new_class._meta['id_field']
                if current_pk and current_pk != field_name:
                    raise ValueError('Cannot override primary key field')

                if not current_pk:
                    new_class._meta['id_field'] = field_name
                    # Make 'Document.id' an alias to the real primary key field
                    new_class.id = field

        if not new_class._meta['id_field']:
            new_class._meta['id_field'] = 'id'
            new_class._fields['id'] = ObjectIdField(db_field='_id')
            new_class.id = new_class._fields['id']

        return new_class
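Fields flagged primary_key=True become the document's `_id`, with `pk` and `id` acting as aliases for whichever field holds the key. A small sketch (hypothetical class, assuming a live connection):

    from mongoengine import Document, StringField

    class Country(Document):
        iso_code = StringField(primary_key=True)

    c = Country(iso_code='NZ')
    c.save()
    c.pk           # -> 'NZ': alias for the custom primary key
    c.id == c.pk   # True: 'id' aliases the real primary key field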
    @classmethod
    def _unique_with_indexes(cls, new_class, namespace=""):
        unique_indexes = []
        for field_name, field in new_class._fields.items():
            # Generate a list of indexes needed by uniqueness constraints
@@ -302,52 +571,50 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
                unique_fields += unique_with

                # Add the new index to the list
-                index = [(f, pymongo.ASCENDING) for f in unique_fields]
+                index = [("%s%s" % (namespace, f), pymongo.ASCENDING) for f in unique_fields]
                unique_indexes.append(index)

-            # Check for custom primary key
-            if field.primary_key:
-                current_pk = new_class._meta['id_field']
-                if current_pk and current_pk != field_name:
-                    raise ValueError('Cannot override primary key field')
+            # Grab any embedded document field unique indexes
+            if field.__class__.__name__ == "EmbeddedDocumentField":
+                field_namespace = "%s." % field_name
+                unique_indexes += cls._unique_with_indexes(field.document_type,
+                                                           field_namespace)

-                if not current_pk:
-                    new_class._meta['id_field'] = field_name
-                    # Make 'Document.id' an alias to the real primary key field
-                    new_class.id = field

-        new_class._meta['unique_indexes'] = unique_indexes

-        if not new_class._meta['id_field']:
-            new_class._meta['id_field'] = 'id'
-            new_class._fields['id'] = ObjectIdField(db_field='_id')
-            new_class.id = new_class._fields['id']

-        return new_class
+        return unique_indexes
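_unique_with_indexes now recurses into embedded documents, so a uniqueness constraint declared inside an EmbeddedDocument is namespaced with the embedding field's name. A hedged sketch (hypothetical classes):

    from mongoengine import (Document, EmbeddedDocument,
                             EmbeddedDocumentField, StringField)

    class SubDocument(EmbeddedDocument):
        year = StringField()
        slug = StringField(unique_with='year')   # unique per (slug, year)

    class BlogPost(Document):
        sub = EmbeddedDocumentField(SubDocument)

    # The generated unique index is namespaced by the embedding field,
    # roughly: [('sub.slug', 1), ('sub.year', 1)]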
class BaseDocument(object):

    def __init__(self, **values):
+        signals.pre_init.send(self.__class__, document=self, values=values)

        self._data = {}
+        self._initialised = False
        # Assign default values to instance
-        for attr_name in self._fields.keys():
-            # Use default value if present
+        for attr_name, field in self._fields.items():
            value = getattr(self, attr_name, None)
            setattr(self, attr_name, value)

        # Assign initial values to instance
        for attr_name in values.keys():
            try:
-                setattr(self, attr_name, values.pop(attr_name))
+                value = values.pop(attr_name)
+                setattr(self, attr_name, value)
            except AttributeError:
                pass

+        # Set any get_fieldname_display methods
+        self.__set_field_display()
+        # Flag initialised
+        self._initialised = True
+        signals.post_init.send(self.__class__, document=self)
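Documents now emit lifecycle signals (pre_init, post_init, pre_save, post_save, pre_delete, post_delete). A minimal wiring sketch, assuming the blinker-style connect API used by mongoengine.signals (the Post class and handler are hypothetical):

    from mongoengine import Document, StringField, signals

    class Post(Document):
        title = StringField()

    def check_title(sender, document):
        # runs before every Post.save()
        assert document.title is not None

    signals.pre_save.connect(check_title, sender=Post)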
    def validate(self):
        """Ensure that all fields' values are valid and that required fields
        are present.
        """
        # Get a list of tuples of field names and their current values
        fields = [(field, getattr(self, name))
                  for name, field in self._fields.items()]

        # Ensure that each field is matched to a valid value
@@ -356,11 +623,44 @@ class BaseDocument(object):
                try:
                    field._validate(value)
                except (ValueError, AttributeError, AssertionError), e:
-                    raise ValidationError('Invalid value for field of type "%s": %s'
-                                          % (field.__class__.__name__, value))
+                    raise ValidationError('Invalid value for field named "%s" of type "%s": %s'
+                                          % (field.name, field.__class__.__name__, value))
            elif field.required:
                raise ValidationError('Field "%s" is required' % field.name)

    @apply
    def pk():
        """Primary key alias
        """
        def fget(self):
            return getattr(self, self._meta['id_field'])
        def fset(self, value):
            return setattr(self, self._meta['id_field'], value)
        return property(fget, fset)
    def to_mongo(self):
        """Return data dictionary ready for use with MongoDB.
        """
        data = {}
        for field_name, field in self._fields.items():
            value = getattr(self, field_name, None)
            if value is not None:
                data[field.db_field] = field.to_mongo(value)
        # Only add _cls and _types if allow_inheritance is not False
        if not (hasattr(self, '_meta') and
                self._meta.get('allow_inheritance', True) == False):
            data['_cls'] = self._class_name
            data['_types'] = self._superclasses.keys() + [self._class_name]
        if '_id' in data and data['_id'] is None:
            del data['_id']
        return data

    @classmethod
    def _get_collection_name(cls):
        """Returns the collection name for this class.
        """
        return cls._meta.get('collection', None)

    @classmethod
    def _get_subclasses(cls):
        """Return a dictionary of all subclasses (found recursively).
@@ -376,15 +676,184 @@ class BaseDocument(object):
            all_subclasses.update(subclass._get_subclasses())
        return all_subclasses
-    @apply
-    def pk():
-        """Primary key alias
-        """
-        def fget(self):
-            return getattr(self, self._meta['id_field'])
-        def fset(self, value):
-            return setattr(self, self._meta['id_field'], value)
-        return property(fget, fset)

    @classmethod
    def _from_son(cls, son):
        """Create an instance of a Document (subclass) from a PyMongo SON.
        """
        # get the class name from the document, falling back to the given
        # class if unavailable
        class_name = son.get(u'_cls', cls._class_name)
        data = dict((str(key), value) for key, value in son.items())

        if '_types' in data:
            del data['_types']

        if '_cls' in data:
            del data['_cls']

        # Return correct subclass for document type
        if class_name != cls._class_name:
            subclasses = cls._get_subclasses()
            if class_name not in subclasses:
                # Type of document is probably more generic than the class
                # that has been queried to return this SON
                raise NotRegistered("""
                    `%s` has not been registered in the document registry.
                    Importing the document class automatically registers it,
                    has it been imported?
                """.strip() % class_name)
            cls = subclasses[class_name]

        present_fields = data.keys()
        for field_name, field in cls._fields.items():
            if field.db_field in data:
                value = data[field.db_field]
                data[field_name] = (value if value is None
                                    else field.to_python(value))

        obj = cls(**data)
        obj._changed_fields = []
        return obj
    def _mark_as_changed(self, key):
        """Marks a key as explicitly changed by the user
        """
        if not key:
            return
        key = self._db_field_map.get(key, key)
        if hasattr(self, '_changed_fields') and key not in self._changed_fields:
            self._changed_fields.append(key)

    def _get_changed_fields(self, key=''):
        """Returns a list of all fields that have explicitly been changed.
        """
        from mongoengine import EmbeddedDocument
        _changed_fields = []
        _changed_fields += getattr(self, '_changed_fields', [])
        for field_name in self._fields:
            db_field_name = self._db_field_map.get(field_name, field_name)
            key = '%s.' % db_field_name
            field = getattr(self, field_name, None)
            if isinstance(field, EmbeddedDocument) and db_field_name not in _changed_fields:
                # Grab all embedded fields that have been changed
                _changed_fields += ["%s%s" % (key, k) for k in field._get_changed_fields(key) if k]
            elif isinstance(field, (list, tuple, dict)) and db_field_name not in _changed_fields:
                # Loop list / dict fields as they contain documents
                # Determine the iterator to use
                if not hasattr(field, 'items'):
                    iterator = enumerate(field)
                else:
                    iterator = field.iteritems()
                for index, value in iterator:
                    if not hasattr(value, '_get_changed_fields'):
                        continue
                    list_key = "%s%s." % (key, index)
                    _changed_fields += ["%s%s" % (list_key, k) for k in value._get_changed_fields(list_key) if k]

        return _changed_fields
    def _delta(self):
        """Returns the delta (set, unset) of the changes for a document.
        Gets any values that have been explicitly changed.
        """
        # Handles cases where not loaded from_son but has an _id
        doc = self.to_mongo()
        set_fields = self._get_changed_fields()
        set_data = {}
        unset_data = {}
        if hasattr(self, '_changed_fields'):
            set_data = {}
            # Fetch each set item from its path
            for path in set_fields:
                parts = path.split('.')
                d = doc
                for p in parts:
                    if hasattr(d, '__getattr__'):
                        d = getattr(d, p)
                    elif p.isdigit():
                        d = d[int(p)]
                    else:
                        d = d.get(p)
                set_data[path] = d
        else:
            set_data = doc
            if '_id' in set_data:
                del(set_data['_id'])

        # Determine if any changed items were actually unset.
        for path, value in set_data.items():
            if value:
                continue

            # If we've set a value that isn't the default value, don't unset it.
            default = None

            if path in self._fields:
                default = self._fields[path].default
            else:  # Perform a full lookup for lists / embedded lookups
                d = self
                parts = path.split('.')
                db_field_name = parts.pop()
                for p in parts:
                    if p.isdigit():
                        d = d[int(p)]
                    elif hasattr(d, '__getattribute__') and not isinstance(d, dict):
                        real_path = d._reverse_db_field_map.get(p, p)
                        d = getattr(d, real_path)
                    else:
                        d = d.get(p)

                if hasattr(d, '_fields'):
                    field_name = d._reverse_db_field_map.get(db_field_name,
                                                             db_field_name)
                    default = d._fields[field_name].default

            if default is not None:
                if callable(default):
                    default = default()
                if default != value:
                    continue

            del(set_data[path])
            unset_data[path] = 1
        return set_data, unset_data
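With change tracking in place, saving an existing document only issues $set / $unset for the paths that actually changed. A hedged illustration of the internal delta (the Page class is hypothetical, and _delta is internal API):

    from mongoengine import Document, StringField

    class Page(Document):
        title = StringField()
        body = StringField()

    page = Page(title='Old', body='text')
    page.save()
    page.title = 'New'
    page._delta()   # -> ({'title': 'New'}, {}): only the changed path is set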
    @classmethod
    def _geo_indices(cls, inspected_classes=None):
        inspected_classes = inspected_classes or []
        geo_indices = []
        inspected_classes.append(cls)
        for field in cls._fields.values():
            if hasattr(field, 'document_type'):
                field_cls = field.document_type
                if field_cls in inspected_classes:
                    continue
                if hasattr(field_cls, '_geo_indices'):
                    geo_indices += field_cls._geo_indices(inspected_classes)
            elif field._geo_index:
                geo_indices.append(field)
        return geo_indices

    def __getstate__(self):
        self_dict = self.__dict__
        removals = ["get_%s_display" % k for k, v in self._fields.items() if v.choices]
        for k in removals:
            if hasattr(self, k):
                delattr(self, k)
        return self.__dict__

    def __setstate__(self, __dict__):
        self.__dict__ = __dict__
        self.__set_field_display()

    def __set_field_display(self):
        for attr_name, field in self._fields.items():
            if field.choices:
                # dynamically adds a way to get the display value for a field with choices
                setattr(self, 'get_%s_display' % attr_name, partial(self.__get_field_display, field=field))

    def __get_field_display(self, field):
        """Returns the display value for a choice field"""
        value = getattr(self, field.name)
        return dict(field.choices).get(value, value)

    def __iter__(self):
        return iter(self._fields)
@@ -429,60 +898,6 @@ class BaseDocument(object):
            return unicode(self).encode('utf-8')
        return '%s object' % self.__class__.__name__

-    def to_mongo(self):
-        """Return data dictionary ready for use with MongoDB.
-        """
-        data = {}
-        for field_name, field in self._fields.items():
-            value = getattr(self, field_name, None)
-            if value is not None:
-                data[field.db_field] = field.to_mongo(value)
-        # Only add _cls and _types if allow_inheritance is not False
-        if not (hasattr(self, '_meta') and
-                self._meta.get('allow_inheritance', True) == False):
-            data['_cls'] = self._class_name
-            data['_types'] = self._superclasses.keys() + [self._class_name]
-        if data.has_key('_id') and not data['_id']:
-            del data['_id']
-        return data

-    @classmethod
-    def _from_son(cls, son):
-        """Create an instance of a Document (subclass) from a PyMongo SON.
-        """
-        # get the class name from the document, falling back to the given
-        # class if unavailable
-        class_name = son.get(u'_cls', cls._class_name)

-        data = dict((str(key), value) for key, value in son.items())

-        if '_types' in data:
-            del data['_types']

-        if '_cls' in data:
-            del data['_cls']

-        # Return correct subclass for document type
-        if class_name != cls._class_name:
-            subclasses = cls._get_subclasses()
-            if class_name not in subclasses:
-                # Type of document is probably more generic than the class
-                # that has been queried to return this SON
-                return None
-            cls = subclasses[class_name]

-        present_fields = data.keys()

-        for field_name, field in cls._fields.items():
-            if field.db_field in data:
-                value = data[field.db_field]
-                data[field_name] = (value if value is None
-                                    else field.to_python(value))

-        obj = cls(**data)
-        obj._present_fields = present_fields
-        return obj
    def __eq__(self, other):
        if isinstance(other, self.__class__) and hasattr(other, 'id'):
            if self.id == other.id:
@@ -493,16 +908,115 @@ class BaseDocument(object):
        return not self.__eq__(other)

    def __hash__(self):
        """For use as a list / dict key"""
        if self.pk is None:
            # For a new, unsaved object
            return super(BaseDocument, self).__hash__()
        else:
            return hash(self.pk)


class BaseList(list):
    """A special list so we can watch any changes
    """

    def __init__(self, list_items, instance, name):
        self.instance = instance
        self.name = name
        super(BaseList, self).__init__(list_items)

    def __setitem__(self, *args, **kwargs):
        self._mark_as_changed()
        super(BaseList, self).__setitem__(*args, **kwargs)

    def __delitem__(self, *args, **kwargs):
        self._mark_as_changed()
        super(BaseList, self).__delitem__(*args, **kwargs)

    def append(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).append(*args, **kwargs)

    def extend(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).extend(*args, **kwargs)

    def insert(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).insert(*args, **kwargs)

    def pop(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).pop(*args, **kwargs)

    def remove(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).remove(*args, **kwargs)

    def reverse(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).reverse(*args, **kwargs)

    def sort(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).sort(*args, **kwargs)

    def _mark_as_changed(self):
        """Marks the list as changed if it has an instance and a name"""
        if hasattr(self, 'instance') and hasattr(self, 'name'):
            self.instance._mark_as_changed(self.name)
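Because BaseList / BaseDict report mutations back to their owning document, in-place edits to list and dict fields are picked up by the delta machinery without reassigning the attribute. A hedged sketch (the Article class is hypothetical):

    from mongoengine import Document, ListField, StringField

    class Article(Document):
        tags = ListField(StringField())

    article = Article(tags=['db'])
    article.save()
    article.tags.append('mongodb')   # BaseList.append marks 'tags' as changed
    article.save()                   # issues a $set on just the tags field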

class BaseDict(dict):
    """A special dict so we can watch any changes
    """

    def __init__(self, dict_items, instance, name):
        self.instance = instance
        self.name = name
        super(BaseDict, self).__init__(dict_items)

    def __setitem__(self, *args, **kwargs):
        self._mark_as_changed()
        super(BaseDict, self).__setitem__(*args, **kwargs)

    def __setattr__(self, *args, **kwargs):
        self._mark_as_changed()
        super(BaseDict, self).__setattr__(*args, **kwargs)

    def __delete__(self, *args, **kwargs):
        self._mark_as_changed()
        super(BaseDict, self).__delete__(*args, **kwargs)

    def __delitem__(self, *args, **kwargs):
        self._mark_as_changed()
        super(BaseDict, self).__delitem__(*args, **kwargs)

    def __delattr__(self, *args, **kwargs):
        self._mark_as_changed()
        super(BaseDict, self).__delattr__(*args, **kwargs)

    def clear(self, *args, **kwargs):
        self._mark_as_changed()
        super(BaseDict, self).clear(*args, **kwargs)

    def pop(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseDict, self).pop(*args, **kwargs)

    def popitem(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseDict, self).popitem(*args, **kwargs)

    def _mark_as_changed(self):
        """Marks the dict as changed if it has an instance and a name"""
        if hasattr(self, 'instance') and hasattr(self, 'name'):
            self.instance._mark_as_changed(self.name)

if sys.version_info < (2, 5):
    # Prior to Python 2.5, Exception was an old-style class
    import types
    def subclass_exception(name, parents, unused):
        return types.ClassType(name, parents, {})
else:
    def subclass_exception(name, parents, module):
@@ -1,5 +1,6 @@
from pymongo import Connection
import multiprocessing
+import threading

__all__ = ['ConnectionError', 'connect']

@@ -22,17 +23,22 @@ class ConnectionError(Exception):


def _get_connection(reconnect=False):
    """Handles the connection to the database
    """
    global _connection
    identity = get_identity()
    # Connect to the database if not already connected
    if _connection.get(identity) is None or reconnect:
        try:
            _connection[identity] = Connection(**_connection_settings)
-        except:
-            raise ConnectionError('Cannot connect to the database')
+        except Exception, e:
+            raise ConnectionError("Cannot connect to the database:\n%s" % e)
    return _connection[identity]

def _get_db(reconnect=False):
    """Handles database connections and authentication based on the current
    identity
    """
    global _db, _connection
    identity = get_identity()
    # Connect if not already connected
@@ -52,12 +58,17 @@ def _get_db(reconnect=False):
    return _db[identity]

def get_identity():
    """Creates an identity key based on the current process and thread
    identity.
    """
    identity = multiprocessing.current_process()._identity
    identity = 0 if not identity else identity[0]

+    identity = (identity, threading.current_thread().ident)
    return identity


def connect(db, username=None, password=None, **kwargs):
    """Connect to the database specified by the 'db' argument. Connection
    settings may be provided here as well if the database is not running on
    the default port on localhost. If authentication is needed, provide
    username and password arguments as well.
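Connections are now keyed by (process, thread) identity, so each thread and forked process gets its own pymongo Connection. The public API is unchanged; a minimal sketch (credentials and host hypothetical, extra kwargs assumed to pass through to pymongo):

    from mongoengine import connect

    # Registers connection settings; the Connection is created lazily
    # per process / thread identity
    connect('blog_db', username='writer', password='secret',
            host='localhost', port=27017)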
mongoengine/dereference.py (new file, 184 lines)
@@ -0,0 +1,184 @@
import operator

import pymongo

from base import BaseDict, BaseList, get_document, TopLevelDocumentMetaclass
from fields import ReferenceField
from connection import _get_db
from queryset import QuerySet
from document import Document


class DeReference(object):

    def __call__(self, items, max_depth=1, instance=None, name=None, get=False):
        """
        Cheaply dereferences the items to a set depth.
        Also handles the conversion of complex data types.

        :param items: The iterable (dict, list, queryset) to be dereferenced.
        :param max_depth: The maximum depth to recurse to
        :param instance: The owning instance used for tracking changes by
            :class:`~mongoengine.base.ComplexBaseField`
        :param name: The name of the field, used for tracking changes by
            :class:`~mongoengine.base.ComplexBaseField`
        :param get: A boolean determining if being called by __get__
        """
        if items is None or isinstance(items, basestring):
            return items

        # cheapest way to convert a queryset to a list
        # list(queryset) uses a count() query to determine length
        if isinstance(items, QuerySet):
            items = [i for i in items]

        self.max_depth = max_depth

        doc_type = None
        if instance and instance._fields:
            doc_type = instance._fields[name].field

            if isinstance(doc_type, ReferenceField):
                doc_type = doc_type.document_type

        self.reference_map = self._find_references(items)
        self.object_map = self._fetch_objects(doc_type=doc_type)
        return self._attach_objects(items, 0, instance, name, get)

    def _find_references(self, items, depth=0):
        """
        Recursively finds all db references to be dereferenced

        :param items: The iterable (dict, list, queryset)
        :param depth: The current depth of recursion
        """
        reference_map = {}
        if not items:
            return reference_map

        # Determine the iterator to use
        if not hasattr(items, 'items'):
            iterator = enumerate(items)
        else:
            iterator = items.iteritems()

        # Recursively find dbreferences
        for k, item in iterator:
            if hasattr(item, '_fields'):
                for field_name, field in item._fields.iteritems():
                    v = item._data.get(field_name, None)
                    if isinstance(v, (pymongo.dbref.DBRef)):
                        reference_map.setdefault(field.document_type, []).append(v.id)
                    elif isinstance(v, (dict, pymongo.son.SON)) and '_ref' in v:
                        reference_map.setdefault(get_document(v['_cls']), []).append(v['_ref'].id)
                    elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
                        field_cls = getattr(getattr(field, 'field', None), 'document_type', None)
                        references = self._find_references(v, depth)
                        for key, refs in references.iteritems():
                            if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)):
                                key = field_cls
                            reference_map.setdefault(key, []).extend(refs)
            elif isinstance(item, (pymongo.dbref.DBRef)):
                reference_map.setdefault(item.collection, []).append(item.id)
            elif isinstance(item, (dict, pymongo.son.SON)) and '_ref' in item:
                reference_map.setdefault(get_document(item['_cls']), []).append(item['_ref'].id)
            elif isinstance(item, (dict, list, tuple)) and depth <= self.max_depth:
                references = self._find_references(item, depth)
                for key, refs in references.iteritems():
                    reference_map.setdefault(key, []).extend(refs)
        depth += 1
        return reference_map

    def _fetch_objects(self, doc_type=None):
        """Fetch all references and convert to their document objects
        """
        object_map = {}
        for col, dbrefs in self.reference_map.iteritems():
            keys = object_map.keys()
            refs = list(set([dbref for dbref in dbrefs if str(dbref) not in keys]))
            if hasattr(col, 'objects'):  # We have a document class for the refs
                references = col.objects.in_bulk(refs)
                for key, doc in references.iteritems():
                    object_map[key] = doc
            else:  # Generic reference: use the refs data to convert to document
                references = _get_db()[col].find({'_id': {'$in': refs}})
                for ref in references:
                    if '_cls' in ref:
                        doc = get_document(ref['_cls'])._from_son(ref)
                    else:
                        doc = doc_type._from_son(ref)
                    object_map[doc.id] = doc
        return object_map

    def _attach_objects(self, items, depth=0, instance=None, name=None, get=False):
        """
        Recursively attaches the fetched reference documents to the items

        :param items: The iterable (dict, list, queryset)
        :param depth: The current depth of recursion
        :param instance: The owning instance used for tracking changes by
            :class:`~mongoengine.base.ComplexBaseField`
        :param name: The name of the field, used for tracking changes by
            :class:`~mongoengine.base.ComplexBaseField`
        :param get: A boolean determining if being called by __get__
        """
        if not items:
            if isinstance(items, (BaseDict, BaseList)):
                return items

            if instance:
                if isinstance(items, dict):
                    return BaseDict(items, instance=instance, name=name)
                else:
                    return BaseList(items, instance=instance, name=name)

        if isinstance(items, (dict, pymongo.son.SON)):
            if '_ref' in items:
                return self.object_map.get(items['_ref'].id, items)
            elif '_types' in items and '_cls' in items:
                doc = get_document(items['_cls'])._from_son(items)
                if not get:
                    doc._data = self._attach_objects(doc._data, depth, doc, name, get)
                return doc

        if not hasattr(items, 'items'):
            is_list = True
            iterator = enumerate(items)
            data = []
        else:
            is_list = False
            iterator = items.iteritems()
            data = {}

        for k, v in iterator:
            if is_list:
                data.append(v)
            else:
                data[k] = v

            if k in self.object_map:
                data[k] = self.object_map[k]
            elif hasattr(v, '_fields'):
                for field_name, field in v._fields.iteritems():
                    v = data[k]._data.get(field_name, None)
                    if isinstance(v, (pymongo.dbref.DBRef)):
                        data[k]._data[field_name] = self.object_map.get(v.id, v)
                    elif isinstance(v, (dict, pymongo.son.SON)) and '_ref' in v:
                        data[k]._data[field_name] = self.object_map.get(v['_ref'].id, v)
                    elif isinstance(v, dict) and depth < self.max_depth:
                        data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name, get=get)
                    elif isinstance(v, (list, tuple)):
                        data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name, get=get)
            elif isinstance(v, (dict, list, tuple)) and depth < self.max_depth:
                data[k] = self._attach_objects(v, depth, instance=instance, name=name, get=get)
            elif hasattr(v, 'id'):
                data[k] = self.object_map.get(v.id, v)

        if instance and name:
            if is_list:
                return BaseList(data, instance=instance, name=name)
            return BaseDict(data, instance=instance, name=name)
        depth += 1
        return data

dereference = DeReference()
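DeReference gathers every DBRef in the structure first, then fetches them with one in_bulk / $in query per collection, so accessing a list of references costs a fixed number of queries rather than one per item. A hedged sketch (hypothetical classes, assuming saved data):

    from mongoengine import Document, ListField, ReferenceField, StringField

    class Author(Document):
        name = StringField()

    class Book(Document):
        authors = ListField(ReferenceField(Author))

    book = Book.objects.first()
    # One batched query fetches all referenced Author documents at once
    names = [a.name for a in book.authors]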
@@ -3,6 +3,7 @@ from mongoengine import *
from django.utils.hashcompat import md5_constructor, sha_constructor
from django.utils.encoding import smart_str
from django.contrib.auth.models import AnonymousUser
+from django.utils.translation import ugettext_lazy as _

import datetime

@@ -21,16 +22,38 @@ class User(Document):
    """A User document that aims to mirror most of the API specified by Django
    at http://docs.djangoproject.com/en/dev/topics/auth/#users
    """
-    username = StringField(max_length=30, required=True)
-    first_name = StringField(max_length=30)
-    last_name = StringField(max_length=30)
-    email = StringField()
-    password = StringField(max_length=128)
-    is_staff = BooleanField(default=False)
-    is_active = BooleanField(default=True)
-    is_superuser = BooleanField(default=False)
-    last_login = DateTimeField(default=datetime.datetime.now)
-    date_joined = DateTimeField(default=datetime.datetime.now)
+    username = StringField(max_length=30, required=True,
+                           verbose_name=_('username'),
+                           help_text=_("Required. 30 characters or fewer. Letters, numbers and @/./+/-/_ characters"))
+
+    first_name = StringField(max_length=30,
+                             verbose_name=_('first name'))
+
+    last_name = StringField(max_length=30,
+                            verbose_name=_('last name'))
+    email = EmailField(verbose_name=_('e-mail address'))
+    password = StringField(max_length=128,
+                           verbose_name=_('password'),
+                           help_text=_("Use '[algo]$[salt]$[hexdigest]' or use the <a href=\"password/\">change password form</a>."))
+    is_staff = BooleanField(default=False,
+                            verbose_name=_('staff status'),
+                            help_text=_("Designates whether the user can log into this admin site."))
+    is_active = BooleanField(default=True,
+                             verbose_name=_('active'),
+                             help_text=_("Designates whether this user should be treated as active. Unselect this instead of deleting accounts."))
+    is_superuser = BooleanField(default=False,
+                                verbose_name=_('superuser status'),
+                                help_text=_("Designates that this user has all permissions without explicitly assigning them."))
+    last_login = DateTimeField(default=datetime.datetime.now,
+                               verbose_name=_('last login'))
+    date_joined = DateTimeField(default=datetime.datetime.now,
+                                verbose_name=_('date joined'))

    meta = {
        'indexes': [
            {'fields': ['username'], 'unique': True}
        ]
    }

    def __unicode__(self):
        return self.username
@@ -86,7 +109,7 @@ class User(Document):
        else:
            email = '@'.join([email_name, domain_part.lower()])

-        user = User(username=username, email=email, date_joined=now)
+        user = cls(username=username, email=email, date_joined=now)
        user.set_password(password)
        user.save()
        return user
@@ -99,6 +122,10 @@ class MongoEngineBackend(object):
    """Authenticate using MongoEngine and mongoengine.django.auth.User.
    """

+    supports_object_permissions = False
+    supports_anonymous_user = False
+    supports_inactive_user = False

    def authenticate(self, username=None, password=None):
        user = User.objects(username=username).first()
        if user:
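Using cls instead of a hard-coded User in create_user means subclasses of the auth document now get instances of themselves back. A minimal usage sketch (values hypothetical):

    from mongoengine.django.auth import User

    # create_user hashes the password and saves the document;
    # the domain part of the e-mail address is lower-cased on the way in
    user = User.create_user('alice', 's3cret', email='Alice@Example.COM')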
@@ -1,6 +1,7 @@
from django.http import Http404
from mongoengine.queryset import QuerySet
from mongoengine.base import BaseDocument
+from mongoengine.base import ValidationError

def _get_queryset(cls):
    """Inspired by django.shortcuts.*"""
@@ -25,7 +26,7 @@ def get_document_or_404(cls, *args, **kwargs):
    queryset = _get_queryset(cls)
    try:
        return queryset.get(*args, **kwargs)
-    except queryset._document.DoesNotExist:
+    except (queryset._document.DoesNotExist, ValidationError):
        raise Http404('No %s matches the given query.' % queryset._document._class_name)

def get_list_or_404(cls, *args, **kwargs):
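Catching ValidationError as well means a malformed id in the URL (for example a string that is not a valid ObjectId) now yields a 404 instead of a 500. A hedged Django view sketch (the Post class and template are hypothetical):

    from django.shortcuts import render_to_response
    from mongoengine.django.shortcuts import get_document_or_404

    def post_detail(request, post_id):
        # raises Http404 for a missing document *or* an invalid id
        post = get_document_or_404(Post, pk=post_id)
        return render_to_response('post.html', {'post': post})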
@@ -1,12 +1,17 @@
from mongoengine import signals
from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument,
-                  ValidationError)
+                  ValidationError, BaseDict, BaseList)
from queryset import OperationError
from connection import _get_db

import pymongo

+__all__ = ['Document', 'EmbeddedDocument', 'ValidationError',
+           'OperationError', 'InvalidCollectionError']

-__all__ = ['Document', 'EmbeddedDocument', 'ValidationError', 'OperationError']

class InvalidCollectionError(Exception):
    pass


class EmbeddedDocument(BaseDocument):
@@ -18,6 +23,18 @@ class EmbeddedDocument(BaseDocument):

    __metaclass__ = DocumentMetaclass

    def __delattr__(self, *args, **kwargs):
        """Handle deletions of fields"""
        field_name = args[0]
        if field_name in self._fields:
            default = self._fields[field_name].default
            if callable(default):
                default = default()
            setattr(self, field_name, default)
        else:
            super(EmbeddedDocument, self).__delattr__(*args, **kwargs)


class Document(BaseDocument):
    """The base class used for defining the structure and properties of
@@ -40,44 +57,125 @@ class Document(BaseDocument):
    presence of `_cls` and `_types`, set :attr:`allow_inheritance` to
    ``False`` in the :attr:`meta` dictionary.

    A :class:`~mongoengine.Document` may use a **Capped Collection** by
    specifying :attr:`max_documents` and :attr:`max_size` in the :attr:`meta`
    dictionary. :attr:`max_documents` is the maximum number of documents that
    is allowed to be stored in the collection, and :attr:`max_size` is the
    maximum size of the collection in bytes. If :attr:`max_size` is not
    specified and :attr:`max_documents` is, :attr:`max_size` defaults to
    10000000 bytes (10MB).

    Indexes may be created by specifying :attr:`indexes` in the :attr:`meta`
    dictionary. The value should be a list of field names or tuples of field
    names. Index direction may be specified by prefixing the field names with
    a **+** or **-** sign.
-    """

+    By default, _types will be added to the start of every index (that
+    doesn't contain a list) if allow_inheritance is True. This can be
+    disabled either by setting types to False on the specific index or
+    by setting index_types to False on the meta dictionary for the document.
+    """
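A sketch of the two opt-outs the docstring above describes, assuming the dict-style index spec already used by the auth module (the Event class is hypothetical):

    from mongoengine import Document, StringField

    class Event(Document):
        slug = StringField()
        meta = {
            'index_types': False,                      # document-wide opt-out
            'indexes': [
                {'fields': ['slug'], 'types': False},  # or per-index opt-out
            ],
        }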
    __metaclass__ = TopLevelDocumentMetaclass

-    def save(self, safe=True, force_insert=False, validate=True):
    @classmethod
    def _get_collection(self):
        """Returns the collection for the document."""
        db = _get_db()
        collection_name = self._get_collection_name()

        if not hasattr(self, '_collection') or self._collection is None:
            # Create collection as a capped collection if specified
            if self._meta['max_size'] or self._meta['max_documents']:
                # Get max document limit and max byte size from meta
                max_size = self._meta['max_size'] or 10000000  # 10MB default
                max_documents = self._meta['max_documents']

                if collection_name in db.collection_names():
                    self._collection = db[collection_name]
                    # The collection already exists, check if its capped
                    # options match the specified capped options
                    options = self._collection.options()
                    if options.get('max') != max_documents or \
                       options.get('size') != max_size:
                        msg = ('Cannot create collection "%s" as a capped '
                               'collection as it already exists') % self._collection
                        raise InvalidCollectionError(msg)
                else:
                    # Create the collection as a capped collection
                    opts = {'capped': True, 'size': max_size}
                    if max_documents:
                        opts['max'] = max_documents
                    self._collection = db.create_collection(
                        collection_name, **opts
                    )
            else:
                self._collection = db[collection_name]
        return self._collection
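With _get_collection handling capped collections, the meta options alone are enough to get one created on first use. A minimal sketch (the LogEntry class is hypothetical):

    from mongoengine import Document, StringField

    class LogEntry(Document):
        line = StringField()
        meta = {
            'max_documents': 1000,   # at most 1000 documents...
            'max_size': 2000000,     # ...in at most 2MB (10MB default if omitted)
        }

    LogEntry(line='boot').save()   # first save creates the capped collection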
    def save(self, safe=True, force_insert=False, validate=True, write_options=None, _refs=None):
        """Save the :class:`~mongoengine.Document` to the database. If the
        document already exists, it will be updated, otherwise it will be
        created.

        If ``safe=True`` and the operation is unsuccessful, an
        :class:`~mongoengine.OperationError` will be raised.

        :param safe: check if the operation succeeded before returning
        :param force_insert: only try to create a new document, don't allow
            updates of existing documents
        :param validate: validates the document; set to ``False`` to skip.
        :param write_options: Extra keyword arguments are passed down to
            :meth:`~pymongo.collection.Collection.save` OR
            :meth:`~pymongo.collection.Collection.insert`
            which will be used as options for the resultant ``getLastError`` command.
            For example, ``save(..., w=2, fsync=True)`` will wait until at least two
            servers have recorded the write and will force an fsync on each server
            being written to.

        .. versionchanged:: 0.5
            In existing documents it only saves changed fields using set / unset.
            Saves are cascaded and any :class:`~pymongo.dbref.DBRef` objects
            that have changes are saved as well.
        """
        from fields import ReferenceField, GenericReferenceField

        signals.pre_save.send(self.__class__, document=self)

        if validate:
            self.validate()

        if not write_options:
            write_options = {}

        doc = self.to_mongo()

        created = '_id' in doc
        creation_mode = force_insert or not created
        try:
            collection = self.__class__.objects._collection
-            if force_insert:
-                object_id = collection.insert(doc, safe=safe)
-            else:
-                object_id = collection.save(doc, safe=safe)
+            if creation_mode:
+                if force_insert:
+                    object_id = collection.insert(doc, safe=safe, **write_options)
+                else:
+                    object_id = collection.save(doc, safe=safe, **write_options)
+            else:
+                object_id = doc['_id']
+                updates, removals = self._delta()
+                if updates:
+                    collection.update({'_id': object_id}, {"$set": updates}, upsert=True, safe=safe, **write_options)
+                if removals:
+                    collection.update({'_id': object_id}, {"$unset": removals}, upsert=True, safe=safe, **write_options)

+            # Save any references / generic references
+            _refs = _refs or []
+            for name, cls in self._fields.items():
+                if isinstance(cls, (ReferenceField, GenericReferenceField)):
+                    ref = getattr(self, name)
+                    if ref and str(ref) not in _refs:
+                        _refs.append(str(ref))
+                        ref.save(safe=safe, force_insert=force_insert,
+                                 validate=validate, write_options=write_options,
+                                 _refs=_refs)

        except pymongo.errors.OperationFailure, err:
            message = 'Could not save document (%s)'
            if u'duplicate key' in unicode(err):
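A sketch of the new write_options pass-through, matching the getLastError semantics the docstring describes (the Post instance and replica counts are hypothetical):

    post = Post(title='Hello')
    # Wait for two replicas to acknowledge and fsync before returning
    post.save(safe=True, write_options={'w': 2, 'fsync': True})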
@@ -86,12 +184,42 @@ class Document(BaseDocument):
        id_field = self._meta['id_field']
        self[id_field] = self._fields[id_field].to_python(object_id)

        def reset_changed_fields(doc, inspected_docs=None):
            """Loop through and reset changed fields lists"""
            inspected_docs = inspected_docs or []
            inspected_docs.append(doc)
            if hasattr(doc, '_changed_fields'):
                doc._changed_fields = []

            for field_name in doc._fields:
                field = getattr(doc, field_name)
                if field not in inspected_docs and hasattr(field, '_changed_fields'):
                    reset_changed_fields(field, inspected_docs)

        reset_changed_fields(self)
        signals.post_save.send(self.__class__, document=self, created=creation_mode)
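As a usage sketch (the ``BlogPost`` class and database name are illustrative, not part of this diff): the new ``write_options`` argument is forwarded to pymongo's save/insert and, through it, to the ``getLastError`` command.

    from mongoengine import Document, StringField, connect

    class BlogPost(Document):
        title = StringField()

    connect(db='example')

    post = BlogPost(title='Hello')
    # w=2 waits until two servers have recorded the write;
    # fsync=True forces a flush on each server written to.
    post.save(safe=True, write_options={'w': 2, 'fsync': True})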

    def update(self, **kwargs):
        """Performs an update on the :class:`~mongoengine.Document`.
        A convenience wrapper to :meth:`~mongoengine.QuerySet.update`.

        Raises :class:`OperationError` if called on an object that has not
        yet been saved.
        """
        if not self.pk:
            raise OperationError('attempt to update a document not yet saved')

        return self.__class__.objects(pk=self.pk).update_one(**kwargs)
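A hedged sketch of the wrapper in use, reusing the illustrative ``BlogPost`` above; the keyword arguments are the usual queryset update operators:

    post.update(set__title='Hello, world')
    post.reload()
    assert post.title == 'Hello, world'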

    def delete(self, safe=False):
        """Delete the :class:`~mongoengine.Document` from the database. This
        will only take effect if the document has been previously saved.

        :param safe: check if the operation succeeded before returning
        """
        signals.pre_delete.send(self.__class__, document=self)

        id_field = self._meta['id_field']
        object_id = self._fields[id_field].to_mongo(self[id_field])
        try:
@@ -100,6 +228,18 @@ class Document(BaseDocument):
            message = u'Could not delete document (%s)' % err.message
            raise OperationError(message)

        signals.post_delete.send(self.__class__, document=self)

    def select_related(self, max_depth=1):
        """Handles dereferencing of :class:`~pymongo.dbref.DBRef` objects to
        a maximum depth in order to cut down the number of queries to
        MongoDB.

        .. versionadded:: 0.5
        """
        from dereference import dereference
        self._data = dereference(self._data, max_depth)
        return self
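For instance (mirroring the dereference tests added later in this diff, which assume ``from mongoengine import *`` and an open connection), one ``select_related()`` call resolves all references up front rather than issuing one query per reference on first access:

    group = Group.objects.first().select_related(max_depth=1)
    [m for m in group.members]  # no further queries issued here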

    def reload(self):
        """Reloads all attributes from the database.

@@ -108,7 +248,37 @@ class Document(BaseDocument):
        id_field = self._meta['id_field']
        obj = self.__class__.objects(**{id_field: self[id_field]}).first()
        for field in self._fields:
            setattr(self, field, self._reload(field, obj[field]))
        self._changed_fields = []

    def _reload(self, key, value):
        """Used by :meth:`~mongoengine.Document.reload` to ensure the
        correct instance is linked to self.
        """
        if isinstance(value, BaseDict):
            value = [(k, self._reload(k, v)) for k, v in value.items()]
            value = BaseDict(value, instance=self, name=key)
        elif isinstance(value, BaseList):
            value = [self._reload(key, v) for v in value]
            value = BaseList(value, instance=self, name=key)
        elif isinstance(value, EmbeddedDocument):
            value._changed_fields = []
        return value

    def to_dbref(self):
        """Returns an instance of :class:`~pymongo.dbref.DBRef` useful in
        `__raw__` queries."""
        if not self.pk:
            msg = "Only saved documents can have a valid dbref"
            raise OperationError(msg)
        return pymongo.dbref.DBRef(self.__class__._get_collection_name(), self.pk)

    @classmethod
    def register_delete_rule(cls, document_cls, field_name, rule):
        """This method registers the delete rules to apply when removing this
        object.
        """
        cls._meta['delete_rules'][(document_cls, field_name)] = rule

    @classmethod
    def drop_collection(cls):
@@ -116,16 +286,16 @@ class Document(BaseDocument):
        :class:`~mongoengine.Document` type from the database.
        """
        db = _get_db()
        db.drop_collection(cls._get_collection_name())


class MapReduceDocument(object):
    """A document returned from a map/reduce query.

    :param collection: An instance of :class:`~pymongo.Collection`
    :param key: Document/result key, often an instance of
        :class:`~pymongo.objectid.ObjectId`. If supplied as
        an ``ObjectId`` found in the given ``collection``,
        the object can be accessed via the ``object`` property.
    :param value: The result(s) for this key.

@@ -140,7 +310,7 @@ class MapReduceDocument(object):

    @property
    def object(self):
        """Lazy-load the object referenced by ``self.key``. ``self.key``
        should be the ``primary_key``.
        """
        id_field = self._document()._meta['id_field']

mongoengine/fields.py
@@ -1,4 +1,6 @@
from base import (BaseField, ComplexBaseField, ObjectIdField,
                  ValidationError, get_document)
from queryset import DO_NOTHING
from document import Document, EmbeddedDocument
from connection import _get_db
from operator import itemgetter
@@ -8,18 +10,18 @@ import pymongo
import pymongo.dbref
import pymongo.son
import pymongo.binary
import datetime, time
import decimal
import gridfs
import warnings
import types


__all__ = ['StringField', 'IntField', 'FloatField', 'BooleanField',
           'DateTimeField', 'EmbeddedDocumentField', 'ListField', 'DictField',
           'ObjectIdField', 'ReferenceField', 'ValidationError', 'MapField',
           'DecimalField', 'ComplexDateTimeField', 'URLField',
           'GenericReferenceField', 'FileField', 'BinaryField',
           'SortedListField', 'EmailField', 'GeoPointField',
           'SequenceField', 'GenericEmbeddedDocumentField']

RECURSIVE_REFERENCE_CONSTANT = 'self'

@@ -118,8 +120,8 @@ class EmailField(StringField):

    EMAIL_REGEX = re.compile(
        r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*"  # dot-atom
        r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-011\013\014\016-\177])*"'  # quoted-string
        r')@(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+[A-Z]{2,6}\.?$', re.IGNORECASE  # domain
    )

    def validate(self, value):
@@ -150,6 +152,9 @@ class IntField(BaseField):
        if self.max_value is not None and value > self.max_value:
            raise ValidationError('Integer value is too large')

    def prepare_query_value(self, op, value):
        return int(value)


class FloatField(BaseField):
    """A floating point number field.
@@ -173,6 +178,10 @@ class FloatField(BaseField):
        if self.max_value is not None and value > self.max_value:
            raise ValidationError('Float value is too large')

    def prepare_query_value(self, op, value):
        return float(value)


class DecimalField(BaseField):
    """A fixed-point decimal number field.

@@ -222,15 +231,151 @@ class BooleanField(BaseField):

class DateTimeField(BaseField):
    """A datetime field.

    Note: Microseconds are rounded to the nearest millisecond.
    Pre-UTC microsecond support is effectively broken.
    Use :class:`~mongoengine.fields.ComplexDateTimeField` if you
    need accurate microsecond support.
    """

    def validate(self, value):
        assert isinstance(value, (datetime.datetime, datetime.date))

    def to_mongo(self, value):
        return self.prepare_query_value(None, value)

    def prepare_query_value(self, op, value):
        if value is None:
            return value
        if isinstance(value, datetime.datetime):
            return value
        if isinstance(value, datetime.date):
            return datetime.datetime(value.year, value.month, value.day)

        # Attempt to parse a datetime string:
        # value = smart_str(value)
        # split usecs, because they are not recognized by strptime.
        if '.' in value:
            try:
                value, usecs = value.split('.')
                usecs = int(usecs)
            except ValueError:
                return None
        else:
            usecs = 0
        kwargs = {'microsecond': usecs}
        try:  # Seconds are optional, so try converting seconds first.
            return datetime.datetime(*time.strptime(value, '%Y-%m-%d %H:%M:%S')[:6],
                                     **kwargs)
        except ValueError:
            try:  # Try without seconds.
                return datetime.datetime(*time.strptime(value, '%Y-%m-%d %H:%M')[:5],
                                         **kwargs)
            except ValueError:  # Try without hour/minutes/seconds.
                try:
                    return datetime.datetime(*time.strptime(value, '%Y-%m-%d')[:3],
                                             **kwargs)
                except ValueError:
                    return None
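Assuming the parsing chain above, the fallback string handling behaves roughly as follows (values are illustrative):

    field = DateTimeField()
    field.prepare_query_value('eq', '2011-06-08 20:26:24.192284')
    # -> datetime.datetime(2011, 6, 8, 20, 26, 24, 192284)
    field.prepare_query_value('eq', '2011-06-08 20:26')
    # -> datetime.datetime(2011, 6, 8, 20, 26)
    field.prepare_query_value('eq', '2011-06-08')
    # -> datetime.datetime(2011, 6, 8, 0, 0)
    field.prepare_query_value('eq', 'not a date')
    # -> None (none of the strptime formats match)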


class ComplexDateTimeField(StringField):
    """
    ComplexDateTimeField handles microseconds exactly instead of rounding
    like DateTimeField does.

    Derives from a StringField so you can do `gte` and `lte` filtering by
    using lexicographical comparison when filtering / sorting strings.

    The stored string has the following format:

        YYYY,MM,DD,HH,MM,SS,NNNNNN

    Where NNNNNN is the number of microseconds of the represented `datetime`.
    The `,` separator can be easily modified by passing the `separator`
    keyword when initializing the field.

    .. versionadded:: 0.5
    """

    def __init__(self, separator=',', **kwargs):
        self.names = ['year', 'month', 'day', 'hour', 'minute', 'second',
                      'microsecond']
        self.separator = separator
        super(ComplexDateTimeField, self).__init__(**kwargs)

    def _leading_zero(self, number):
        """
        Converts the given number to a string.

        If it has only one digit, a leading zero is added so that it always
        has at least two digits.
        """
        if int(number) < 10:
            return "0%s" % number
        else:
            return str(number)

    def _convert_from_datetime(self, val):
        """
        Convert a `datetime` object to a string representation (which will be
        stored in MongoDB). This is the reverse function of
        `_convert_from_string`.

        >>> a = datetime(2011, 6, 8, 20, 26, 24, 192284)
        >>> ComplexDateTimeField()._convert_from_datetime(a)
        '2011,06,08,20,26,24,192284'
        """
        data = []
        for name in self.names:
            data.append(self._leading_zero(getattr(val, name)))
        return self.separator.join(data)

    def _convert_from_string(self, data):
        """
        Convert a string representation to a `datetime` object (the object
        you will manipulate). This is the reverse function of
        `_convert_from_datetime`.

        >>> a = '2011,06,08,20,26,24,192284'
        >>> ComplexDateTimeField()._convert_from_string(a)
        datetime.datetime(2011, 6, 8, 20, 26, 24, 192284)
        """
        data = data.split(self.separator)
        data = map(int, data)
        values = {}
        for i in range(7):
            values[self.names[i]] = data[i]
        return datetime.datetime(**values)

    def __get__(self, instance, owner):
        data = super(ComplexDateTimeField, self).__get__(instance, owner)
        if data is None:
            return datetime.datetime.now()
        return self._convert_from_string(data)

    def __set__(self, instance, value):
        value = self._convert_from_datetime(value)
        return super(ComplexDateTimeField, self).__set__(instance, value)

    def validate(self, value):
        if not isinstance(value, datetime.datetime):
            raise ValidationError('Only datetime objects may be used in a '
                                  'ComplexDateTimeField')

    def to_python(self, value):
        return self._convert_from_string(value)

    def to_mongo(self, value):
        return self._convert_from_datetime(value)

    def prepare_query_value(self, op, value):
        return self._convert_from_datetime(value)
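A minimal sketch of the field in use (``LogEntry`` is illustrative; the stored string matches the doctest above):

    class LogEntry(Document):
        timestamp = ComplexDateTimeField()

    entry = LogEntry(timestamp=datetime.datetime(2011, 6, 8, 20, 26, 24, 192284))
    entry.save()
    # Stored as '2011,06,08,20,26,24,192284'; because the format is
    # fixed-width, lexicographic string comparison makes gte / lte
    # filtering and sorting behave chronologically.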


class EmbeddedDocumentField(BaseField):
    """An embedded document field - with a declared document_type.
    Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`.
    """

    def __init__(self, document_type, **kwargs):
@@ -256,6 +401,8 @@ class EmbeddedDocumentField(BaseField):
        return value

    def to_mongo(self, value):
        if not isinstance(value, self.document_type):
            return value
        return self.document_type.to_mongo(value)

    def validate(self, value):
@@ -275,7 +422,41 @@ class EmbeddedDocumentField(BaseField):
        return self.to_mongo(value)


class GenericEmbeddedDocumentField(BaseField):
    """A generic embedded document field - allows any
    :class:`~mongoengine.EmbeddedDocument` to be stored.

    Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`.
    """

    def prepare_query_value(self, op, value):
        return self.to_mongo(value)

    def to_python(self, value):
        if isinstance(value, dict):
            doc_cls = get_document(value['_cls'])
            value = doc_cls._from_son(value)

        return value

    def validate(self, value):
        if not isinstance(value, EmbeddedDocument):
            raise ValidationError('Invalid embedded document instance '
                                  'provided to a GenericEmbeddedDocumentField')

        value.validate()

    def to_mongo(self, document):
        if document is None:
            return None

        data = document.to_mongo()
        if '_cls' not in data:
            data['_cls'] = document._class_name
        return data
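A sketch, with illustrative class names, of how a single field can hold any embedded document type:

    class Comment(EmbeddedDocument):
        content = StringField()

    class Like(EmbeddedDocument):
        user = StringField()

    class Post(Document):
        interaction = GenericEmbeddedDocumentField()

    # The concrete class is recorded in '_cls' next to the embedded data,
    # so both of these round-trip back to the right type.
    Post(interaction=Comment(content='Nice!')).save()
    Post(interaction=Like(user='harry')).save()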


class ListField(ComplexBaseField):
    """A list field that wraps a standard field, allowing multiple instances
    of the field to be used as a list in the database.
    """
@@ -283,84 +464,26 @@ class ListField(BaseField):
    # ListFields cannot be indexed with _types - MongoDB doesn't support this
    _index_with_types = False

    def __init__(self, field=None, **kwargs):
        self.field = field
        kwargs.setdefault('default', lambda: [])
        super(ListField, self).__init__(**kwargs)

    def prepare_query_value(self, op, value):
        if self.field:
            if op in ('set', 'unset') and (not isinstance(value, basestring)
                                           and hasattr(value, '__iter__')):
                return [self.field.prepare_query_value(op, v) for v in value]
            return self.field.prepare_query_value(op, value)
        return super(ListField, self).prepare_query_value(op, value)


class SortedListField(ListField):
@@ -379,20 +502,22 @@ class SortedListField(ListField):
        super(SortedListField, self).__init__(field, **kwargs)

    def to_mongo(self, value):
        value = super(SortedListField, self).to_mongo(value)
        if self._ordering is not None:
            return sorted(value, key=itemgetter(self._ordering))
        return sorted(value)


class DictField(ComplexBaseField):
    """A dictionary field that wraps a standard Python dictionary. This is
    similar to an embedded document, but the structure is not defined.

    .. versionadded:: 0.3
    .. versionchanged:: 0.5 - Can now handle complex / varying types of data
    """

    def __init__(self, basecls=None, field=None, *args, **kwargs):
        self.field = field
        self.basecls = basecls or BaseField
        assert issubclass(self.basecls, BaseField)
        kwargs.setdefault('default', lambda: {})
@@ -408,21 +533,67 @@ class DictField(BaseField):
        if any(('.' in k or '$' in k) for k in value):
            raise ValidationError('Invalid dictionary key name - keys may not '
                                  'contain "." or "$" characters')
        super(DictField, self).validate(value)

    def lookup_member(self, member_name):
        return DictField(basecls=self.basecls, db_field=member_name)

    def prepare_query_value(self, op, value):
        match_operators = ['contains', 'icontains', 'startswith',
                           'istartswith', 'endswith', 'iendswith',
                           'exact', 'iexact']

        if op in match_operators and isinstance(value, basestring):
            return StringField().prepare_query_value(op, value)

        return super(DictField, self).prepare_query_value(op, value)


class MapField(DictField):
    """A field that maps a name to a specified field type. Similar to
    a DictField, except the 'value' of each item must match the specified
    field type.

    .. versionadded:: 0.5
    """

    def __init__(self, field=None, *args, **kwargs):
        if not isinstance(field, BaseField):
            raise ValidationError('Argument to MapField constructor must be '
                                  'a valid field')
        super(MapField, self).__init__(field=field, *args, **kwargs)
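Illustrative usage (class names assumed, not from this diff):

    class Extra(EmbeddedDocument):
        value = StringField()

    class Page(Document):
        # Keys are arbitrary strings; every value must validate against
        # the declared field type (here an embedded Extra document).
        extras = MapField(EmbeddedDocumentField(Extra))

    Page(extras={'intro': Extra(value='...'),
                 'footer': Extra(value='...')}).save()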


class ReferenceField(BaseField):
    """A reference to a document that will be automatically dereferenced on
    access (lazily).

    Use the `reverse_delete_rule` to handle what should happen if the
    document the field is referencing is deleted.

    The options are:

    * DO_NOTHING - don't do anything (default).
    * NULLIFY - Updates the reference to null.
    * CASCADE - Deletes the documents associated with the reference.
    * DENY - Prevent the deletion of the reference object.

    .. versionchanged:: 0.5 added `reverse_delete_rule`
    """

    def __init__(self, document_type, reverse_delete_rule=DO_NOTHING, **kwargs):
        """Initialises the Reference Field.

        :param reverse_delete_rule: Determines what to do when the referring
            object is deleted
        """
        if not isinstance(document_type, basestring):
            if not issubclass(document_type, (Document, basestring)):
                raise ValidationError('Argument to ReferenceField constructor '
                                      'must be a document class or a string')
        self.document_type_obj = document_type
        self.reverse_delete_rule = reverse_delete_rule
        super(ReferenceField, self).__init__(**kwargs)
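For example (``Author``/``Book`` are assumed; the rule constants live alongside DO_NOTHING, imported above from the queryset module):

    from mongoengine.queryset import CASCADE

    class Author(Document):
        name = StringField()

    class Book(Document):
        # Deleting an Author also deletes every Book that references them.
        author = ReferenceField(Author, reverse_delete_rule=CASCADE)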

    @property
@@ -465,7 +636,7 @@ class ReferenceField(BaseField):
            id_ = document

        id_ = id_field.to_mongo(id_)
        collection = self.document_type._get_collection_name()
        return pymongo.dbref.DBRef(collection, id_)

    def prepare_query_value(self, op, value):
@@ -474,6 +645,11 @@ class ReferenceField(BaseField):
    def validate(self, value):
        assert isinstance(value, (self.document_type, pymongo.dbref.DBRef))

        if isinstance(value, Document) and value.id is None:
            raise ValidationError('You can only reference documents once '
                                  'they have been saved to the database')

    def lookup_member(self, member_name):
        return self.document_type._fields.get(member_name)

@@ -482,6 +658,9 @@ class GenericReferenceField(BaseField):
    """A reference to *any* :class:`~mongoengine.document.Document` subclass
    that will be automatically dereferenced on access (lazily).

    .. note:: Any documents used as a generic reference must be registered
        in the document registry. Importing the model will automatically
        register it.

    .. versionadded:: 0.3
    """

@@ -495,6 +674,15 @@ class GenericReferenceField(BaseField):
        return super(GenericReferenceField, self).__get__(instance, owner)

    def validate(self, value):
        if not isinstance(value, (Document, pymongo.dbref.DBRef)):
            raise ValidationError('GenericReferences can only contain documents')

        # We need the id from the saved object to create the DBRef
        if isinstance(value, Document) and value.id is None:
            raise ValidationError('You can only reference documents once '
                                  'they have been saved to the database')

    def dereference(self, value):
        doc_cls = get_document(value['_cls'])
        reference = value['_ref']
@@ -504,6 +692,9 @@ class GenericReferenceField(BaseField):
        return doc

    def to_mongo(self, document):
        if document is None:
            return None

        id_field_name = document.__class__._meta['id_field']
        id_field = document.__class__._fields[id_field_name]

@@ -517,9 +708,9 @@ class GenericReferenceField(BaseField):
            id_ = document

        id_ = id_field.to_mongo(id_)
        collection = document._get_collection_name()
        ref = pymongo.dbref.DBRef(collection, id_)
        return {'_cls': document._class_name, '_ref': ref}

    def prepare_query_value(self, op, value):
        return self.to_mongo(value)

@@ -555,12 +746,16 @@ class GridFSProxy(object):
    """Proxy object to handle writing and reading of files to and from GridFS

    .. versionadded:: 0.4
    .. versionchanged:: 0.5 - added optional size param to read
    """

    def __init__(self, grid_id=None, key=None, instance=None):
        self.fs = gridfs.GridFS(_get_db())  # Filesystem instance
        self.newfile = None                 # Used for partial writes
        self.grid_id = grid_id              # Store GridFS id for file
        self.gridout = None
        self.key = key
        self.instance = instance

    def __getattr__(self, name):
        obj = self.get()
@@ -571,11 +766,18 @@ class GridFSProxy(object):
    def __get__(self, instance, value):
        return self

    def __nonzero__(self):
        return bool(self.grid_id)

    def get(self, id=None):
        if id:
            self.grid_id = id
        if self.grid_id is None:
            return None
        try:
            if self.gridout is None:
                self.gridout = self.fs.get(self.grid_id)
            return self.gridout
        except:
            # File has been deleted
            return None
@@ -584,11 +786,12 @@ class GridFSProxy(object):
        self.newfile = self.fs.new_file(**kwargs)
        self.grid_id = self.newfile._id

    def put(self, file_obj, **kwargs):
        if self.grid_id:
            raise GridFSError('This document already has a file. Either delete '
                              'it or call replace to overwrite it')
        self.grid_id = self.fs.put(file_obj, **kwargs)
        self._mark_as_changed()

    def write(self, string):
        if self.grid_id:
@@ -603,11 +806,11 @@ class GridFSProxy(object):
        if not self.newfile:
            self.new_file()
            self.grid_id = self.newfile._id
        self.newfile.writelines(lines)

    def read(self, size=-1):
        try:
            return self.get().read(size)
        except:
            return None
@@ -615,20 +818,28 @@ class GridFSProxy(object):
        # Delete file from GridFS, FileField still remains
        self.fs.delete(self.grid_id)
        self.grid_id = None
        self.gridout = None
        self._mark_as_changed()

    def replace(self, file_obj, **kwargs):
        self.delete()
        self.put(file_obj, **kwargs)

    def close(self):
        if self.newfile:
            self.newfile.close()

    def _mark_as_changed(self):
        """Inform the instance that `self.key` has been changed"""
        if self.instance:
            self.instance._mark_as_changed(self.key)


class FileField(BaseField):
    """A GridFS storage field.

    .. versionadded:: 0.4
    .. versionchanged:: 0.5 added optional size param for read
    """

    def __init__(self, **kwargs):
@@ -641,11 +852,15 @@ class FileField(BaseField):
        # Check if a file already exists for this model
        grid_file = instance._data.get(self.name)
        self.grid_file = grid_file
        if isinstance(self.grid_file, GridFSProxy):
            if not self.grid_file.key:
                self.grid_file.key = self.name
                self.grid_file.instance = instance
            return self.grid_file
        return GridFSProxy(key=self.name, instance=instance)

    def __set__(self, instance, value):
        key = self.name
        if isinstance(value, file) or isinstance(value, str):
            # using "FileField() = file/string" notation
            grid_file = instance._data.get(self.name)
@@ -659,10 +874,12 @@ class FileField(BaseField):
                grid_file.put(value)
            else:
                # Create a new proxy object as we don't already have one
                instance._data[key] = GridFSProxy(key=key, instance=instance)
                instance._data[key].put(value)
        else:
            instance._data[key] = value

        instance._mark_as_changed(key)
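A usage sketch (``Animal`` and the file name are illustrative):

    class Animal(Document):
        photo = FileField()

    marmot = Animal()
    marmot.photo.put(open('marmot.jpg', 'rb'), content_type='image/jpeg')
    marmot.save()

    marmot = Animal.objects.first()
    header = marmot.photo.read(10)  # read() now accepts an optional size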

    def to_mongo(self, value):
        # Store the GridFS file id in MongoDB
@@ -700,3 +917,61 @@ class GeoPointField(BaseField):
        if (not isinstance(value[0], (float, int)) and
                not isinstance(value[1], (float, int))):
            raise ValidationError('Both values in point must be float or int.')


class SequenceField(IntField):
    """Provides a sequential counter.

    .. note:: Although traditional databases often use increasing sequence
        numbers for primary keys, in MongoDB the preferred approach is to
        use Object IDs instead. The concept is that in a very large
        cluster of machines, it is easier to create an object ID than have
        global, uniformly increasing sequence numbers.

    .. versionadded:: 0.5
    """

    def __init__(self, collection_name=None, *args, **kwargs):
        self.collection_name = collection_name or 'mongoengine.counters'
        super(SequenceField, self).__init__(*args, **kwargs)

    def generate_new_value(self):
        """
        Generate and increment the counter.
        """
        sequence_id = "{0}.{1}".format(self.owner_document._get_collection_name(),
                                       self.name)
        collection = _get_db()[self.collection_name]
        counter = collection.find_and_modify(query={"_id": sequence_id},
                                             update={"$inc": {"next": 1}},
                                             new=True,
                                             upsert=True)
        return counter['next']

    def __get__(self, instance, owner):
        if instance is None:
            return self

        if not instance._data:
            return

        value = instance._data.get(self.name)

        if not value and instance._initialised:
            value = self.generate_new_value()
            instance._data[self.name] = value
            instance._mark_as_changed(self.name)

        return value

    def __set__(self, instance, value):
        if value is None and instance._initialised:
            value = self.generate_new_value()

        return super(SequenceField, self).__set__(instance, value)

    def to_python(self, value):
        if value is None:
            value = self.generate_new_value()
        return value
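A short sketch (``Ticket`` is illustrative):

    class Ticket(Document):
        number = SequenceField()

    t = Ticket()
    t.save()
    # The value is allocated the first time the field is read or set on an
    # initialised document, via an atomic $inc on the document with
    # _id 'ticket.number' in the 'mongoengine.counters' collection, so
    # successive tickets get 1, 2, 3, ...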

(File diff suppressed because it is too large.)

mongoengine/signals.py (new file)
@@ -0,0 +1,44 @@
# -*- coding: utf-8 -*-

__all__ = ['pre_init', 'post_init', 'pre_save', 'post_save',
           'pre_delete', 'post_delete']

signals_available = False
try:
    from blinker import Namespace
    signals_available = True
except ImportError:
    class Namespace(object):
        def signal(self, name, doc=None):
            return _FakeSignal(name, doc)

    class _FakeSignal(object):
        """If blinker is unavailable, create a fake class with the same
        interface that allows sending of signals but will fail with an
        error on anything else. Instead of doing anything on send, it
        will just ignore the arguments and do nothing.
        """

        def __init__(self, name, doc=None):
            self.name = name
            self.__doc__ = doc

        def _fail(self, *args, **kwargs):
            raise RuntimeError('signalling support is unavailable '
                               'because the blinker library is '
                               'not installed.')
        send = lambda *a, **kw: None
        connect = disconnect = has_receivers_for = receivers_for = \
            temporarily_connected_to = _fail
        del _fail

# the namespace for code signals. If you are not mongoengine code, do
# not put signals in here. Create your own namespace instead.
_signals = Namespace()

pre_init = _signals.signal('pre_init')
post_init = _signals.signal('post_init')
pre_save = _signals.signal('pre_save')
post_save = _signals.signal('post_save')
pre_delete = _signals.signal('pre_delete')
post_delete = _signals.signal('post_delete')
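A hedged sketch of connecting a handler (fully functional only when blinker is installed; the handler name is illustrative):

    from mongoengine import signals

    def log_save(sender, document, **kwargs):
        # 'created' is True for inserts and False for updates, as sent
        # by Document.save() above.
        pass

    signals.post_save.connect(log_save)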

mongoengine/tests.py (new file)
@@ -0,0 +1,59 @@
from mongoengine.connection import _get_db


class query_counter(object):
    """query_counter context manager to get the number of queries."""

    def __init__(self):
        """Construct the query_counter."""
        self.counter = 0
        self.db = _get_db()

    def __enter__(self):
        """On every with block we need to drop the profile collection."""
        self.db.set_profiling_level(0)
        self.db.system.profile.drop()
        self.db.set_profiling_level(2)
        return self

    def __exit__(self, t, value, traceback):
        """Reset the profiling level."""
        self.db.set_profiling_level(0)

    def __eq__(self, value):
        """== comparison for the query counter."""
        return value == self._get_count()

    def __ne__(self, value):
        """!= comparison for the query counter."""
        return not self.__eq__(value)

    def __lt__(self, value):
        """< comparison for the query counter."""
        return self._get_count() < value

    def __le__(self, value):
        """<= comparison for the query counter."""
        return self._get_count() <= value

    def __gt__(self, value):
        """> comparison for the query counter."""
        return self._get_count() > value

    def __ge__(self, value):
        """>= comparison for the query counter."""
        return self._get_count() >= value

    def __int__(self):
        """int representation."""
        return self._get_count()

    def __repr__(self):
        """repr query_counter as the number of queries."""
        return u"%s" % self._get_count()

    def _get_count(self):
        """Get the number of queries."""
        count = self.db.system.profile.find().count() - self.counter
        self.counter += 1
        return count
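Usage mirrors the dereference tests below. Note that each comparison itself issues one profiled query against system.profile, which _get_count compensates for by bumping self.counter:

    with query_counter() as q:
        assert q == 0
        # ... run some queries ...
        assert q == 2  # two queries were issued inside the block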

setup.py
@@ -15,7 +15,7 @@ def get_version(version_tuple):
    version = '%s.%s' % (version, version_tuple[2])
    return version

# Dirty hack to get version number from mongoengine/__init__.py - we can't
# import it as it depends on PyMongo and PyMongo isn't installed until this
# file is read
init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py')
@@ -47,4 +47,5 @@ setup(name='mongoengine',
      classifiers=CLASSIFIERS,
      install_requires=['pymongo'],
      test_suite='tests',
      tests_require=['blinker', 'django==1.3']
      )

tests/dereference.py (new file)
@@ -0,0 +1,658 @@
import unittest

from mongoengine import *
from mongoengine.connection import _get_db
from mongoengine.tests import query_counter


class FieldTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = _get_db()

    def test_list_item_dereference(self):
        """Ensure that DBRef items in ListFields are dereferenced.
        """
        class User(Document):
            name = StringField()

        class Group(Document):
            members = ListField(ReferenceField(User))

        User.drop_collection()
        Group.drop_collection()

        for i in xrange(1, 51):
            user = User(name='user %s' % i)
            user.save()

        group = Group(members=User.objects)
        group.save()

        group = Group(members=User.objects)
        group.save()

        with query_counter() as q:
            self.assertEqual(q, 0)

            group_obj = Group.objects.first()
            self.assertEqual(q, 1)

            [m for m in group_obj.members]
            self.assertEqual(q, 2)

        # Document select_related
        with query_counter() as q:
            self.assertEqual(q, 0)

            group_obj = Group.objects.first().select_related()
            self.assertEqual(q, 2)

            [m for m in group_obj.members]
            self.assertEqual(q, 2)

        # Queryset select_related
        with query_counter() as q:
            self.assertEqual(q, 0)

            group_objs = Group.objects.select_related()
            self.assertEqual(q, 2)

            for group_obj in group_objs:
                [m for m in group_obj.members]
                self.assertEqual(q, 2)

        User.drop_collection()
        Group.drop_collection()

    def test_recursive_reference(self):
        """Ensure that ReferenceFields can reference their own documents.
        """
        class Employee(Document):
            name = StringField()
            boss = ReferenceField('self')
            friends = ListField(ReferenceField('self'))

        Employee.drop_collection()

        bill = Employee(name='Bill Lumbergh')
        bill.save()

        michael = Employee(name='Michael Bolton')
        michael.save()

        samir = Employee(name='Samir Nagheenanajar')
        samir.save()

        friends = [michael, samir]
        peter = Employee(name='Peter Gibbons', boss=bill, friends=friends)
        peter.save()

        Employee(name='Funky Gibbon', boss=bill, friends=friends).save()
        Employee(name='Funky Gibbon', boss=bill, friends=friends).save()
        Employee(name='Funky Gibbon', boss=bill, friends=friends).save()

        with query_counter() as q:
            self.assertEqual(q, 0)

            peter = Employee.objects.with_id(peter.id)
            self.assertEqual(q, 1)

            peter.boss
            self.assertEqual(q, 2)

            peter.friends
            self.assertEqual(q, 3)

        # Document select_related
        with query_counter() as q:
            self.assertEqual(q, 0)

            peter = Employee.objects.with_id(peter.id).select_related()
            self.assertEqual(q, 2)

            self.assertEquals(peter.boss, bill)
            self.assertEqual(q, 2)

            self.assertEquals(peter.friends, friends)
            self.assertEqual(q, 2)

        # Queryset select_related
        with query_counter() as q:
            self.assertEqual(q, 0)

            employees = Employee.objects(boss=bill).select_related()
            self.assertEqual(q, 2)

            for employee in employees:
                self.assertEquals(employee.boss, bill)
                self.assertEqual(q, 2)

                self.assertEquals(employee.friends, friends)
                self.assertEqual(q, 2)

    def test_generic_reference(self):

        class UserA(Document):
            name = StringField()

        class UserB(Document):
            name = StringField()

        class UserC(Document):
            name = StringField()

        class Group(Document):
            members = ListField(GenericReferenceField())

        UserA.drop_collection()
        UserB.drop_collection()
        UserC.drop_collection()
        Group.drop_collection()

        members = []
        for i in xrange(1, 51):
            a = UserA(name='User A %s' % i)
            a.save()

            b = UserB(name='User B %s' % i)
            b.save()

            c = UserC(name='User C %s' % i)
            c.save()

            members += [a, b, c]

        group = Group(members=members)
        group.save()

        group = Group(members=members)
        group.save()

        with query_counter() as q:
            self.assertEqual(q, 0)

            group_obj = Group.objects.first()
            self.assertEqual(q, 1)

            [m for m in group_obj.members]
            self.assertEqual(q, 4)

            [m for m in group_obj.members]
            self.assertEqual(q, 4)

            for m in group_obj.members:
                self.assertTrue('User' in m.__class__.__name__)

        # Document select_related
        with query_counter() as q:
            self.assertEqual(q, 0)

            group_obj = Group.objects.first().select_related()
            self.assertEqual(q, 4)

            [m for m in group_obj.members]
            self.assertEqual(q, 4)

            [m for m in group_obj.members]
            self.assertEqual(q, 4)

            for m in group_obj.members:
                self.assertTrue('User' in m.__class__.__name__)

        # Queryset select_related
        with query_counter() as q:
            self.assertEqual(q, 0)

            group_objs = Group.objects.select_related()
            self.assertEqual(q, 4)

            for group_obj in group_objs:
                [m for m in group_obj.members]
                self.assertEqual(q, 4)

                [m for m in group_obj.members]
                self.assertEqual(q, 4)

                for m in group_obj.members:
                    self.assertTrue('User' in m.__class__.__name__)

        UserA.drop_collection()
        UserB.drop_collection()
        UserC.drop_collection()
        Group.drop_collection()

    def test_list_field_complex(self):

        class UserA(Document):
            name = StringField()

        class UserB(Document):
            name = StringField()

        class UserC(Document):
            name = StringField()

        class Group(Document):
            members = ListField()

        UserA.drop_collection()
        UserB.drop_collection()
        UserC.drop_collection()
        Group.drop_collection()

        members = []
        for i in xrange(1, 51):
            a = UserA(name='User A %s' % i)
            a.save()

            b = UserB(name='User B %s' % i)
            b.save()

            c = UserC(name='User C %s' % i)
            c.save()

            members += [a, b, c]

        group = Group(members=members)
        group.save()

        group = Group(members=members)
        group.save()

        with query_counter() as q:
            self.assertEqual(q, 0)

            group_obj = Group.objects.first()
            self.assertEqual(q, 1)

            [m for m in group_obj.members]
            self.assertEqual(q, 4)

            [m for m in group_obj.members]
            self.assertEqual(q, 4)

            for m in group_obj.members:
                self.assertTrue('User' in m.__class__.__name__)

        # Document select_related
        with query_counter() as q:
            self.assertEqual(q, 0)

            group_obj = Group.objects.first().select_related()
            self.assertEqual(q, 4)

            [m for m in group_obj.members]
            self.assertEqual(q, 4)

            [m for m in group_obj.members]
            self.assertEqual(q, 4)

            for m in group_obj.members:
                self.assertTrue('User' in m.__class__.__name__)

        # Queryset select_related
        with query_counter() as q:
            self.assertEqual(q, 0)

            group_objs = Group.objects.select_related()
            self.assertEqual(q, 4)

            for group_obj in group_objs:
                [m for m in group_obj.members]
                self.assertEqual(q, 4)

                [m for m in group_obj.members]
                self.assertEqual(q, 4)

                for m in group_obj.members:
                    self.assertTrue('User' in m.__class__.__name__)

        UserA.drop_collection()
        UserB.drop_collection()
        UserC.drop_collection()
        Group.drop_collection()

    def test_map_field_reference(self):

        class User(Document):
            name = StringField()

        class Group(Document):
            members = MapField(ReferenceField(User))

        User.drop_collection()
        Group.drop_collection()

        members = []
        for i in xrange(1, 51):
            user = User(name='user %s' % i)
            user.save()
            members.append(user)

        group = Group(members=dict([(str(u.id), u) for u in members]))
        group.save()

        group = Group(members=dict([(str(u.id), u) for u in members]))
        group.save()

        with query_counter() as q:
            self.assertEqual(q, 0)

            group_obj = Group.objects.first()
            self.assertEqual(q, 1)

            [m for m in group_obj.members]
            self.assertEqual(q, 2)

            for k, m in group_obj.members.iteritems():
                self.assertTrue(isinstance(m, User))

        # Document select_related
        with query_counter() as q:
            self.assertEqual(q, 0)

            group_obj = Group.objects.first().select_related()
            self.assertEqual(q, 2)

            [m for m in group_obj.members]
            self.assertEqual(q, 2)

            for k, m in group_obj.members.iteritems():
                self.assertTrue(isinstance(m, User))

        # Queryset select_related
        with query_counter() as q:
            self.assertEqual(q, 0)

            group_objs = Group.objects.select_related()
            self.assertEqual(q, 2)

            for group_obj in group_objs:
                [m for m in group_obj.members]
                self.assertEqual(q, 2)

                for k, m in group_obj.members.iteritems():
                    self.assertTrue(isinstance(m, User))

        User.drop_collection()
        Group.drop_collection()

    def test_dict_field(self):

        class UserA(Document):
            name = StringField()

        class UserB(Document):
            name = StringField()

        class UserC(Document):
            name = StringField()

        class Group(Document):
            members = DictField()

        UserA.drop_collection()
        UserB.drop_collection()
        UserC.drop_collection()
        Group.drop_collection()

        members = []
        for i in xrange(1, 51):
            a = UserA(name='User A %s' % i)
            a.save()

            b = UserB(name='User B %s' % i)
            b.save()

            c = UserC(name='User C %s' % i)
            c.save()

            members += [a, b, c]

        group = Group(members=dict([(str(u.id), u) for u in members]))
        group.save()
        group = Group(members=dict([(str(u.id), u) for u in members]))
        group.save()

        with query_counter() as q:
            self.assertEqual(q, 0)

            group_obj = Group.objects.first()
            self.assertEqual(q, 1)

            [m for m in group_obj.members]
            self.assertEqual(q, 4)

            [m for m in group_obj.members]
            self.assertEqual(q, 4)

            for k, m in group_obj.members.iteritems():
                self.assertTrue('User' in m.__class__.__name__)

        # Document select_related
        with query_counter() as q:
            self.assertEqual(q, 0)

            group_obj = Group.objects.first().select_related()
            self.assertEqual(q, 4)

            [m for m in group_obj.members]
            self.assertEqual(q, 4)

            [m for m in group_obj.members]
            self.assertEqual(q, 4)

            for k, m in group_obj.members.iteritems():
                self.assertTrue('User' in m.__class__.__name__)

        # Queryset select_related
        with query_counter() as q:
            self.assertEqual(q, 0)

            group_objs = Group.objects.select_related()
            self.assertEqual(q, 4)

            for group_obj in group_objs:
                [m for m in group_obj.members]
                self.assertEqual(q, 4)

                [m for m in group_obj.members]
                self.assertEqual(q, 4)

                for k, m in group_obj.members.iteritems():
                    self.assertTrue('User' in m.__class__.__name__)

        Group.objects.delete()
        Group().save()

        with query_counter() as q:
            self.assertEqual(q, 0)

            group_obj = Group.objects.first()
            self.assertEqual(q, 1)

            [m for m in group_obj.members]
            self.assertEqual(q, 1)
            self.assertEqual(group_obj.members, {})

        UserA.drop_collection()
        UserB.drop_collection()
        UserC.drop_collection()
        Group.drop_collection()

    def test_dict_field_no_field_inheritance(self):

        class UserA(Document):
            name = StringField()
            meta = {'allow_inheritance': False}

        class Group(Document):
            members = DictField()

        UserA.drop_collection()
        Group.drop_collection()

        members = []
        for i in xrange(1, 51):
            a = UserA(name='User A %s' % i)
            a.save()

            members += [a]

        group = Group(members=dict([(str(u.id), u) for u in members]))
        group.save()

        group = Group(members=dict([(str(u.id), u) for u in members]))
        group.save()

        with query_counter() as q:
            self.assertEqual(q, 0)

            group_obj = Group.objects.first()
            self.assertEqual(q, 1)

            [m for m in group_obj.members]
            self.assertEqual(q, 2)

            [m for m in group_obj.members]
            self.assertEqual(q, 2)

            for k, m in group_obj.members.iteritems():
                self.assertTrue(isinstance(m, UserA))

        # Document select_related
        with query_counter() as q:
            self.assertEqual(q, 0)

            group_obj = Group.objects.first().select_related()
            self.assertEqual(q, 2)

            [m for m in group_obj.members]
            self.assertEqual(q, 2)

            [m for m in group_obj.members]
            self.assertEqual(q, 2)

            for k, m in group_obj.members.iteritems():
                self.assertTrue(isinstance(m, UserA))

        # Queryset select_related
        with query_counter() as q:
            self.assertEqual(q, 0)

            group_objs = Group.objects.select_related()
            self.assertEqual(q, 2)

            for group_obj in group_objs:
                [m for m in group_obj.members]
                self.assertEqual(q, 2)

                [m for m in group_obj.members]
                self.assertEqual(q, 2)

                for k, m in group_obj.members.iteritems():
                    self.assertTrue(isinstance(m, UserA))

        UserA.drop_collection()
        Group.drop_collection()

    def test_generic_reference_map_field(self):

        class UserA(Document):
            name = StringField()

        class UserB(Document):
            name = StringField()

        class UserC(Document):
            name = StringField()

        class Group(Document):
            members = MapField(GenericReferenceField())

        UserA.drop_collection()
        UserB.drop_collection()
        UserC.drop_collection()
        Group.drop_collection()

        members = []
        for i in xrange(1, 51):
            a = UserA(name='User A %s' % i)
            a.save()

            b = UserB(name='User B %s' % i)
            b.save()

            c = UserC(name='User C %s' % i)
            c.save()

            members += [a, b, c]

        group = Group(members=dict([(str(u.id), u) for u in members]))
        group.save()
        group = Group(members=dict([(str(u.id), u) for u in members]))
        group.save()

        with query_counter() as q:
            self.assertEqual(q, 0)

            group_obj = Group.objects.first()
            self.assertEqual(q, 1)

            [m for m in group_obj.members]
            self.assertEqual(q, 4)

            [m for m in group_obj.members]
            self.assertEqual(q, 4)

            for k, m in group_obj.members.iteritems():
                self.assertTrue('User' in m.__class__.__name__)

        # Document select_related
        with query_counter() as q:
            self.assertEqual(q, 0)

            group_obj = Group.objects.first().select_related()
            self.assertEqual(q, 4)

            [m for m in group_obj.members]
            self.assertEqual(q, 4)

            [m for m in group_obj.members]
            self.assertEqual(q, 4)

            for k, m in group_obj.members.iteritems():
                self.assertTrue('User' in m.__class__.__name__)

        # Queryset select_related
        with query_counter() as q:
            self.assertEqual(q, 0)

            group_objs = Group.objects.select_related()
            self.assertEqual(q, 4)

            for group_obj in group_objs:
                [m for m in group_obj.members]
                self.assertEqual(q, 4)

                [m for m in group_obj.members]
                self.assertEqual(q, 4)

                for k, m in group_obj.members.iteritems():
                    self.assertTrue('User' in m.__class__.__name__)

        Group.objects.delete()
        Group().save()

        with query_counter() as q:
            self.assertEqual(q, 0)

            group_obj = Group.objects.first()
            self.assertEqual(q, 1)

            [m for m in group_obj.members]
            self.assertEqual(q, 1)

        UserA.drop_collection()
        UserB.drop_collection()
        UserC.drop_collection()
        Group.drop_collection()

tests/django_tests.py (new file)
@@ -0,0 +1,69 @@
# -*- coding: utf-8 -*-

import unittest

from mongoengine import *
from mongoengine.django.shortcuts import get_document_or_404

from django.http import Http404
from django.template import Context, Template
from django.conf import settings
settings.configure()


class QuerySetTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')

        class Person(Document):
            name = StringField()
            age = IntField()
        self.Person = Person

    def test_order_by_in_django_template(self):
        """Ensure that QuerySets are properly ordered in Django template.
        """
        self.Person.drop_collection()

        self.Person(name="A", age=20).save()
        self.Person(name="D", age=10).save()
        self.Person(name="B", age=40).save()
        self.Person(name="C", age=30).save()

        t = Template("{% for o in ol %}{{ o.name }}-{{ o.age }}:{% endfor %}")

        d = {"ol": self.Person.objects.order_by('-name')}
        self.assertEqual(t.render(Context(d)), u'D-10:C-30:B-40:A-20:')
        d = {"ol": self.Person.objects.order_by('+name')}
        self.assertEqual(t.render(Context(d)), u'A-20:B-40:C-30:D-10:')
        d = {"ol": self.Person.objects.order_by('-age')}
        self.assertEqual(t.render(Context(d)), u'B-40:C-30:A-20:D-10:')
        d = {"ol": self.Person.objects.order_by('+age')}
        self.assertEqual(t.render(Context(d)), u'D-10:A-20:C-30:B-40:')

        self.Person.drop_collection()

    def test_q_object_filter_in_template(self):

        self.Person.drop_collection()

        self.Person(name="A", age=20).save()
        self.Person(name="D", age=10).save()
        self.Person(name="B", age=40).save()
        self.Person(name="C", age=30).save()

        t = Template("{% for o in ol %}{{ o.name }}-{{ o.age }}:{% endfor %}")

        d = {"ol": self.Person.objects.filter(Q(age=10) | Q(name="C"))}
        self.assertEqual(t.render(Context(d)), 'D-10:C-30:')

        # Check double rendering doesn't throw an error
        self.assertEqual(t.render(Context(d)), 'D-10:C-30:')

    def test_get_document_or_404(self):
        p = self.Person(name="G404")
        p.save()

        self.assertRaises(Http404, get_document_or_404, self.Person, pk='1234')
        self.assertEqual(p, get_document_or_404(self.Person, pk=p.pk))

tests/document.py (1784 changed lines; file diff suppressed because it is too large)
tests/fields.py (814 changed lines; file diff suppressed because it is too large)

tests/fixtures.py (new file)
@@ -0,0 +1,25 @@
from datetime import datetime
import pymongo

from mongoengine import *
from mongoengine.base import BaseField
from mongoengine.connection import _get_db


class PickleEmbedded(EmbeddedDocument):
    date = DateTimeField(default=datetime.now)


class PickleTest(Document):
    number = IntField()
    string = StringField(choices=(('One', '1'), ('Two', '2')))
    embedded = EmbeddedDocumentField(PickleEmbedded)
    lists = ListField(StringField())


class Mixin(object):
    name = StringField()


class Base(Document):
    pass
|
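A usage sketch, not part of this commit: these fixtures appear to back pickling tests elsewhere in the suite, covering a plain field, a field with choices, an embedded document and a list field. A hypothetical round trip with them might look like:

    import pickle

    from mongoengine import connect
    from tests.fixtures import PickleEmbedded, PickleTest

    connect(db='mongoenginetest')  # a live connection is assumed here

    doc = PickleTest(number=1, string='One', lists=['a', 'b'])
    doc.embedded = PickleEmbedded()
    doc.save()

    # The document should survive a pickle round trip with its data intact.
    clone = pickle.loads(pickle.dumps(doc))
    assert clone.number == doc.number
    assert clone.lists == doc.lists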
1345  tests/queryset.py
(File diff suppressed because it is too large)
181  tests/signals.py  (new file)
@@ -0,0 +1,181 @@
# -*- coding: utf-8 -*-
import unittest

from mongoengine import *
from mongoengine import signals

signal_output = []


class SignalTests(unittest.TestCase):
    """
    Testing signals before/after saving and deleting.
    """

    def get_signal_output(self, fn, *args, **kwargs):
        # Flush any existing signal output
        global signal_output
        signal_output = []
        fn(*args, **kwargs)
        return signal_output

    def setUp(self):
        connect(db='mongoenginetest')

        class Author(Document):
            name = StringField()

            def __unicode__(self):
                return self.name

            @classmethod
            def pre_init(cls, sender, document, *args, **kwargs):
                signal_output.append('pre_init signal, %s' % cls.__name__)
                signal_output.append(str(kwargs['values']))

            @classmethod
            def post_init(cls, sender, document, **kwargs):
                signal_output.append('post_init signal, %s' % document)

            @classmethod
            def pre_save(cls, sender, document, **kwargs):
                signal_output.append('pre_save signal, %s' % document)

            @classmethod
            def post_save(cls, sender, document, **kwargs):
                signal_output.append('post_save signal, %s' % document)
                if 'created' in kwargs:
                    if kwargs['created']:
                        signal_output.append('Is created')
                    else:
                        signal_output.append('Is updated')

            @classmethod
            def pre_delete(cls, sender, document, **kwargs):
                signal_output.append('pre_delete signal, %s' % document)

            @classmethod
            def post_delete(cls, sender, document, **kwargs):
                signal_output.append('post_delete signal, %s' % document)

        self.Author = Author

        class Another(Document):
            name = StringField()

            def __unicode__(self):
                return self.name

            @classmethod
            def pre_init(cls, sender, document, **kwargs):
                signal_output.append('pre_init Another signal, %s' % cls.__name__)
                signal_output.append(str(kwargs['values']))

            @classmethod
            def post_init(cls, sender, document, **kwargs):
                signal_output.append('post_init Another signal, %s' % document)

            @classmethod
            def pre_save(cls, sender, document, **kwargs):
                signal_output.append('pre_save Another signal, %s' % document)

            @classmethod
            def post_save(cls, sender, document, **kwargs):
                signal_output.append('post_save Another signal, %s' % document)
                if 'created' in kwargs:
                    if kwargs['created']:
                        signal_output.append('Is created')
                    else:
                        signal_output.append('Is updated')

            @classmethod
            def pre_delete(cls, sender, document, **kwargs):
                signal_output.append('pre_delete Another signal, %s' % document)

            @classmethod
            def post_delete(cls, sender, document, **kwargs):
                signal_output.append('post_delete Another signal, %s' % document)

        self.Another = Another

        # Save up the number of connected signals so that we can check at the
        # end that all the signals we register get properly unregistered
        self.pre_signals = (
            len(signals.pre_init.receivers),
            len(signals.post_init.receivers),
            len(signals.pre_save.receivers),
            len(signals.post_save.receivers),
            len(signals.pre_delete.receivers),
            len(signals.post_delete.receivers)
        )

        signals.pre_init.connect(Author.pre_init, sender=Author)
        signals.post_init.connect(Author.post_init, sender=Author)
        signals.pre_save.connect(Author.pre_save, sender=Author)
        signals.post_save.connect(Author.post_save, sender=Author)
        signals.pre_delete.connect(Author.pre_delete, sender=Author)
        signals.post_delete.connect(Author.post_delete, sender=Author)

        signals.pre_init.connect(Another.pre_init, sender=Another)
        signals.post_init.connect(Another.post_init, sender=Another)
        signals.pre_save.connect(Another.pre_save, sender=Another)
        signals.post_save.connect(Another.post_save, sender=Another)
        signals.pre_delete.connect(Another.pre_delete, sender=Another)
        signals.post_delete.connect(Another.post_delete, sender=Another)

    def tearDown(self):
        signals.pre_init.disconnect(self.Author.pre_init)
        signals.post_init.disconnect(self.Author.post_init)
        signals.post_delete.disconnect(self.Author.post_delete)
        signals.pre_delete.disconnect(self.Author.pre_delete)
        signals.post_save.disconnect(self.Author.post_save)
        signals.pre_save.disconnect(self.Author.pre_save)

        signals.pre_init.disconnect(self.Another.pre_init)
        signals.post_init.disconnect(self.Another.post_init)
        signals.post_delete.disconnect(self.Another.post_delete)
        signals.pre_delete.disconnect(self.Another.pre_delete)
        signals.post_save.disconnect(self.Another.post_save)
        signals.pre_save.disconnect(self.Another.pre_save)

        # Check that all our signals got disconnected properly.
        post_signals = (
            len(signals.pre_init.receivers),
            len(signals.post_init.receivers),
            len(signals.pre_save.receivers),
            len(signals.post_save.receivers),
            len(signals.pre_delete.receivers),
            len(signals.post_delete.receivers)
        )

        self.assertEqual(self.pre_signals, post_signals)

    def test_model_signals(self):
        """ Model saves should throw some signals. """

        def create_author():
            a1 = self.Author(name='Bill Shakespeare')

        self.assertEqual(self.get_signal_output(create_author), [
            "pre_init signal, Author",
            "{'name': 'Bill Shakespeare'}",
            "post_init signal, Bill Shakespeare",
        ])

        a1 = self.Author(name='Bill Shakespeare')
        self.assertEqual(self.get_signal_output(a1.save), [
            "pre_save signal, Bill Shakespeare",
            "post_save signal, Bill Shakespeare",
            "Is created"
        ])

        a1.reload()
        a1.name = 'William Shakespeare'
        self.assertEqual(self.get_signal_output(a1.save), [
            "pre_save signal, William Shakespeare",
            "post_save signal, William Shakespeare",
            "Is updated"
        ])

        self.assertEqual(self.get_signal_output(a1.delete), [
            'pre_delete signal, William Shakespeare',
            'post_delete signal, William Shakespeare',
        ])
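A usage sketch, not part of this commit: outside the test suite, the same signals module can be wired up against an application document. Assuming the blinker-backed signals added in this change, a minimal handler might look like:

    from mongoengine import Document, StringField, connect, signals

    class Page(Document):
        title = StringField()

    def page_saved(sender, document, **kwargs):
        # post_save passes created=True for inserts and created=False for
        # updates, which is what the "Is created"/"Is updated" output in
        # the tests above distinguishes.
        print('saved %s (created=%s)' % (document.title, kwargs.get('created')))

    signals.post_save.connect(page_saved, sender=Page)

    connect(db='mongoenginetest')
    Page(title='Hello').save()  # prints: saved Hello (created=True)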