Compare commits

...

120 Commits
v0.2.1 ... v0.3

Author SHA1 Message Date
Harry Marr
00c8d7e6f5 Bump to v0.3 2010-03-17 16:50:13 +00:00
Harry Marr
0d89e967f2 Merge branch 'mapreduce' of git://github.com/blackbrrr/mongoengine 2010-03-17 16:44:24 +00:00
blackbrrr
447f8d0113 MapReduceDocument.object works with custom primary keys. test included. 2010-03-17 11:31:17 -05:00
Harry Marr
60802796cb Made ListField validation exceptions more helpful 2010-03-17 15:10:10 +00:00
Harry Marr
5b42578cb1 Added ~ field name substitution to mapreduce funcs 2010-03-17 14:06:31 +00:00
Harry Marr
25a0a5364a Deprecated 'name' arg for fields in favour of 'db_field' 2010-03-17 13:47:23 +00:00
Harry Marr
047cc218a6 Merge branch 'mapreduce' of git://github.com/blackbrrr/mongoengine
Conflicts:
	mongoengine/queryset.py
2010-03-17 12:31:08 +00:00
Harry Marr
39fc862676 Merge branch 'upsert' of git://github.com/blackbrrr/mongoengine 2010-03-17 12:30:18 +00:00
blackbrrr
f47d926f29 touched up comments in advanced map/reduce test 2010-03-17 00:56:34 -05:00
blackbrrr
f4d0938e3d rewrite simple map/reduce test 2010-03-17 00:51:01 -05:00
blackbrrr
f156da4ec2 bumped version 2010-03-17 00:50:44 -05:00
blackbrrr
0c1e5da9a8 added mongoengine.MapReduceDocument to api ref 2010-03-17 00:50:07 -05:00
blackbrrr
d6b317c552 Merge branch 'master' into mapreduce 2010-03-17 00:34:29 -05:00
blackbrrr
01826c6876 Merge branch 'master' of github.com:blackbrrr/mongoengine into mapreduce 2010-03-17 00:34:19 -05:00
blackbrrr
0b62c9d2f6 Merge branch 'master' of git://github.com/hmarr/mongoengine 2010-03-17 00:34:00 -05:00
Deepak Thukral
72161a9b71 no message in expection in future version of python 2010-03-11 21:10:04 +01:00
Deepak Thukral
df8f4e7251 no message in expection in future version of python 2010-03-11 20:42:27 +01:00
blackbrrr
aa13ab37c4 fixed field_js merge artifact 2010-03-09 15:31:52 -06:00
blackbrrr
acda64a837 fixed field_js merge artifact 2010-03-09 15:31:28 -06:00
blackbrrr
49a001a93a re-added missing QuerySet._ordering 2010-03-09 15:28:55 -06:00
blackbrrr
22a6ec7794 merged conflicts 2010-03-09 15:19:14 -06:00
blackbrrr
26c6e4997c added 'upsert' arg to QuerySet.update and QuerySet.update_one 2010-03-08 21:59:54 -06:00
Harry Marr
d7086fc4a3 Added rewind behaviour and BinaryField to docs 2010-03-08 22:23:40 +00:00
Harry Marr
92150e07d3 Merge branch 'binary-fields' of git://github.com/benmur/mongoengine 2010-03-08 22:17:21 +00:00
Harry Marr
ac3c857e1a Added rewind to QuerySet, which is implicitly called when iteration finishes 2010-03-08 22:15:40 +00:00
Harry Marr
48e313fb44 Merge branch 'master' of git://github.com/iapain/mongoengine 2010-03-08 21:59:42 +00:00
Harry Marr
5390117275 Modified AUTHORS 2010-03-08 21:58:19 +00:00
Rached Ben Mustapha
0b3af2052f implement binary field size validation 2010-03-08 17:06:52 +01:00
Rached Ben Mustapha
bb19ba3eb6 Drop collection at the end of the test 2010-03-08 16:43:43 +01:00
Rached Ben Mustapha
879bf08d18 Simple implementation of BinaryField 2010-03-08 16:42:23 +01:00
Deepak Thukral
b99421e7ee added date_joined and normalization to email address 2010-03-05 07:55:12 +01:00
blackbrrr
3b6d8fab47 added AUTHORS file 2010-03-01 12:26:03 -06:00
Harry Marr
53c0cdc0c1 Added recursive and undefined document ref docs 2010-02-28 23:29:42 +00:00
Harry Marr
58f877de1a Added recursive / document name references 2010-02-28 23:16:51 +00:00
Harry Marr
95a7b33fb4 Changed how GenericReferenceFields are stored / queried 2010-02-28 23:15:21 +00:00
Harry Marr
81dd5adccf GenericReferenceField docs 2010-02-28 21:30:54 +00:00
Harry Marr
94e86a0be1 Merge branch 'gfk' of git://github.com/blackbrrr/mongoengine 2010-02-28 21:08:32 +00:00
Harry Marr
5b2dbfe007 Added tests for URLField and DecimalField 2010-02-28 18:25:40 +00:00
Harry Marr
4451843a39 Added docs for QuerySet.only 2010-02-28 17:52:29 +00:00
Harry Marr
5e2c5fa97b Merge branch 'regex-query-shortcuts' 2010-02-28 17:38:03 +00:00
blackbrrr
018b206177 added support for GenericReferenceField to ListField. could be cleaner, perhaps. 2010-02-26 17:38:38 -06:00
blackbrrr
03d31b1890 added global model registry and GenericReferenceField, a ReferenceField not bound to a particular model 2010-02-26 16:59:12 -06:00
Harry Marr
265776566e QuerySet.only field name translation and polymorphism fix 2010-02-26 19:43:26 +00:00
Harry Marr
6e77e32855 Fixed Q object ObjectId comparison issue 2010-02-26 17:13:19 +00:00
Harry Marr
0b1c506626 Added Q object support for regexes (inc. operator shortcuts) 2010-02-26 16:46:07 +00:00
Harry Marr
719a653375 Added match operator docs 2010-02-26 13:48:00 +00:00
Harry Marr
66520c77f8 Added regex match operators with test 2010-02-26 13:43:45 +00:00
Harry Marr
ab2d019349 Added server-side js docs 2010-02-26 13:23:15 +00:00
Harry Marr
d0e0b291df Implementation and tests for exec_js field substitution 2010-02-25 17:20:52 +00:00
Harry Marr
200e9eca92 Merge branch 'only_fields' of git://github.com/blackbrrr/mongoengine 2010-02-24 20:23:59 +00:00
Harry Marr
634f771547 QuerySet repr now uses limit and skip 2010-02-24 17:01:31 +00:00
Harry Marr
2996f8919d Limits of size 0 now return no results 2010-02-24 16:07:26 +00:00
blackbrrr
1b68efe7c7 updated QuerySet.only docstring 2010-02-24 09:52:39 -06:00
blackbrrr
a19a7b976c updated advanced map/reduce test to include scope; misc cleanup in queryset 2010-02-23 22:26:05 -06:00
Harry Marr
145b0c33fc Support 1-arg queryset managers, but warn about deprecation 2010-02-23 18:27:14 +00:00
blackbrrr
8b1a39f2c1 added QuerySet.only 2010-02-23 00:24:28 -06:00
Harry Marr
6dbc051409 Fixed in_bulk test 2010-02-23 01:14:00 +00:00
Harry Marr
c148a5bbfc Merge branch 'master' of git://github.com/ack/mongoengine 2010-02-23 01:12:56 +00:00
Deepak Thukral
90d9bd9723 added natural object comparision 2010-02-21 12:13:58 +01:00
Deepak Thukral
bc7e6ccf53 set_password returns user object, comp. with django 1.2 2010-02-21 11:51:21 +01:00
Albert Choi
6cab002214 missing DecimalField 2010-02-15 14:25:49 -08:00
blackbrrr
3762a69537 added QuerySet.in_bulk, bulk querying with ObjectIds 2010-02-14 21:02:55 -06:00
blackbrrr
348f7b5dfc merged master, fixed 1 merge conflict 2010-02-14 17:23:38 -06:00
blackbrrr
008a62e4e9 updated map/reduce documentation 2010-02-12 16:07:44 -06:00
blackbrrr
a4c5fa57e0 updated notes in map_reduce_advanced queryset test 2010-02-12 15:53:28 -06:00
blackbrrr
9be6c41af7 map/reduce result objects now only have 'key', 'value', and 'object' properties; MapReduceDocument.key_object now returns proper Document subclass; added finalize with Reddit ranking simulation; MapReduceDocuments now yielded; 2010-02-12 14:39:08 -06:00
blackbrrr
5c311eefb1 fixed merge conflict in queryset test 2010-02-12 09:59:09 -06:00
blackbrrr
d0ceb74a2e removed unused fields and tests 2010-02-12 09:57:09 -06:00
Harry Marr
ea1fe6a538 Fixed set/unset issue with ListFields 2010-02-12 11:21:51 +00:00
Florian Schlachter
a93509c9b3 Merge remote branch 'hmarr/master' 2010-02-12 10:19:47 +01:00
Harry Marr
210e9e23af Dereferencing of referenced documents within lists 2010-02-12 02:31:41 +00:00
blackbrrr
c4513f0286 merged master 2010-02-11 15:43:37 -06:00
Florian Schlachter
1114572b47 Merge remote branch 'hmarr/master' 2010-02-10 14:25:33 +01:00
Harry Marr
b2588d1c4f Changed neq to ne, fixed Q object in and nin 2010-02-10 12:35:41 +00:00
blackbrrr
69d3e0c4b6 added map/reduce support via QuerySet.map_reduce. map_reduce operations respect query specs and ordering, but ordering is currently only applied to map/reduce collection. map/reduce may eventually require its own QuerySet to avoid slicing conflicts. results are returned as lists of MapReduceDocument objects, dynamic objects representing the query. tests and documentation included. considered in the neighborhood of 'good start'. 2010-02-09 14:56:15 -06:00
Florian Schlachter
e2414d8fea Merge remote branch 'hmarr/master' 2010-02-05 00:36:26 +01:00
Florian Schlachter
24db0d1499 return db-object to allow low-level access from outside via connect() 2010-02-05 00:35:49 +01:00
Harry Marr
89f505bb13 Removed pool_size from connect, minor tidyup 2010-02-04 01:44:52 +00:00
Florian Schlachter
df5b1f3806 Merge remote branch 'hmarr/master' 2010-02-03 02:26:26 +01:00
Harry Marr
755deb3ffe Added DictField 2010-02-03 01:22:25 +00:00
Florian Schlachter
59f8c9f38e make mongoengine more international :) using unicode-strings; str(err) raises errors if it contains non-ascii chars/umlauts 2010-02-02 21:48:47 +01:00
Florian Schlachter
69e9b5d55e fixed unicode-bug; replaced str(err) with err.message 2010-02-02 21:44:11 +01:00
Florian Schlachter
a2d8b0ffbe Merge remote branch 'hmarr/master' 2010-02-02 18:49:52 +01:00
Harry Marr
0bbf3a3d76 Fixed EmbeddedDocument validation bug 2010-02-02 17:37:09 +00:00
Florian Schlachter
10de19d38b be kind and also accept an integer for a float field (so e.g. mymodel.floatfield = 9 is possible, instead of mymodel.floatfield = 9.0) 2010-01-31 18:06:25 +01:00
Florian Schlachter
73aff806f3 reset to master, keep working on the dirty-fields-patch in another branch 2010-01-31 18:00:01 +01:00
Florian Schlachter
963a223e7e Merge remote branch 'hmarr/master'
Conflicts:
	mongoengine/queryset.py
2010-01-31 17:43:56 +01:00
Florian Schlachter
bbfc2f416e keep track of dirty fields is still work in progress; EmbeddedDocuments still aren't tracked (TBD) 2010-01-31 15:43:40 +01:00
Harry Marr
e05d31eaaf Added get{,_or_create} docs 2010-01-31 13:47:27 +00:00
Florian Schlachter
431f006751 new save() method updates only dirty fields. fixes issue #18 2010-01-31 14:40:00 +01:00
Harry Marr
ffc9d7b152 Merge branch 'master' of git://github.com/flosch/mongoengine
Added unit test for get_or_create, merged flosch's get with
punteney's get.

Conflicts:
	mongoengine/queryset.py
2010-01-31 13:24:50 +00:00
Harry Marr
79604180db Merge branch 'master' of git://github.com/punteney/mongoengine 2010-01-31 13:11:20 +00:00
Florian Schlachter
7d6e117f68 added get-method to fetch exactly one document from the collection. catching pymongo's ObjectId-errors and raising mongoengine's ValidationError instead. 2010-01-31 01:11:37 +01:00
Florian Schlachter
b3cc2f990a improved get_or_create 2010-01-30 22:01:43 +01:00
Florian Schlachter
8d953f0bcb Added get_or_create-method 2010-01-30 21:12:46 +01:00
Harry Marr
5cac52720c Fixed querying on ReferenceFields using primary key 2010-01-27 15:57:11 +00:00
Harry Marr
bca6119db8 Minor tidyup 2010-01-26 19:36:19 +00:00
Harry Marr
568000805f EmbeddedDocuments may now be non-polymorphic 2010-01-25 01:00:04 +00:00
blackbrrr
3fb6307596 Merge branch 'master' of git://github.com/hmarr/mongoengine 2010-01-23 18:41:52 -06:00
blackbrrr
7aa0031dec reset 2010-01-23 15:39:26 -06:00
Harry Marr
2585f1b724 queryset_manager funcs now accept doc as arg 2010-01-23 17:16:01 +00:00
Harry Marr
470e08f616 exec_js functions now acknowledge Q objects 2010-01-23 03:05:27 +00:00
blackbrrr
f1e51f9708 Merge branch 'master' of git://github.com/hmarr/mongoengine into deferred_fields 2010-01-19 12:27:14 -06:00
James Punteney
e0becc109d Adding tests to test the get query 2010-01-16 14:51:13 -05:00
James Punteney
47e4dd40cd Making the query actually get called for get 2010-01-16 13:24:10 -05:00
James Punteney
c38faebc25 Adding a get method to the queryset that raises exceptions if more or less than one item is returned 2010-01-16 13:21:16 -05:00
Harry Marr
21b7d8f8ea Bump to v0.2.2 2010-01-16 16:42:51 +00:00
Harry Marr
3357b55fbf Indexing on ListFields now works properly 2010-01-16 15:35:01 +00:00
Harry Marr
f01add9ef5 Moved sections from user guide to separate pages 2010-01-16 14:06:45 +00:00
blackbrrr
b0b8e11c60 Merge branch 'master' into deferred_fields 2010-01-14 11:39:09 -06:00
blackbrrr
7e0fcb9e65 groundwork for deferred fields 2010-01-14 11:39:03 -06:00
blackbrrr
972235cf06 added build, dist, egg dirs to .gitignore 2010-01-14 11:37:07 -06:00
blackbrrr
b3c9a76619 Merge branch 'master' of git://github.com/hmarr/mongoengine 2010-01-14 11:32:39 -06:00
blackbrrr
5f84d6f8f8 added URLField, DecimalField, tests. 2010-01-14 11:32:28 -06:00
blackbrrr
1cdeb8130d ObjectIdField.to_python returns pymongo.objectid.ObjectId 2010-01-14 11:32:01 -06:00
Harry Marr
ce69428cc6 Moved validate() to BaseDocument 2010-01-13 16:41:57 +00:00
Harry Marr
1818cf7114 Added q_objs to __call__ args 2010-01-12 18:33:33 +00:00
blackbrrr
b375c41586 added Q object support to QuerySet.filter 2010-01-12 12:19:59 -06:00
blackbrrr
d85ee4e051 fixed merge conflict in BaseField.__init__ 2010-01-12 12:19:30 -06:00
blackbrrr
cfc394963f added queryset chaining via 'filter' method. test included. 2010-01-12 12:16:15 -06:00
26 changed files with 2755 additions and 773 deletions

3
.gitignore vendored

@@ -2,3 +2,6 @@
.*.swp
docs/.build
docs/_build
build/
dist/
mongoengine.egg-info/

4
AUTHORS Normal file

@@ -0,0 +1,4 @@
Harry Marr <harry@hmarr.com>
Matt Dennewitz <mattdennewitz@gmail.com>
Deepak Thukral <iapain@yahoo.com>
Florian Schlachter <flori@n-schlachter.de>

MANIFEST.in

@@ -1,6 +1,6 @@
include MANIFEST.in
include README.rst
include LICENSE
include AUTHORS
recursive-include docs *
prune docs/_build/*
recursive-include tests *
recursive-exclude * *.pyc *.swp
prune docs/_build

README.rst

@@ -15,7 +15,7 @@ a `tutorial <http://hmarr.com/mongoengine/tutorial.html>`_, a `user guide
Installation
============
If you have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_
you can use ``easy_install mongoengine``. Otherwise, you can download the
you can use ``easy_install -U mongoengine``. Otherwise, you can download the
source from `GitHub <http://github.com/hmarr/mongoengine>`_ and run ``python
setup.py install``.
@@ -82,6 +82,14 @@ Tests
To run the test suite, ensure you are running a local instance of MongoDB on
the standard port, and run ``python setup.py test``.
Community
=========
- `MongoEngine Users mailing list
<http://groups.google.com/group/mongoengine-users>`_
- `MongoEngine Developers mailing list
<http://groups.google.com/group/mongoengine-dev>`_
- `#mongoengine IRC channel <irc://irc.freenode.net/mongoengine>`_
Contributing
============
The source is available on `GitHub <http://github.com/hmarr/mongoengine>`_ - to

docs/apireference.rst

@@ -20,6 +20,9 @@ Documents
.. autoclass:: mongoengine.EmbeddedDocument
:members:
.. autoclass:: mongoengine.document.MapReduceDocument
:members:
Querying
========
@@ -36,18 +39,28 @@ Fields
.. autoclass:: mongoengine.StringField
.. autoclass:: mongoengine.URLField
.. autoclass:: mongoengine.IntField
.. autoclass:: mongoengine.FloatField
.. autoclass:: mongoengine.DecimalField
.. autoclass:: mongoengine.BooleanField
.. autoclass:: mongoengine.DateTimeField
.. autoclass:: mongoengine.EmbeddedDocumentField
.. autoclass:: mongoengine.DictField
.. autoclass:: mongoengine.ListField
.. autoclass:: mongoengine.BinaryField
.. autoclass:: mongoengine.ObjectIdField
.. autoclass:: mongoengine.ReferenceField
.. autoclass:: mongoengine.GenericReferenceField

docs/changelog.rst

@@ -2,25 +2,66 @@
Changelog
=========
Changes in v0.3
===============
- Added MapReduce support
- Added ``contains``, ``startswith`` and ``endswith`` query operators (and
case-insensitive versions that are prefixed with 'i')
- Deprecated fields' ``name`` parameter, replaced with ``db_field``
- Added ``QuerySet.only`` for only retrieving specific fields
- Added ``QuerySet.in_bulk()`` for bulk querying using ids
- ``QuerySet``\ s now have a ``rewind()`` method, which is called automatically
when the iterator is exhausted, allowing ``QuerySet``\ s to be reused
- Added ``DictField``
- Added ``URLField``
- Added ``DecimalField``
- Added ``BinaryField``
- Added ``GenericReferenceField``
- Added ``get()`` and ``get_or_create()`` methods to ``QuerySet``
- ``ReferenceField``\ s may now reference the document they are defined on
(recursive references) and documents that have not yet been defined
- ``Document`` objects may now be compared for equality (equal if _ids are
equal and documents are of same type)
- ``QuerySet`` update methods now have an ``upsert`` parameter
- Added field name substitution for Javascript code (allows the user to use the
Python names for fields in JS, which are later substituted for the real field
names)
- ``Q`` objects now support regex querying
- Fixed bug where referenced documents within lists weren't properly
dereferenced
- ``ReferenceField``\ s may now be queried using their _id
- Fixed bug where ``EmbeddedDocuments`` couldn't be non-polymorphic
- ``queryset_manager`` functions now accept two arguments -- the document class
as the first and the queryset as the second
- Fixed bug where ``QuerySet.exec_js`` ignored ``Q`` objects
- Other minor fixes
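A short sketch of a few of these additions used together (the ``BlogPost`` class
below is illustrative only, not part of the changelog)::

    class BlogPost(Document):
        # 'db_field' replaces the now-deprecated 'name' keyword argument
        title = StringField(db_field='postTitle')

    post = BlogPost(title='Hello')
    post.save()

    # Bulk querying by id
    posts = BlogPost.objects.in_bulk([post.id])

    # The update methods now take an 'upsert' parameter
    BlogPost.objects(title='Hello').update_one(set__title='Hi there', upsert=True)

    # Documents of the same type with equal ids compare equal
    assert post == BlogPost.objects.get(id=post.id)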
Changes in v0.2.2
=================
- Fixed bug that prevented indexes from being used on ``ListField``\ s
- ``Document.filter()`` added as an alias to ``Document.__call__()``
- ``validate()`` may now be used on ``EmbeddedDocument``\ s
Changes in v0.2.1
=================
- Added a MongoEngine backend for Django sessions
- Added force_insert to Document.save()
- Improved querying syntax for ListField and EmbeddedDocumentField
- Added support for user-defined primary keys (_ids in MongoDB)
- Added ``force_insert`` to ``Document.save()``
- Improved querying syntax for ``ListField`` and ``EmbeddedDocumentField``
- Added support for user-defined primary keys (``_id`` in MongoDB)
Changes in v0.2
===============
- Added Q class for building advanced queries
- Added QuerySet methods for atomic updates to documents
- Fields may now specify ``unique=True`` to enforce uniqueness across a collection
- Added ``Q`` class for building advanced queries
- Added ``QuerySet`` methods for atomic updates to documents
- Fields may now specify ``unique=True`` to enforce uniqueness across a
collection
- Added option for default document ordering
- Fixed bug in index definitions
Changes in v0.1.3
=================
- Added Django authentication backend
- Added Document.meta support for indexes, which are ensured just before
- Added ``Document.meta`` support for indexes, which are ensured just before
querying takes place
- A few minor bugfixes
@@ -30,8 +71,8 @@ Changes in v0.1.2
- Query values may be processed before being used in queries
- Made connections lazy
- Fixed bug in Document dictionary-style access
- Added BooleanField
- Added Document.reload method
- Added ``BooleanField``
- Added ``Document.reload()`` method
Changes in v0.1.1

docs/conf.py

@@ -22,7 +22,7 @@ sys.path.append(os.path.abspath('..'))
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc']
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
@@ -38,7 +38,7 @@ master_doc = 'index'
# General information about the project.
project = u'MongoEngine'
copyright = u'2009, Harry Marr'
copyright = u'2009-2010, Harry Marr'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the

docs/django.rst

@@ -28,6 +28,8 @@ The :mod:`~mongoengine.django.auth` module also contains a
:func:`~mongoengine.django.auth.get_user` helper function, that takes a user's
:attr:`id` and returns a :class:`~mongoengine.django.auth.User` object.
.. versionadded:: 0.1.3
Sessions
========
Django allows the use of different backend stores for its sessions. MongoEngine
@@ -40,3 +42,5 @@ session backend, ensure that your settings module has
into your settings module::
SESSION_ENGINE = 'mongoengine.django.sessions'
.. versionadded:: 0.2.1

20
docs/guide/connecting.rst Normal file

@@ -0,0 +1,20 @@
.. _guide-connecting:
=====================
Connecting to MongoDB
=====================
To connect to a running instance of :program:`mongod`, use the
:func:`~mongoengine.connect` function. The first argument is the name of the
database to connect to. If the database does not exist, it will be created. If
the database requires authentication, :attr:`username` and :attr:`password`
arguments may be provided::
from mongoengine import connect
connect('project1', username='webapp', password='pwd123')
By default, MongoEngine assumes that the :program:`mongod` instance is running
on **localhost** on port **27017**. If MongoDB is running elsewhere, you may
provide :attr:`host` and :attr:`port` arguments to
:func:`~mongoengine.connect`::
connect('project1', host='192.168.1.35', port=12345)

docs/guide/defining-documents.rst

@@ -0,0 +1,304 @@
==================
Defining documents
==================
In MongoDB, a **document** is roughly equivalent to a **row** in an RDBMS. When
working with relational databases, rows are stored in **tables**, which have a
strict **schema** that the rows follow. MongoDB stores documents in
**collections** rather than tables - the principal difference is that no schema
is enforced at a database level.
Defining a document's schema
============================
MongoEngine allows you to define schemata for documents as this helps to reduce
coding errors, and allows for utility methods to be defined on fields which may
be present.
To define a schema for a document, create a class that inherits from
:class:`~mongoengine.Document`. Fields are specified by adding **field
objects** as class attributes to the document class::
from mongoengine import *
import datetime
class Page(Document):
title = StringField(max_length=200, required=True)
date_modified = DateTimeField(default=datetime.datetime.now)
Fields
======
By default, fields are not required. To make a field mandatory, set the
:attr:`required` keyword argument of a field to ``True``. Fields also may have
validation constraints available (such as :attr:`max_length` in the example
above). Fields may also take default values, which will be used if a value is
not provided. Default values may optionally be a callable, which will be called
to retrieve the value (such as in the above example). The field types available
are as follows:
* :class:`~mongoengine.StringField`
* :class:`~mongoengine.URLField`
* :class:`~mongoengine.IntField`
* :class:`~mongoengine.FloatField`
* :class:`~mongoengine.DecimalField`
* :class:`~mongoengine.DateTimeField`
* :class:`~mongoengine.ListField`
* :class:`~mongoengine.DictField`
* :class:`~mongoengine.ObjectIdField`
* :class:`~mongoengine.EmbeddedDocumentField`
* :class:`~mongoengine.ReferenceField`
* :class:`~mongoengine.GenericReferenceField`
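For instance, a single document class might combine several of the field types
listed above (a brief illustrative sketch, not taken from the guide itself)::

    import datetime

    class Product(Document):
        name = StringField(max_length=100, required=True)
        website = URLField()
        price = DecimalField()
        tags = ListField(StringField(max_length=30))
        attributes = DictField()
        added = DateTimeField(default=datetime.datetime.now)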
List fields
-----------
MongoDB allows the storage of lists of items. To add a list of items to a
:class:`~mongoengine.Document`, use the :class:`~mongoengine.ListField` field
type. :class:`~mongoengine.ListField` takes another field object as its first
argument, which specifies the type of elements that may be stored within the list::
class Page(Document):
tags = ListField(StringField(max_length=50))
Embedded documents
------------------
MongoDB has the ability to embed documents within other documents. Schemata may
be defined for these embedded documents, just as they may be for regular
documents. To create an embedded document, just define a document as usual, but
inherit from :class:`~mongoengine.EmbeddedDocument` rather than
:class:`~mongoengine.Document`::
class Comment(EmbeddedDocument):
content = StringField()
To embed the document within another document, use the
:class:`~mongoengine.EmbeddedDocumentField` field type, providing the embedded
document class as the first argument::
class Page(Document):
comments = ListField(EmbeddedDocumentField(Comment))
comment1 = Comment(content='Good work!')
comment2 = Comment(content='Nice article!')
page = Page(comments=[comment1, comment2])
Dictionary Fields
-----------------
Often, an embedded document may be used instead of a dictionary -- generally
this is recommended as dictionaries don't support validation or custom field
types. However, sometimes you will not know the structure of what you want to
store; in this situation a :class:`~mongoengine.DictField` is appropriate::
class SurveyResponse(Document):
date = DateTimeField()
user = ReferenceField(User)
answers = DictField()
survey_response = SurveyResponse(date=datetime.now(), user=request.user)
response_form = ResponseForm(request.POST)
survey_response.answers = response_form.cleaned_data()
survey_response.save()
Reference fields
----------------
References may be stored to other documents in the database using the
:class:`~mongoengine.ReferenceField`. Pass in another document class as the
first argument to the constructor, then simply assign document objects to the
field::
class User(Document):
name = StringField()
class Page(Document):
content = StringField()
author = ReferenceField(User)
john = User(name="John Smith")
john.save()
post = Page(content="Test Page")
post.author = john
post.save()
The :class:`User` object is automatically turned into a reference behind the
scenes, and dereferenced when the :class:`Page` object is retrieved.
To add a :class:`~mongoengine.ReferenceField` that references the document
being defined, use the string ``'self'`` in place of the document class as the
argument to :class:`~mongoengine.ReferenceField`'s constructor. To reference a
document that has not yet been defined, use the name of the undefined document
as the constructor's argument::
class Employee(Document):
name = StringField()
boss = ReferenceField('self')
profile_page = ReferenceField('ProfilePage')
class ProfilePage(Document):
content = StringField()
Generic reference fields
''''''''''''''''''''''''
A second kind of reference field also exists,
:class:`~mongoengine.GenericReferenceField`. This allows you to reference any
kind of :class:`~mongoengine.Document`, and hence doesn't take a
:class:`~mongoengine.Document` subclass as a constructor argument::
class Link(Document):
url = StringField()
class Post(Document):
title = StringField()
class Bookmark(Document):
bookmark_object = GenericReferenceField()
link = Link(url='http://hmarr.com/mongoengine/')
link.save()
post = Post(title='Using MongoEngine')
post.save()
Bookmark(bookmark_object=link).save()
Bookmark(bookmark_object=post).save()
.. note::
Using :class:`~mongoengine.GenericReferenceField`\ s is slightly less
efficient than the standard :class:`~mongoengine.ReferenceField`\ s, so if
you will only be referencing one document type, prefer the standard
:class:`~mongoengine.ReferenceField`.
Uniqueness constraints
----------------------
MongoEngine allows you to specify that a field should be unique across a
collection by providing ``unique=True`` to a :class:`~mongoengine.Field`\ 's
constructor. If you try to save a document that has the same value for a unique
field as a document that is already in the database, a
:class:`~mongoengine.OperationError` will be raised. You may also specify
multi-field uniqueness constraints by using :attr:`unique_with`, which may be
either a single field name, or a list or tuple of field names::
class User(Document):
username = StringField(unique=True)
first_name = StringField()
last_name = StringField(unique_with='first_name')
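The behaviour described above may be illustrated as follows (a rough sketch; the
field values are arbitrary)::

    User(username='bob', first_name='Bob', last_name='Smith').save()

    try:
        # A second user with the same username violates the unique constraint
        User(username='bob', first_name='Robert', last_name='Jones').save()
    except OperationError:
        print 'That username is already taken'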
Document collections
====================
Document classes that inherit **directly** from :class:`~mongoengine.Document`
will have their own **collection** in the database. The name of the collection
is by default the name of the class, converted to lowercase (so in the example
above, the collection would be called `page`). If you need to change the name
of the collection (e.g. to use MongoEngine with an existing database), then
create a class dictionary attribute called :attr:`meta` on your document, and
set :attr:`collection` to the name of the collection that you want your
document class to use::
class Page(Document):
title = StringField(max_length=200, required=True)
meta = {'collection': 'cmsPage'}
Capped collections
------------------
A :class:`~mongoengine.Document` may use a **Capped Collection** by specifying
:attr:`max_documents` and :attr:`max_size` in the :attr:`meta` dictionary.
:attr:`max_documents` is the maximum number of documents that is allowed to be
stored in the collection, and :attr:`max_size` is the maximum size of the
collection in bytes. If :attr:`max_size` is not specified and
:attr:`max_documents` is, :attr:`max_size` defaults to 10000000 bytes (10MB).
The following example shows a :class:`Log` document that will be limited to
1000 entries and 2MB of disk space::
class Log(Document):
ip_address = StringField()
meta = {'max_documents': 1000, 'max_size': 2000000}
Indexes
=======
You can specify indexes on collections to make querying faster. This is done
by creating a list of index specifications called :attr:`indexes` in the
:attr:`~mongoengine.Document.meta` dictionary, where an index specification may
either be a single field name, or a tuple containing multiple field names. A
direction may be specified on fields by prefixing the field name with a **+**
or a **-** sign. Note that direction only matters on multi-field indexes. ::
class Page(Document):
title = StringField()
rating = StringField()
meta = {
'indexes': ['title', ('title', '-rating')]
}
Ordering
========
A default ordering can be specified for your
:class:`~mongoengine.queryset.QuerySet` using the :attr:`ordering` attribute of
:attr:`~mongoengine.Document.meta`. Ordering will be applied when the
:class:`~mongoengine.queryset.QuerySet` is created, and can be overridden by
subsequent calls to :meth:`~mongoengine.queryset.QuerySet.order_by`. ::
from datetime import datetime
class BlogPost(Document):
title = StringField()
published_date = DateTimeField()
meta = {
'ordering': ['-published_date']
}
blog_post_1 = BlogPost(title="Blog Post #1")
blog_post_1.published_date = datetime(2010, 1, 5, 0, 0, 0)
blog_post_2 = BlogPost(title="Blog Post #2")
blog_post_2.published_date = datetime(2010, 1, 6, 0, 0, 0)
blog_post_3 = BlogPost(title="Blog Post #3")
blog_post_3.published_date = datetime(2010, 1, 7, 0, 0, 0)
blog_post_1.save()
blog_post_2.save()
blog_post_3.save()
# get the "first" BlogPost using default ordering
# from BlogPost.meta.ordering
latest_post = BlogPost.objects.first()
assert latest_post.title == "Blog Post #3"
# override default ordering, order BlogPosts by "published_date"
first_post = BlogPost.objects.order_by("+published_date").first()
assert first_post.title == "Blog Post #1"
Document inheritance
====================
To create a specialised type of a :class:`~mongoengine.Document` you have
defined, you may subclass it and add any extra fields or methods you may need.
As this new class is not a direct subclass of
:class:`~mongoengine.Document`, it will not be stored in its own collection; it
will use the same collection as its superclass uses. This allows for more
convenient and efficient retrieval of related documents::
# Stored in a collection named 'page'
class Page(Document):
title = StringField(max_length=200, required=True)
# Also stored in the collection named 'page'
class DatedPage(Page):
date = DateTimeField()
Working with existing data
--------------------------
To enable correct retrieval of documents involved in this kind of hierarchy,
two extra attributes are stored on each document in the database: :attr:`_cls`
and :attr:`_types`. These are hidden from the user through the MongoEngine
interface, but may not be present if you are trying to use MongoEngine with
an existing database. For this reason, you may disable this inheritance
mechanism, removing the dependency on :attr:`_cls` and :attr:`_types`, enabling
you to work with existing databases. To disable inheritance on a document
class, set :attr:`allow_inheritance` to ``False`` in the :attr:`meta`
dictionary::
# Will work with data in an existing collection named 'cmsPage'
class Page(Document):
title = StringField(max_length=200, required=True)
meta = {
'collection': 'cmsPage',
'allow_inheritance': False,
}

docs/guide/document-instances.rst

@@ -0,0 +1,65 @@
===================
Document instances
===================
To create a new document object, create an instance of the relevant document
class, providing values for its fields as its constructor keyword arguments.
You may provide values for any of the fields on the document::
>>> page = Page(title="Test Page")
>>> page.title
'Test Page'
You may also assign values to the document's fields using standard object
attribute syntax::
>>> page.title = "Example Page"
>>> page.title
'Example Page'
Saving and deleting documents
=============================
To save the document to the database, call the
:meth:`~mongoengine.Document.save` method. If the document does not exist in
the database, it will be created. If it does already exist, it will be
updated.
To delete a document, call the :meth:`~mongoengine.Document.delete` method.
Note that this will only work if the document exists in the database and has a
valid :attr:`id`.
.. seealso::
:ref:`guide-atomic-updates`
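For example, a typical create / update / delete cycle looks like this (a brief
sketch using the ``Page`` class defined earlier in the guide)::

    page = Page(title='Test Page')
    page.save()     # creates the document in the database

    page.title = 'Updated Page'
    page.save()     # updates the existing document

    page.delete()   # removes the document from the database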
Document IDs
============
Each document in the database has a unique id. This may be accessed through the
:attr:`id` attribute on :class:`~mongoengine.Document` objects. Usually, the id
will be generated automatically by the database server when the object is saved,
meaning that you may only access the :attr:`id` field once a document has been
saved::
>>> page = Page(title="Test Page")
>>> page.id
>>> page.save()
>>> page.id
ObjectId('123456789abcdef000000000')
Alternatively, you may define one of your own fields to be the document's
"primary key" by providing ``primary_key=True`` as a keyword argument to a
field's constructor. Under the hood, MongoEngine will use this field as the
:attr:`id`; in fact :attr:`id` is actually aliased to your primary key field so
you may still use :attr:`id` to access the primary key if you want::
>>> class User(Document):
... email = StringField(primary_key=True)
... name = StringField()
...
>>> bob = User(email='bob@example.com', name='Bob')
>>> bob.save()
>>> bob.id == bob.email == 'bob@example.com'
True
.. note::
If you define your own primary key field, the field implicitly becomes
required, so a :class:`ValidationError` will be thrown if you don't provide
it.

12
docs/guide/index.rst Normal file

@@ -0,0 +1,12 @@
==========
User Guide
==========
.. toctree::
:maxdepth: 2
installing
connecting
defining-documents
document-instances
querying

31
docs/guide/installing.rst Normal file

@@ -0,0 +1,31 @@
======================
Installing MongoEngine
======================
To use MongoEngine, you will need to download `MongoDB <http://mongodb.org/>`_
and ensure it is running in an accessible location. You will also need
`PyMongo <http://api.mongodb.org/python>`_ to use MongoEngine, but if you
install MongoEngine using setuptools, then the dependencies will be handled for
you.
MongoEngine is available on PyPI, so to use it you can use
:program:`easy_install`:
.. code-block:: console
# easy_install mongoengine
Alternatively, if you don't have setuptools installed, `download it from PyPi
<http://pypi.python.org/pypi/mongoengine/>`_ and run
.. code-block:: console
# python setup.py install
To use the bleeding-edge version of MongoEngine, you can get the source from
`GitHub <http://github.com/hmarr/mongoengine/>`_ and install it as above:
.. code-block:: console
# git clone git://github.com/hmarr/mongoengine
# cd mongoengine
# python setup.py install

421
docs/guide/querying.rst Normal file

@@ -0,0 +1,421 @@
=====================
Querying the database
=====================
:class:`~mongoengine.Document` classes have an :attr:`objects` attribute, which
is used for accessing the objects in the database associated with the class.
The :attr:`objects` attribute is actually a
:class:`~mongoengine.queryset.QuerySetManager`, which creates and returns a new
:class:`~mongoengine.queryset.QuerySet` object on access. The
:class:`~mongoengine.queryset.QuerySet` object may be iterated over to
fetch documents from the database::
# Prints out the names of all the users in the database
for user in User.objects:
print user.name
.. note::
Once the iteration finishes (when :class:`StopIteration` is raised),
:meth:`~mongoengine.queryset.QuerySet.rewind` will be called so that the
:class:`~mongoengine.queryset.QuerySet` may be iterated over again. The
results of the first iteration are *not* cached, so the database will be hit
each time the :class:`~mongoengine.queryset.QuerySet` is iterated over.
Filtering queries
=================
The query may be filtered by calling the
:class:`~mongoengine.queryset.QuerySet` object with field lookup keyword
arguments. The keys in the keyword arguments correspond to fields on the
:class:`~mongoengine.Document` you are querying::
# This will return a QuerySet that will only iterate over users whose
# 'country' field is set to 'uk'
uk_users = User.objects(country='uk')
Fields on embedded documents may also be referred to using field lookup syntax
by using a double-underscore in place of the dot in object attribute access
syntax::
# This will return a QuerySet that will only iterate over pages that have
# been written by a user whose 'country' field is set to 'uk'
uk_pages = Page.objects(author__country='uk')
Querying lists
--------------
On most fields, this syntax will look up documents where the field specified
matches the given value exactly, but when the field refers to a
:class:`~mongoengine.ListField`, a single item may be provided, in which case
lists that contain that item will be matched::
class Page(Document):
tags = ListField(StringField())
# This will match all pages that have the word 'coding' as an item in the
# 'tags' list
Page.objects(tags='coding')
Query operators
===============
Operators other than equality may also be used in queries; just attach the
operator name to a key with a double-underscore::
# Only find users whose age is 18 or less
young_users = User.objects(age__lte=18)
Available operators are as follows:
* ``ne`` -- not equal to
* ``lt`` -- less than
* ``lte`` -- less than or equal to
* ``gt`` -- greater than
* ``gte`` -- greater than or equal to
* ``in`` -- value is in list (a list of values should be provided)
* ``nin`` -- value is not in list (a list of values should be provided)
* ``mod`` -- ``value % x == y``, where ``x`` and ``y`` are two provided values
* ``all`` -- every item in array is in list of values provided
* ``size`` -- the size of the array is equal to the given value
* ``exists`` -- value for field exists
The following operators are available as shortcuts to querying with regular
expressions:
* ``contains`` -- string field contains value
* ``icontains`` -- string field contains value (case insensitive)
* ``startswith`` -- string field starts with value
* ``istartswith`` -- string field starts with value (case insensitive)
* ``endswith`` -- string field ends with value
* ``iendswith`` -- string field ends with value (case insensitive)
.. versionadded:: 0.3
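For example, a few of the operators listed above in use (the field names here
are illustrative)::

    # Users aged 18 or over
    User.objects(age__gte=18)

    # Users in one of the listed countries
    User.objects(country__in=['uk', 'de'])

    # Users that have a value set for 'email'
    User.objects(email__exists=True)

    # Users whose name starts with 'joe', ignoring case
    User.objects(name__istartswith='joe')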
Limiting and skipping results
=============================
Just as with traditional ORMs, you may limit the number of results returned, or
skip a number of results in your query.
:meth:`~mongoengine.queryset.QuerySet.limit` and
:meth:`~mongoengine.queryset.QuerySet.skip` methods are available on
:class:`~mongoengine.queryset.QuerySet` objects, but the preferred syntax for
achieving this is using array-slicing syntax::
# Only the first 5 people
users = User.objects[:5]
# All except for the first 5 people
users = User.objects[5:]
# 5 users, starting from the 10th user found
users = User.objects[10:15]
You may also index the query to retrieve a single result. If an item at that
index does not exist, an :class:`IndexError` will be raised. A shortcut for
retrieving the first result and returning :attr:`None` if no result exists is
provided (:meth:`~mongoengine.queryset.QuerySet.first`)::
>>> # Make sure there are no users
>>> User.drop_collection()
>>> User.objects[0]
IndexError: list index out of range
>>> User.objects.first() == None
True
>>> User(name='Test User').save()
>>> User.objects[0] == User.objects.first()
True
Retrieving unique results
-------------------------
To retrieve a result that should be unique in the collection, use
:meth:`~mongoengine.queryset.QuerySet.get`. This will raise
:class:`~mongoengine.queryset.DoesNotExist` if no document matches the query,
and :class:`~mongoengine.queryset.MultipleObjectsReturned` if more than one
document matched the query.
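For example, a brief sketch of handling these exceptions (importing them from
:mod:`mongoengine.queryset`, as referenced above)::

    from mongoengine.queryset import DoesNotExist, MultipleObjectsReturned

    try:
        user = User.objects.get(name='User A')
    except DoesNotExist:
        print 'No user called "User A" exists'
    except MultipleObjectsReturned:
        print 'More than one user called "User A" exists'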
A variation of this method exists,
:meth:`~mongoengine.queryset.QuerySet.get_or_create`, that will create a new
document with the query arguments if no documents match the query. An
additional keyword argument, :attr:`defaults` may be provided, which will be
used as default values for the new document, in the case that it should need
to be created::
>>> a = User.objects.get_or_create(name='User A', defaults={'age': 30})
>>> b = User.objects.get_or_create(name='User A', defaults={'age': 40})
>>> a.name == b.name and a.age == b.age
True
Default Document queries
========================
By default, the :attr:`~mongoengine.Document.objects` attribute on a
document returns a :class:`~mongoengine.queryset.QuerySet` that doesn't filter
the collection -- it returns all objects. This may be changed by defining a
method on a document that modifies a queryset. The method should accept two
arguments -- :attr:`doc_cls` and :attr:`queryset`. The first argument is the
:class:`~mongoengine.Document` class that the method is defined on (in this
sense, the method is more like a :func:`classmethod` than a regular method),
and the second argument is the initial queryset. The method needs to be
decorated with :func:`~mongoengine.queryset.queryset_manager` in order for it
to be recognised. ::
class BlogPost(Document):
title = StringField()
date = DateTimeField()
@queryset_manager
def objects(doc_cls, queryset):
# This may actually also be done by defining a default ordering for
# the document, but this illustrates the use of manager methods
return queryset.order_by('-date')
You don't need to call your method :attr:`objects` -- you may define as many
custom manager methods as you like::
class BlogPost(Document):
title = StringField()
published = BooleanField()
@queryset_manager
def live_posts(doc_cls, queryset):
return queryset.filter(published=True)
BlogPost(title='test1', published=False).save()
BlogPost(title='test2', published=True).save()
assert len(BlogPost.objects) == 2
assert len(BlogPost.live_posts) == 1
Aggregation
===========
MongoDB provides some aggregation methods out of the box, but there are not as
many as you typically get with an RDBMS. MongoEngine provides a wrapper around
the built-in methods and provides some of its own, which are implemented as
Javascript code that is executed on the database server.
Counting results
----------------
Just as with limiting and skipping results, there is a method on
:class:`~mongoengine.queryset.QuerySet` objects --
:meth:`~mongoengine.queryset.QuerySet.count`, but there is also a more Pythonic
way of achieving this::
num_users = len(User.objects)
Further aggregation
-------------------
You may sum over the values of a specific field on documents using
:meth:`~mongoengine.queryset.QuerySet.sum`::
yearly_expense = Employee.objects.sum('salary')
.. note::
If the field isn't present on a document, that document will be excluded from
the sum.
To get the average (mean) of a field on a collection of documents, use
:meth:`~mongoengine.queryset.QuerySet.average`::
mean_age = User.objects.average('age')
As MongoDB provides native lists, MongoEngine provides a helper method to get a
dictionary of the frequencies of items in lists across an entire collection --
:meth:`~mongoengine.queryset.QuerySet.item_frequencies`. An example of its use
would be generating "tag-clouds"::
class Article(Document):
tag = ListField(StringField())
# After adding some tagged articles...
tag_freqs = Article.objects.item_frequencies('tag', normalize=True)
from operator import itemgetter
top_tags = sorted(tag_freqs.items(), key=itemgetter(1), reverse=True)[:10]
Retrieving a subset of fields
=============================
Sometimes a subset of fields on a :class:`~mongoengine.Document` is required,
and for efficiency only these should be retrieved from the database. This issue
is especially important for MongoDB, as fields may often be extremely large
(e.g. a :class:`~mongoengine.ListField` of
:class:`~mongoengine.EmbeddedDocument`\ s, which represent the comments on a
blog post). To select only a subset of fields, use
:meth:`~mongoengine.queryset.QuerySet.only`, specifying the fields you want to
retrieve as its arguments. Note that if fields that are not downloaded are
accessed, their default value (or :attr:`None` if no default value is provided)
will be given::
>>> class Film(Document):
... title = StringField()
... year = IntField()
... rating = IntField(default=3)
...
>>> Film(title='The Shawshank Redemption', year=1994, rating=5).save()
>>> f = Film.objects.only('title').first()
>>> f.title
'The Shawshank Redemption'
>>> f.year # None
>>> f.rating # default value
3
If you later need the missing fields, just call
:meth:`~mongoengine.Document.reload` on your document.
Advanced queries
================
Sometimes calling a :class:`~mongoengine.queryset.QuerySet` object with keyword
arguments can't fully express the query you want to use -- for example if you
need to combine a number of constraints using *and* and *or*. This is made
possible in MongoEngine through the :class:`~mongoengine.queryset.Q` class.
A :class:`~mongoengine.queryset.Q` object represents part of a query, and
can be initialised using the same keyword-argument syntax you use to query
documents. To build a complex query, you may combine
:class:`~mongoengine.queryset.Q` objects using the ``&`` (and) and ``|`` (or)
operators. To use a :class:`~mongoengine.queryset.Q` object, pass it in as the
first positional argument to :attr:`Document.objects` when you filter it by
calling it with keyword arguments::
# Get published posts
Post.objects(Q(published=True) | Q(publish_date__lte=datetime.now()))
# Get top posts
Post.objects((Q(featured=True) & Q(hits__gte=1000)) | Q(hits__gte=5000))
.. warning::
Only use these advanced queries if absolutely necessary as they will execute
significantly slower than regular queries. This is because they are not
natively supported by MongoDB -- they are compiled to Javascript and sent
to the server for execution.
Server-side javascript execution
================================
Javascript functions may be written and sent to the server for execution. The
result of this is the return value of the Javascript function. This
functionality is accessed through the
:meth:`~mongoengine.queryset.QuerySet.exec_js` method on
:class:`~mongoengine.queryset.QuerySet` objects. Pass in a string containing a
Javascript function as the first argument.
The remaining positional arguments are names of fields that will be passed into
your Javascript function as its arguments. This allows functions to be written
that may be executed on any field in a collection (e.g. the
:meth:`~mongoengine.queryset.QuerySet.sum` method, which accepts the name of
the field to sum over as its argument). Note that field names passed in in this
manner are automatically translated to the names used on the database (set
using the :attr:`name` keyword argument to a field constructor).
Keyword arguments to :meth:`~mongoengine.queryset.QuerySet.exec_js` are
combined into an object called :attr:`options`, which is available in the
Javascript function. This may be used for defining specific parameters for your
function.
Some variables are made available in the scope of the Javascript function:
* ``collection`` -- the name of the collection that corresponds to the
:class:`~mongoengine.Document` class that is being used; this should be
used to get the :class:`Collection` object from :attr:`db` in Javascript
code
* ``query`` -- the query that has been generated by the
:class:`~mongoengine.queryset.QuerySet` object; this may be passed into
the :meth:`find` method on a :class:`Collection` object in the Javascript
function
* ``options`` -- an object containing the keyword arguments passed into
:meth:`~mongoengine.queryset.QuerySet.exec_js`
The following example demonstrates the intended usage of
:meth:`~mongoengine.queryset.QuerySet.exec_js` by defining a function that sums
over a field on a document (this functionality is already available through
:meth:`~mongoengine.queryset.QuerySet.sum` but is shown here for the sake of
example)::
def sum_field(document, field_name, include_negatives=True):
code = """
function(sumField) {
var total = 0.0;
db[collection].find(query).forEach(function(doc) {
var val = doc[sumField];
if (val >= 0.0 || options.includeNegatives) {
total += val;
}
});
return total;
}
"""
options = {'includeNegatives': include_negatives}
return document.objects.exec_js(code, field_name, **options)
As fields in MongoEngine may use different names in the database (set using the
:attr:`db_field` keyword argument to a :class:`Field` constructor), a mechanism
exists for replacing MongoEngine field names with the database field names in
Javascript code. When accessing a field on a collection object, use
square-bracket notation, and prefix the MongoEngine field name with a tilde.
The field name that follows the tilde will be translated to the name used in
the database. Note that when referring to fields on embedded documents,
the name of the :class:`~mongoengine.EmbeddedDocumentField`, followed by a dot,
should be used before the name of the field on the embedded document. The
following example shows how the substitutions are made::
class Comment(EmbeddedDocument):
content = StringField(db_field='body')
class BlogPost(Document):
title = StringField(db_field='doctitle')
comments = ListField(EmbeddedDocumentField(Comment), name='cs')
# Returns a list of dictionaries. Each dictionary contains a value named
# "document", which corresponds to the "title" field on a BlogPost, and
# "comment", which corresponds to an individual comment. The substitutions
# made are shown in the comments.
BlogPost.objects.exec_js("""
function() {
var comments = [];
db[collection].find(query).forEach(function(doc) {
// doc[~comments] -> doc["cs"]
var docComments = doc[~comments];
for (var i = 0; i < docComments.length; i++) {
// doc[~comments][i] -> doc["cs"][i]
var comment = doc[~comments][i];
comments.push({
// doc[~title] -> doc["doctitle"]
'document': doc[~title],
// comment[~comments.content] -> comment["body"]
'comment': comment[~comments.content]
});
}
});
return comments;
}
""")
.. _guide-atomic-updates:
Atomic updates
==============
Documents may be updated atomically by using the
:meth:`~mongoengine.queryset.QuerySet.update_one` and
:meth:`~mongoengine.queryset.QuerySet.update` methods on a
:class:`~mongoengine.queryset.QuerySet`. There are several different "modifiers"
that you may use with these methods:
* ``set`` -- set a particular value
* ``unset`` -- delete a particular value (since MongoDB v1.3+)
* ``inc`` -- increment a value by a given amount
* ``dec`` -- decrement a value by a given amount
* ``push`` -- append a value to a list
* ``push_all`` -- append several values to a list
* ``pull`` -- remove a value from a list
* ``pull_all`` -- remove several values from a list
The syntax for atomic updates is similar to the querying syntax, but the
modifier comes before the field, not after it::
>>> post = BlogPost(title='Test', page_views=0, tags=['database'])
>>> post.save()
>>> BlogPost.objects(id=post.id).update_one(inc__page_views=1)
>>> post.reload() # the document has been changed, so we need to reload it
>>> post.page_views
1
>>> BlogPost.objects(id=post.id).update_one(set__title='Example Post')
>>> post.reload()
>>> post.title
'Example Post'
>>> BlogPost.objects(id=post.id).update_one(push__tags='nosql')
>>> post.reload()
>>> post.tags
['database', 'nosql']
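The remaining list modifiers follow the same pattern; a brief sketch continuing
the example above::

    >>> BlogPost.objects(id=post.id).update_one(push_all__tags=['mongodb', 'code'])
    >>> post.reload()
    >>> post.tags
    ['database', 'nosql', 'mongodb', 'code']
    >>> BlogPost.objects(id=post.id).update_one(pull__tags='nosql')
    >>> post.reload()
    >>> post.tags
    ['database', 'mongodb', 'code']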

docs/index.rst

@@ -1,20 +1,28 @@
==============================
MongoEngine User Documentation
=======================================
==============================
MongoEngine is an Object-Document Mapper, written in Python for working with
MongoDB. To install it, simply run
.. code-block:: console
# easy_install mongoengine
# easy_install -U mongoengine
The source is available on `GitHub <http://github.com/hmarr/mongoengine>`_.
To get help with using MongoEngine, use the `MongoEngine Users mailing list
<http://groups.google.com/group/mongoengine-users>`_ or come chat on the
`#mongoengine IRC channel <irc://irc.freenode.net/mongoengine>`_.
If you are interested in contributing, join the developers' `mailing list
<http://groups.google.com/group/mongoengine-dev>`_.
.. toctree::
:maxdepth: 2
tutorial
userguide
guide/index
apireference
django
changelog

docs/userguide.rst

@@ -1,534 +0,0 @@
==========
User Guide
==========
Installing
==========
MongoEngine is available on PyPI, so to use it you can use
:program:`easy_install`
.. code-block:: console
# easy_install mongoengine
Alternatively, if you don't have setuptools installed, `download it from PyPi
<http://pypi.python.org/pypi/mongoengine/>`_ and run
.. code-block:: console
# python setup.py install
.. _guide-connecting:
Connecting to MongoDB
=====================
To connect to a running instance of :program:`mongod`, use the
:func:`~mongoengine.connect` function. The first argument is the name of the
database to connect to. If the database does not exist, it will be created. If
the database requires authentication, :attr:`username` and :attr:`password`
arguments may be provided::
from mongoengine import connect
connect('project1', username='webapp', password='pwd123')
By default, MongoEngine assumes that the :program:`mongod` instance is running
on **localhost** on port **27017**. If MongoDB is running elsewhere, you may
provide :attr:`host` and :attr:`port` arguments to
:func:`~mongoengine.connect`::
connect('project1', host='192.168.1.35', port=12345)
Defining documents
==================
In MongoDB, a **document** is roughly equivalent to a **row** in an RDBMS. When
working with relational databases, rows are stored in **tables**, which have a
strict **schema** that the rows follow. MongoDB stores documents in
**collections** rather than tables - the principle difference is that no schema
is enforced at a database level.
Defining a document's schema
----------------------------
MongoEngine allows you to define schemata for documents as this helps to reduce
coding errors, and allows for utility methods to be defined on fields which may
be present.
To define a schema for a document, create a class that inherits from
:class:`~mongoengine.Document`. Fields are specified by adding **field
objects** as class attributes to the document class::
from mongoengine import *
import datetime
class Page(Document):
title = StringField(max_length=200, required=True)
date_modified = DateTimeField(default=datetime.now)
Fields
------
By default, fields are not required. To make a field mandatory, set the
:attr:`required` keyword argument of a field to ``True``. Fields also may have
validation constraints available (such as :attr:`max_length` in the example
above). Fields may also take default values, which will be used if a value is
not provided. Default values may optionally be a callable, which will be called
to retrieve the value (such as in the above example). The field types available
are as follows:
* :class:`~mongoengine.StringField`
* :class:`~mongoengine.IntField`
* :class:`~mongoengine.FloatField`
* :class:`~mongoengine.DateTimeField`
* :class:`~mongoengine.ListField`
* :class:`~mongoengine.ObjectIdField`
* :class:`~mongoengine.EmbeddedDocumentField`
* :class:`~mongoengine.ReferenceField`
List fields
^^^^^^^^^^^
MongoDB allows the storage of lists of items. To add a list of items to a
:class:`~mongoengine.Document`, use the :class:`~mongoengine.ListField` field
type. :class:`~mongoengine.ListField` takes another field object as its first
argument, which specifies which type elements may be stored within the list::
class Page(Document):
tags = ListField(StringField(max_length=50))
Embedded documents
^^^^^^^^^^^^^^^^^^
MongoDB has the ability to embed documents within other documents. Schemata may
be defined for these embedded documents, just as they may be for regular
documents. To create an embedded document, just define a document as usual, but
inherit from :class:`~mongoengine.EmbeddedDocument` rather than
:class:`~mongoengine.Document`::
class Comment(EmbeddedDocument):
content = StringField()
To embed the document within another document, use the
:class:`~mongoengine.EmbeddedDocumentField` field type, providing the embedded
document class as the first argument::
class Page(Document):
comments = ListField(EmbeddedDocumentField(Comment))
comment1 = Comment('Good work!')
comment2 = Comment('Nice article!')
page = Page(comments=[comment1, comment2])
Reference fields
^^^^^^^^^^^^^^^^
References may be stored to other documents in the database using the
:class:`~mongoengine.ReferenceField`. Pass in another document class as the
first argument to the constructor, then simply assign document objects to the
field::
class User(Document):
name = StringField()
class Page(Document):
content = StringField()
author = ReferenceField(User)
john = User(name="John Smith")
john.save()
post = Page(content="Test Page")
post.author = john
post.save()
The :class:`User` object is automatically turned into a reference behind the
scenes, and dereferenced when the :class:`Page` object is retrieved.
Uniqueness constraints
^^^^^^^^^^^^^^^^^^^^^^
MongoEngine allows you to specify that a field should be unique across a
collection by providing ``unique=True`` to a :class:`~mongoengine.Field`\ 's
constructor. If you try to save a document that has the same value for a unique
field as a document that is already in the database, a
:class:`~mongoengine.OperationError` will be raised. You may also specify
multi-field uniqueness constraints by using :attr:`unique_with`, which may be
either a single field name, or a list or tuple of field names::
class User(Document):
username = StringField(unique=True)
first_name = StringField()
last_name = StringField(unique_with='first_name')
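The following is a rough sketch of the behaviour described above (assuming an
empty collection and the ``User`` class just defined); note that in this
version :class:`~mongoengine.queryset.OperationError` is defined in the
:mod:`mongoengine.queryset` module::

    from mongoengine.queryset import OperationError

    User(username='bob', first_name='Bob', last_name='Smith').save()
    try:
        # same username as the document above, so the unique index is violated
        User(username='bob', first_name='Robert', last_name='Jones').save()
    except OperationError:
        print 'That username is already taken'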
Document collections
--------------------
Document classes that inherit **directly** from :class:`~mongoengine.Document`
will have their own **collection** in the database. The name of the collection
is by default the name of the class, converted to lowercase (so in the example
above, the collection would be called `page`). If you need to change the name
of the collection (e.g. to use MongoEngine with an existing database), then
create a class dictionary attribute called :attr:`meta` on your document, and
set :attr:`collection` to the name of the collection that you want your
document class to use::
class Page(Document):
title = StringField(max_length=200, required=True)
meta = {'collection': 'cmsPage'}
Capped collections
^^^^^^^^^^^^^^^^^^
A :class:`~mongoengine.Document` may use a **Capped Collection** by specifying
:attr:`max_documents` and :attr:`max_size` in the :attr:`meta` dictionary.
:attr:`max_documents` is the maximum number of documents that is allowed to be
stored in the collection, and :attr:`max_size` is the maximum size of the
collection in bytes. If :attr:`max_size` is not specified and
:attr:`max_documents` is, :attr:`max_size` defaults to 10000000 bytes (10MB).
The following example shows a :class:`Log` document that will be limited to
1000 entries and 2MB of disk space::
class Log(Document):
ip_address = StringField()
meta = {'max_documents': 1000, 'max_size': 2000000}
Indexes
-------
You can specify indexes on collections to make querying faster. This is done
by creating a list of index specifications called :attr:`indexes` in the
:attr:`~mongoengine.Document.meta` dictionary, where an index specification may
either be a single field name, or a tuple containing multiple field names. A
direction may be specified on fields by prefixing the field name with a **+**
or a **-** sign. Note that direction only matters on multi-field indexes. ::
class Page(Document):
title = StringField()
rating = StringField()
meta = {
'indexes': ['title', ('title', '-rating')]
}
Ordering
--------
A default ordering can be specified for your
:class:`~mongoengine.queryset.QuerySet` using the :attr:`ordering` attribute of
:attr:`~mongoengine.Document.meta`. Ordering will be applied when the
:class:`~mongoengine.queryset.QuerySet` is created, and can be overridden by
subsequent calls to :meth:`~mongoengine.queryset.QuerySet.order_by`. ::
from datetime import datetime
class BlogPost(Document):
title = StringField()
published_date = DateTimeField()
meta = {
'ordering': ['-published_date']
}
blog_post_1 = BlogPost(title="Blog Post #1")
blog_post_1.published_date = datetime(2010, 1, 5, 0, 0, 0)
blog_post_2 = BlogPost(title="Blog Post #2")
blog_post_2.published_date = datetime(2010, 1, 6, 0, 0, 0)
blog_post_3 = BlogPost(title="Blog Post #3")
blog_post_3.published_date = datetime(2010, 1, 7, 0, 0, 0)
blog_post_1.save()
blog_post_2.save()
blog_post_3.save()
# get the "first" BlogPost using default ordering
# from BlogPost.meta.ordering
latest_post = BlogPost.objects.first()
assert latest_post.title == "Blog Post #3"
# override default ordering, order BlogPosts by "published_date"
first_post = BlogPost.objects.order_by("+published_date").first()
assert first_post.title == "Blog Post #1"
Document inheritance
--------------------
To create a specialised type of a :class:`~mongoengine.Document` you have
defined, you may subclass it and add any extra fields or methods you may need.
As this new class is not a direct subclass of
:class:`~mongoengine.Document`, it will not be stored in its own collection; it
will use the same collection as its superclass uses. This allows for more
convenient and efficient retrieval of related documents::
# Stored in a collection named 'page'
class Page(Document):
title = StringField(max_length=200, required=True)
# Also stored in the collection named 'page'
class DatedPage(Page):
date = DateTimeField()
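As a brief sketch of this behaviour (assuming an empty ``page`` collection),
documents saved as either class end up in the same collection, and querying
the parent class also returns documents saved as the subclass::

    import datetime

    Page(title='Introduction').save()
    DatedPage(title='Changelog', date=datetime.datetime(2010, 3, 17)).save()

    # Both documents live in the 'page' collection, so querying the parent
    # class finds them both, while the subclass only finds its own documents
    assert Page.objects.count() == 2
    assert DatedPage.objects.count() == 1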
Working with existing data
^^^^^^^^^^^^^^^^^^^^^^^^^^
To enable correct retrieval of documents involved in this kind of hierarchy,
two extra attributes are stored on each document in the database: :attr:`_cls`
and :attr:`_types`. These are hidden from the user through the MongoEngine
interface, but may not be present if you are trying to use MongoEngine with
an existing database. For this reason, you may disable this inheritance
mechanism, removing the dependency on :attr:`_cls` and :attr:`_types`, enabling
you to work with existing databases. To disable inheritance on a document
class, set :attr:`allow_inheritance` to ``False`` in the :attr:`meta`
dictionary::
# Will work with data in an existing collection named 'cmsPage'
class Page(Document):
title = StringField(max_length=200, required=True)
meta = {
'collection': 'cmsPage',
'allow_inheritance': False,
}
Document instances
===================
To create a new document object, create an instance of the relevant document
class, providing values for its fields as its constructor keyword arguments.
You may provide values for any of the fields on the document::
>>> page = Page(title="Test Page")
>>> page.title
'Test Page'
You may also assign values to the document's fields using standard object
attribute syntax::
>>> page.title = "Example Page"
>>> page.title
'Example Page'
Saving and deleting documents
-----------------------------
To save the document to the database, call the
:meth:`~mongoengine.Document.save` method. If the document does not exist in
the database, it will be created. If it does already exist, it will be
updated.
To delete a document, call the :meth:`~mongoengine.Document.delete` method.
Note that this will only work if the document exists in the database and has a
valid :attr:`id`.
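A minimal sketch of this lifecycle, reusing the ``Page`` class defined
earlier::

    page = Page(title='Draft')
    page.save()                  # not yet in the database, so this inserts it

    page.title = 'Published'
    page.save()                  # the document now exists, so this updates it

    page.delete()                # removes the document from the collection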
Document IDs
------------
Each document in the database has a unique id. This may be accessed through the
:attr:`id` attribute on :class:`~mongoengine.Document` objects. Usually, the id
will be generated automatically by the database server when the object is saved,
meaning that you may only access the :attr:`id` field once a document has been
saved::
>>> page = Page(title="Test Page")
>>> page.id
>>> page.save()
>>> page.id
ObjectId('123456789abcdef000000000')
Alternatively, you may define one of your own fields to be the document's
"primary key" by providing ``primary_key=True`` as a keyword argument to a
field's constructor. Under the hood, MongoEngine will use this field as the
:attr:`id`; in fact :attr:`id` is actually aliased to your primary key field so
you may still use :attr:`id` to access the primary key if you want::
>>> class User(Document):
... email = StringField(primary_key=True)
... name = StringField()
...
>>> bob = User(email='bob@example.com', name='Bob')
>>> bob.save()
>>> bob.id == bob.email == 'bob@example.com'
True
.. note::
If you define your own primary key field, the field implicitly becomes
required, so a :class:`ValidationError` will be thrown if you don't provide
it.
Querying the database
=====================
:class:`~mongoengine.Document` classes have an :attr:`objects` attribute, which
is used for accessing the objects in the database associated with the class.
The :attr:`objects` attribute is actually a
:class:`~mongoengine.queryset.QuerySetManager`, which creates and returns a new
:class:`~mongoengine.queryset.QuerySet` object on access. The
:class:`~mongoengine.queryset.QuerySet` object may be iterated over to
fetch documents from the database::
# Prints out the names of all the users in the database
for user in User.objects:
print user.name
Filtering queries
-----------------
The query may be filtered by calling the
:class:`~mongoengine.queryset.QuerySet` object with field lookup keyword
arguments. The keys in the keyword arguments correspond to fields on the
:class:`~mongoengine.Document` you are querying::
# This will return a QuerySet that will only iterate over users whose
# 'country' field is set to 'uk'
uk_users = User.objects(country='uk')
Fields on embedded documents may also be referred to using field lookup syntax
by using a double-underscore in place of the dot in object attribute access
syntax::
# This will return a QuerySet that will only iterate over pages that have
# been written by a user whose 'country' field is set to 'uk'
uk_pages = Page.objects(author__country='uk')
Querying lists
^^^^^^^^^^^^^^
On most fields, this syntax will look up documents where the field specified
matches the given value exactly, but when the field refers to a
:class:`~mongoengine.ListField`, a single item may be provided, in which case
lists that contain that item will be matched::
class Page(Document):
tags = ListField(StringField())
# This will match all pages that have the word 'coding' as an item in the
# 'tags' list
Page.objects(tags='coding')
Query operators
---------------
Operators other than equality may also be used in queries; just attach the
operator name to a key with a double-underscore::
# Only find users whose age is 18 or less
young_users = User.objects(age__lte=18)
Available operators are as follows:
* ``ne`` -- not equal to
* ``lt`` -- less than
* ``lte`` -- less than or equal to
* ``gt`` -- greater than
* ``gte`` -- greater than or equal to
* ``in`` -- value is in list (a list of values should be provided)
* ``nin`` -- value is not in list (a list of values should be provided)
* ``mod`` -- ``value % x == y``, where ``x`` and ``y`` are two provided values
* ``all`` -- every item in array is in list of values provided
* ``size`` -- the size of the array is equal to the given value
* ``exists`` -- value for field exists
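For example, a few of these operators in combination (reusing field names from
the examples above)::

    # users whose country is one of the given values
    User.objects(country__in=['uk', 'de', 'fr'])

    # users that have an 'age' field set on their document
    User.objects(age__exists=True)

    # pages that are tagged with exactly three tags
    Page.objects(tags__size=3)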
Limiting and skipping results
-----------------------------
Just as with traditional ORMs, you may limit the number of results returned, or
skip a number of results in your query. The
:meth:`~mongoengine.queryset.QuerySet.limit` and
:meth:`~mongoengine.queryset.QuerySet.skip` methods are available on
:class:`~mongoengine.queryset.QuerySet` objects, but the preferred syntax for
achieving this is using array-slicing syntax::
# Only the first 5 people
users = User.objects[:5]
# All except for the first 5 people
users = User.objects[5:]
# 5 users, starting from the 10th user found
users = User.objects[10:15]
Aggregation
-----------
MongoDB provides some aggregation methods out of the box, but there are not as
many as you typically get with an RDBMS. MongoEngine provides a wrapper around
the built-in methods and provides some of its own, which are implemented as
Javascript code that is executed on the database server.
Counting results
^^^^^^^^^^^^^^^^
Just as with limiting and skipping results, there is a method on
:class:`~mongoengine.queryset.QuerySet` objects --
:meth:`~mongoengine.queryset.QuerySet.count`, but there is also a more Pythonic
way of achieving this::
num_users = len(User.objects)
Further aggregation
^^^^^^^^^^^^^^^^^^^
You may sum over the values of a specific field on documents using
:meth:`~mongoengine.queryset.QuerySet.sum`::
yearly_expense = Employee.objects.sum('salary')
.. note::
If the field isn't present on a document, that document will be excluded from
the sum.
To get the average (mean) of a field on a collection of documents, use
:meth:`~mongoengine.queryset.QuerySet.average`::
mean_age = User.objects.average('age')
As MongoDB provides native lists, MongoEngine provides a helper method to get a
dictionary of the frequencies of items in lists across an entire collection --
:meth:`~mongoengine.queryset.QuerySet.item_frequencies`. An example of its use
would be generating "tag-clouds"::
class Article(Document):
tag = ListField(StringField())
# After adding some tagged articles...
tag_freqs = Article.objects.item_frequencies('tag', normalize=True)
from operator import itemgetter
top_tags = sorted(tag_freqs.items(), key=itemgetter(1), reverse=True)[:10]
Advanced queries
----------------
Sometimes calling a :class:`~mongoengine.queryset.QuerySet` object with keyword
arguments can't fully express the query you want to use -- for example if you
need to combine a number of constraints using *and* and *or*. This is made
possible in MongoEngine through the :class:`~mongoengine.queryset.Q` class.
A :class:`~mongoengine.queryset.Q` object represents part of a query, and
can be initialised using the same keyword-argument syntax you use to query
documents. To build a complex query, you may combine
:class:`~mongoengine.queryset.Q` objects using the ``&`` (and) and ``|`` (or)
operators. To use :class:`~mongoengine.queryset.Q` objects, pass them in
as positional arguments to :attr:`Document.objects` when you filter it by
calling it with keyword arguments::
# Get published posts
Post.objects(Q(published=True) | Q(publish_date__lte=datetime.now()))
# Get top posts
Post.objects((Q(featured=True) & Q(hits__gte=1000)) | Q(hits__gte=5000))
.. warning::
Only use these advanced queries if absolutely necessary as they will execute
significantly slower than regular queries. This is because they are not
natively supported by MongoDB -- they are compiled to Javascript and sent
to the server for execution.
Atomic updates
--------------
Documents may be updated atomically by using the
:meth:`~mongoengine.queryset.QuerySet.update_one` and
:meth:`~mongoengine.queryset.QuerySet.update` methods on a
:class:`~mongoengine.queryset.QuerySet`. There are several different "modifiers"
that you may use with these methods:
* ``set`` -- set a particular value
* ``unset`` -- delete a particular value (since MongoDB v1.3+)
* ``inc`` -- increment a value by a given amount
* ``dec`` -- decrement a value by a given amount
* ``push`` -- append a value to a list
* ``push_all`` -- append several values to a list
* ``pull`` -- remove a value from a list
* ``pull_all`` -- remove several values from a list
The syntax for atomic updates is similar to the querying syntax, but the
modifier comes before the field, not after it::
>>> post = BlogPost(title='Test', page_views=0, tags=['database'])
>>> post.save()
>>> BlogPost.objects(id=post.id).update_one(inc__page_views=1)
>>> post.reload() # the document has been changed, so we need to reload it
>>> post.page_views
1
>>> BlogPost.objects(id=post.id).update_one(set__title='Example Post')
>>> post.reload()
>>> post.title
'Example Post'
>>> BlogPost.objects(id=post.id).update_one(push__tags='nosql')
>>> post.reload()
>>> post.tags
['database', 'nosql']


@@ -12,7 +12,7 @@ __all__ = (document.__all__ + fields.__all__ + connection.__all__ +
__author__ = 'Harry Marr'
VERSION = (0, 2, 1)
VERSION = (0, 3, 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])


@@ -3,6 +3,12 @@ from queryset import QuerySet, QuerySetManager
import pymongo
_document_registry = {}
def get_document(name):
return _document_registry[name]
class ValidationError(Exception):
pass
@@ -11,10 +17,18 @@ class BaseField(object):
"""A base class for fields in a MongoDB document. Instances of this class
may be added to subclasses of `Document` to define a document's schema.
"""
# Fields may have _types inserted into indexes by default
_index_with_types = True
def __init__(self, name=None, required=False, default=None, unique=False,
unique_with=None, primary_key=False):
self.name = name if not primary_key else '_id'
def __init__(self, db_field=None, name=None, required=False, default=None,
unique=False, unique_with=None, primary_key=False):
self.db_field = (db_field or name) if not primary_key else '_id'
if name:
import warnings
msg = "Fields' 'name' attribute deprecated in favour of 'db_field'"
warnings.warn(msg, DeprecationWarning)
self.name = None
self.required = required or primary_key
self.default = default
self.unique = bool(unique or unique_with)
@@ -53,7 +67,7 @@ class BaseField(object):
"""
return self.to_python(value)
def prepare_query_value(self, value):
def prepare_query_value(self, op, value):
"""Prepare a value that is being used in a query for PyMongo.
"""
return value
@@ -69,14 +83,19 @@ class ObjectIdField(BaseField):
"""
def to_python(self, value):
return str(value)
return value
# return unicode(value)
def to_mongo(self, value):
if not isinstance(value, pymongo.objectid.ObjectId):
return pymongo.objectid.ObjectId(str(value))
try:
return pymongo.objectid.ObjectId(str(value))
except Exception, e:
#e.message attribute has been deprecated since Python 2.6
raise ValidationError(str(e))
return value
def prepare_query_value(self, value):
def prepare_query_value(self, op, value):
return self.to_mongo(value)
def validate(self, value):
@@ -99,6 +118,7 @@ class DocumentMetaclass(type):
doc_fields = {}
class_name = [name]
superclasses = {}
simple_class = True
for base in bases:
# Include all fields present in superclasses
if hasattr(base, '_fields'):
@@ -107,19 +127,47 @@ class DocumentMetaclass(type):
# Get superclasses from superclass
superclasses[base._class_name] = base
superclasses.update(base._superclasses)
if hasattr(base, '_meta'):
# Ensure that the Document class may be subclassed -
# inheritance may be disabled to remove dependency on
# additional fields _cls and _types
if base._meta.get('allow_inheritance', True) == False:
raise ValueError('Document %s may not be subclassed' %
base.__name__)
else:
simple_class = False
meta = attrs.get('_meta', attrs.get('meta', {}))
if 'allow_inheritance' not in meta:
meta['allow_inheritance'] = True
# Only simple classes - direct subclasses of Document - may set
# allow_inheritance to False
if not simple_class and not meta['allow_inheritance']:
raise ValueError('Only direct subclasses of Document may set '
'"allow_inheritance" to False')
attrs['_meta'] = meta
attrs['_class_name'] = '.'.join(reversed(class_name))
attrs['_superclasses'] = superclasses
# Add the document's fields to the _fields attribute
for attr_name, attr_value in attrs.items():
if hasattr(attr_value, "__class__") and \
issubclass(attr_value.__class__, BaseField):
if not attr_value.name:
attr_value.name = attr_name
issubclass(attr_value.__class__, BaseField):
attr_value.name = attr_name
if not attr_value.db_field:
attr_value.db_field = attr_name
doc_fields[attr_name] = attr_value
attrs['_fields'] = doc_fields
return super_new(cls, name, bases, attrs)
new_class = super_new(cls, name, bases, attrs)
for field in new_class._fields.values():
field.owner_document = new_class
return new_class
class TopLevelDocumentMetaclass(DocumentMetaclass):
@@ -128,6 +176,8 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
"""
def __new__(cls, name, bases, attrs):
global _document_registry
super_new = super(TopLevelDocumentMetaclass, cls).__new__
# Classes defined in this package are abstract and should not have
# their own metadata with DB collection, etc.
@@ -139,21 +189,12 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
collection = name.lower()
simple_class = True
id_field = None
base_indexes = []
# Subclassed documents inherit collection from superclass
for base in bases:
if hasattr(base, '_meta') and 'collection' in base._meta:
# Ensure that the Document class may be subclassed -
# inheritance may be disabled to remove dependency on
# additional fields _cls and _types
if base._meta.get('allow_inheritance', True) == False:
raise ValueError('Document %s may not be subclassed' %
base.__name__)
else:
simple_class = False
collection = base._meta['collection']
id_field = id_field or base._meta.get('id_field')
@@ -161,7 +202,6 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
meta = {
'collection': collection,
'allow_inheritance': True,
'max_documents': None,
'max_size': None,
'ordering': [], # default ordering applied at runtime
@@ -171,14 +211,6 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
# Apply document-defined meta options
meta.update(attrs.get('meta', {}))
meta['indexes'] += base_indexes
# Only simple classes - direct subclasses of Document - may set
# allow_inheritance to False
if not simple_class and not meta['allow_inheritance']:
raise ValueError('Only direct subclasses of Document may set '
'"allow_inheritance" to False')
attrs['_meta'] = meta
# Set up collection manager, needs the class to have fields so use
@@ -186,6 +218,10 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
new_class = super_new(cls, name, bases, attrs)
new_class.objects = QuerySetManager()
user_indexes = [QuerySet._build_index_spec(new_class, spec)
for spec in meta['indexes']] + base_indexes
new_class._meta['indexes'] = user_indexes
unique_indexes = []
for field_name, field in new_class._fields.items():
# Generate a list of indexes needed by uniqueness constraints
@@ -204,7 +240,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
parts = other_name.split('.')
# Lookup real name
parts = QuerySet._lookup_field(new_class, parts)
name_parts = [part.name for part in parts]
name_parts = [part.db_field for part in parts]
unique_with.append('.'.join(name_parts))
# Unique field should be required
parts[-1].required = True
@@ -228,7 +264,10 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
if not new_class._meta['id_field']:
new_class._meta['id_field'] = 'id'
new_class.id = new_class._fields['id'] = ObjectIdField(name='_id')
new_class._fields['id'] = ObjectIdField(db_field='_id')
new_class.id = new_class._fields['id']
_document_registry[name] = new_class
return new_class
@@ -246,6 +285,25 @@ class BaseDocument(object):
value = getattr(self, attr_name, None)
setattr(self, attr_name, value)
def validate(self):
"""Ensure that all fields' values are valid and that required fields
are present.
"""
# Get a list of tuples of field names and their current values
fields = [(field, getattr(self, name))
for name, field in self._fields.items()]
# Ensure that each field is matched to a valid value
for field, value in fields:
if value is not None:
try:
field.validate(value)
except (ValueError, AttributeError, AssertionError), e:
raise ValidationError('Invalid value for field of type "' +
field.__class__.__name__ + '"')
elif field.required:
raise ValidationError('Field "%s" is required' % field.name)
@classmethod
def _get_subclasses(cls):
"""Return a dictionary of all subclasses (found recursively).
@@ -311,7 +369,7 @@ class BaseDocument(object):
for field_name, field in self._fields.items():
value = getattr(self, field_name, None)
if value is not None:
data[field.name] = field.to_mongo(value)
data[field.db_field] = field.to_mongo(value)
# Only add _cls and _types if allow_inheritance is not False
if not (hasattr(self, '_meta') and
self._meta.get('allow_inheritance', True) == False):
@@ -321,7 +379,7 @@ class BaseDocument(object):
@classmethod
def _from_son(cls, son):
"""Create an instance of a Document (subclass) from a PyMongo SOM.
"""Create an instance of a Document (subclass) from a PyMongo SON.
"""
# get the class name from the document, falling back to the given
# class if unavailable
@@ -344,8 +402,18 @@ class BaseDocument(object):
return None
cls = subclasses[class_name]
for field_name, field in cls._fields.items():
if field.name in data:
data[field_name] = field.to_python(data[field.name])
present_fields = data.keys()
return cls(**data)
for field_name, field in cls._fields.items():
if field.db_field in data:
data[field_name] = field.to_python(data[field.db_field])
obj = cls(**data)
obj._present_fields = present_fields
return obj
def __eq__(self, other):
if isinstance(other, self.__class__) and hasattr(other, 'id'):
if self.id == other.id:
return True
return False


@@ -7,7 +7,6 @@ __all__ = ['ConnectionError', 'connect']
_connection_settings = {
'host': 'localhost',
'port': 27017,
'pool_size': 1,
}
_connection = None
@@ -60,3 +59,4 @@ def connect(db, username=None, password=None, **kwargs):
_db_name = db
_db_username = username
_db_password = password
return _get_db()


@@ -30,6 +30,7 @@ class User(Document):
is_active = BooleanField(default=True)
is_superuser = BooleanField(default=False)
last_login = DateTimeField(default=datetime.datetime.now)
date_joined = DateTimeField(default=datetime.datetime.now)
def get_full_name(self):
"""Returns the users first and last names, separated by a space.
@@ -53,6 +54,8 @@ class User(Document):
salt = get_hexdigest(algo, str(random()), str(random()))[:5]
hash = get_hexdigest(algo, salt, raw_password)
self.password = '%s$%s$%s' % (algo, salt, hash)
self.save()
return self
def check_password(self, raw_password):
"""Checks the user's password against a provided password - always use
@@ -68,10 +71,26 @@ class User(Document):
"""Create (and save) a new user with the given username, password and
email address.
"""
user = User(username=username, email=email)
now = datetime.datetime.now()
# Normalize the address by lowercasing the domain part of the email
# address.
# Not sure why we'r allowing null email when its not allowed in django
if email is not None:
try:
email_name, domain_part = email.strip().split('@', 1)
except ValueError:
pass
else:
email = '@'.join([email_name, domain_part.lower()])
user = User(username=username, email=email, date_joined=now)
user.set_password(password)
user.save()
return user
def get_and_delete_messages(self):
return []
class MongoEngineBackend(object):


@@ -78,9 +78,9 @@ class Document(BaseDocument):
object_id = collection.save(doc, safe=safe)
except pymongo.errors.OperationFailure, err:
message = 'Could not save document (%s)'
if 'duplicate key' in str(err):
message = 'Tried to save duplicate unique keys (%s)'
raise OperationError(message % str(err))
if u'duplicate key' in unicode(err):
message = u'Tried to save duplicate unique keys (%s)'
raise OperationError(message % unicode(err))
id_field = self._meta['id_field']
self[id_field] = self._fields[id_field].to_python(object_id)
@@ -95,7 +95,8 @@ class Document(BaseDocument):
try:
self.__class__.objects(**{id_field: object_id}).delete(safe=safe)
except pymongo.errors.OperationFailure, err:
raise OperationError('Could not delete document (%s)' % str(err))
message = u'Could not delete document (%s)' % err.message
raise OperationError(message)
def reload(self):
"""Reloads all attributes from the database.
@@ -107,25 +108,6 @@ class Document(BaseDocument):
for field in self._fields:
setattr(self, field, obj[field])
def validate(self):
"""Ensure that all fields' values are valid and that required fields
are present.
"""
# Get a list of tuples of field names and their current values
fields = [(field, getattr(self, name))
for name, field in self._fields.items()]
# Ensure that each field is matched to a valid value
for field, value in fields:
if value is not None:
try:
field.validate(value)
except (ValueError, AttributeError, AssertionError), e:
raise ValidationError('Invalid value for field of type "' +
field.__class__.__name__ + '"')
elif field.required:
raise ValidationError('Field "%s" is required' % field.name)
@classmethod
def drop_collection(cls):
"""Drops the entire collection associated with this
@@ -133,3 +115,43 @@ class Document(BaseDocument):
"""
db = _get_db()
db.drop_collection(cls._meta['collection'])
class MapReduceDocument(object):
"""A document returned from a map/reduce query.
:param collection: An instance of :class:`~pymongo.Collection`
:param key: Document/result key, often an instance of
:class:`~pymongo.objectid.ObjectId`. If supplied as
an ``ObjectId`` found in the given ``collection``,
the object can be accessed via the ``object`` property.
:param value: The result(s) for this key.
.. versionadded:: 0.3
"""
def __init__(self, document, collection, key, value):
self._document = document
self._collection = collection
self.key = key
self.value = value
@property
def object(self):
"""Lazy-load the object referenced by ``self.key``. ``self.key``
should be the ``primary_key``.
"""
id_field = self._document()._meta['id_field']
id_field_type = type(id_field)
if not isinstance(self.key, id_field_type):
try:
self.key = id_field_type(self.key)
except:
raise Exception("Could not cast key as %s" % \
id_field_type.__name__)
if not hasattr(self, "_key_object"):
self._key_object = self._document.objects.with_id(self.key)
return self._key_object
return self._key_object


@@ -1,26 +1,31 @@
from base import BaseField, ObjectIdField, ValidationError
from base import BaseField, ObjectIdField, ValidationError, get_document
from document import Document, EmbeddedDocument
from connection import _get_db
import re
import pymongo
import datetime
import decimal
__all__ = ['StringField', 'IntField', 'FloatField', 'BooleanField',
'DateTimeField', 'EmbeddedDocumentField', 'ListField',
'ObjectIdField', 'ReferenceField', 'ValidationError']
'DateTimeField', 'EmbeddedDocumentField', 'ListField', 'DictField',
'ObjectIdField', 'ReferenceField', 'ValidationError',
'DecimalField', 'URLField', 'GenericReferenceField',
'BinaryField']
RECURSIVE_REFERENCE_CONSTANT = 'self'
class StringField(BaseField):
"""A unicode string field.
"""
def __init__(self, regex=None, max_length=None, **kwargs):
self.regex = re.compile(regex) if regex else None
self.max_length = max_length
super(StringField, self).__init__(**kwargs)
def to_python(self, value):
return unicode(value)
@@ -37,6 +42,57 @@ class StringField(BaseField):
def lookup_member(self, member_name):
return None
def prepare_query_value(self, op, value):
if not isinstance(op, basestring):
return value
if op.lstrip('i') in ('startswith', 'endswith', 'contains'):
flags = 0
if op.startswith('i'):
flags = re.IGNORECASE
op = op.lstrip('i')
regex = r'%s'
if op == 'startswith':
regex = r'^%s'
elif op == 'endswith':
regex = r'%s$'
value = re.compile(regex % value, flags)
return value
class URLField(StringField):
"""A field that validates input as a URL.
.. versionadded:: 0.3
"""
URL_REGEX = re.compile(
r'^https?://'
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+[A-Z]{2,6}\.?|'
r'localhost|'
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'
r'(?::\d+)?'
r'(?:/?|[/?]\S+)$', re.IGNORECASE
)
def __init__(self, verify_exists=False, **kwargs):
self.verify_exists = verify_exists
super(URLField, self).__init__(**kwargs)
def validate(self, value):
if not URLField.URL_REGEX.match(value):
raise ValidationError('Invalid URL: %s' % value)
if self.verify_exists:
import urllib2
try:
request = urllib2.Request(value)
response = urllib2.urlopen(request)
except Exception, e:
message = 'This URL appears to be a broken link: %s' % e
raise ValidationError(message)
class IntField(BaseField):
"""An integer field.
@@ -45,12 +101,15 @@ class IntField(BaseField):
def __init__(self, min_value=None, max_value=None, **kwargs):
self.min_value, self.max_value = min_value, max_value
super(IntField, self).__init__(**kwargs)
def to_python(self, value):
return int(value)
def validate(self, value):
assert isinstance(value, (int, long))
try:
value = int(value)
except:
raise ValidationError('%s could not be converted to int' % value)
if self.min_value is not None and value < self.min_value:
raise ValidationError('Integer value is too small')
@@ -66,11 +125,13 @@ class FloatField(BaseField):
def __init__(self, min_value=None, max_value=None, **kwargs):
self.min_value, self.max_value = min_value, max_value
super(FloatField, self).__init__(**kwargs)
def to_python(self, value):
return float(value)
def validate(self, value):
if isinstance(value, int):
value = float(value)
assert isinstance(value, float)
if self.min_value is not None and value < self.min_value:
@@ -80,12 +141,43 @@ class FloatField(BaseField):
raise ValidationError('Float value is too large')
class DecimalField(BaseField):
"""A fixed-point decimal number field.
.. versionadded:: 0.3
"""
def __init__(self, min_value=None, max_value=None, **kwargs):
self.min_value, self.max_value = min_value, max_value
super(DecimalField, self).__init__(**kwargs)
def to_python(self, value):
if not isinstance(value, basestring):
value = unicode(value)
return decimal.Decimal(value)
def validate(self, value):
if not isinstance(value, decimal.Decimal):
if not isinstance(value, basestring):
value = str(value)
try:
value = decimal.Decimal(value)
except Exception, exc:
raise ValidationError('Could not convert to decimal: %s' % exc)
if self.min_value is not None and value < self.min_value:
raise ValidationError('Decimal value is too small')
if self.max_value is not None and value > self.max_value:
raise ValidationError('Decimal value is too large')
class BooleanField(BaseField):
"""A boolean field type.
.. versionadded:: 0.1.2
"""
def to_python(self, value):
return bool(value)
@@ -102,8 +194,8 @@ class DateTimeField(BaseField):
class EmbeddedDocumentField(BaseField):
"""An embedded document field. Only valid values are subclasses of
:class:`~mongoengine.EmbeddedDocument`.
"""An embedded document field. Only valid values are subclasses of
:class:`~mongoengine.EmbeddedDocument`.
"""
def __init__(self, document, **kwargs):
@@ -112,7 +204,7 @@ class EmbeddedDocumentField(BaseField):
'to an EmbeddedDocumentField')
self.document = document
super(EmbeddedDocumentField, self).__init__(**kwargs)
def to_python(self, value):
if not isinstance(value, self.document):
return self.document._from_son(value)
@@ -122,18 +214,19 @@ class EmbeddedDocumentField(BaseField):
return self.document.to_mongo(value)
def validate(self, value):
"""Make sure that the document instance is an instance of the
"""Make sure that the document instance is an instance of the
EmbeddedDocument subclass provided when the document was defined.
"""
# Using isinstance also works for subclasses of self.document
if not isinstance(value, self.document):
raise ValidationError('Invalid embedded document instance '
'provided to an EmbeddedDocumentField')
self.document.validate(value)
def lookup_member(self, member_name):
return self.document._fields.get(member_name)
def prepare_query_value(self, value):
def prepare_query_value(self, op, value):
return self.to_mongo(value)
@@ -142,6 +235,9 @@ class ListField(BaseField):
of the field to be used as a list in the database.
"""
# ListFields cannot be indexed with _types - MongoDB doesn't support this
_index_with_types = False
def __init__(self, field, **kwargs):
if not isinstance(field, BaseField):
raise ValidationError('Argument to ListField constructor must be '
@@ -149,6 +245,42 @@ class ListField(BaseField):
self.field = field
super(ListField, self).__init__(**kwargs)
def __get__(self, instance, owner):
"""Descriptor to automatically dereference references.
"""
if instance is None:
# Document class being used rather than a document object
return self
if isinstance(self.field, ReferenceField):
referenced_type = self.field.document_type
# Get value from document instance if available
value_list = instance._data.get(self.name)
if value_list:
deref_list = []
for value in value_list:
# Dereference DBRefs
if isinstance(value, (pymongo.dbref.DBRef)):
value = _get_db().dereference(value)
deref_list.append(referenced_type._from_son(value))
else:
deref_list.append(value)
instance._data[self.name] = deref_list
if isinstance(self.field, GenericReferenceField):
value_list = instance._data.get(self.name)
if value_list:
deref_list = []
for value in value_list:
# Dereference DBRefs
if isinstance(value, (dict, pymongo.son.SON)):
deref_list.append(self.field.dereference(value))
else:
deref_list.append(value)
instance._data[self.name] = deref_list
return super(ListField, self).__get__(instance, owner)
def to_python(self, value):
return [self.field.to_python(item) for item in value]
@@ -164,30 +296,63 @@ class ListField(BaseField):
try:
[self.field.validate(item) for item in value]
except:
raise ValidationError('All items in a list field must be of the '
'specified type')
except Exception, err:
raise ValidationError('Invalid ListField item (%s)' % str(err))
def prepare_query_value(self, value):
def prepare_query_value(self, op, value):
if op in ('set', 'unset'):
return [self.field.to_mongo(v) for v in value]
return self.field.to_mongo(value)
def lookup_member(self, member_name):
return self.field.lookup_member(member_name)
class DictField(BaseField):
"""A dictionary field that wraps a standard Python dictionary. This is
similar to an embedded document, but the structure is not defined.
.. versionadded:: 0.3
"""
def validate(self, value):
"""Make sure that a list of valid fields is being used.
"""
if not isinstance(value, dict):
raise ValidationError('Only dictionaries may be used in a '
'DictField')
if any(('.' in k or '$' in k) for k in value):
raise ValidationError('Invalid dictionary key name - keys may not '
'contain "." or "$" characters')
def lookup_member(self, member_name):
return BaseField(db_field=member_name)
class ReferenceField(BaseField):
"""A reference to a document that will be automatically dereferenced on
access (lazily).
"""
def __init__(self, document_type, **kwargs):
if not issubclass(document_type, Document):
raise ValidationError('Argument to ReferenceField constructor '
'must be a top level document class')
self.document_type = document_type
if not isinstance(document_type, basestring):
if not issubclass(document_type, (Document, basestring)):
raise ValidationError('Argument to ReferenceField constructor '
'must be a document class or a string')
self.document_type_obj = document_type
self.document_obj = None
super(ReferenceField, self).__init__(**kwargs)
@property
def document_type(self):
if isinstance(self.document_type_obj, basestring):
if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT:
self.document_type_obj = self.owner_document
else:
self.document_type_obj = get_document(self.document_type_obj)
return self.document_type_obj
def __get__(self, instance, owner):
"""Descriptor to allow lazy dereferencing.
"""
@@ -202,28 +367,27 @@ class ReferenceField(BaseField):
value = _get_db().dereference(value)
if value is not None:
instance._data[self.name] = self.document_type._from_son(value)
return super(ReferenceField, self).__get__(instance, owner)
def to_mongo(self, document):
if isinstance(document, (str, unicode, pymongo.objectid.ObjectId)):
# document may already be an object id
id_ = document
else:
id_field_name = self.document_type._meta['id_field']
id_field = self.document_type._fields[id_field_name]
if isinstance(document, Document):
# We need the id from the saved object to create the DBRef
id_ = document.id
if id_ is None:
raise ValidationError('You can only reference documents once '
'they have been saved to the database')
else:
id_ = document
# id may be a string rather than an ObjectID object
if not isinstance(id_, pymongo.objectid.ObjectId):
id_ = pymongo.objectid.ObjectId(id_)
id_ = id_field.to_mongo(id_)
collection = self.document_type._meta['collection']
return pymongo.dbref.DBRef(collection, id_)
def prepare_query_value(self, value):
def prepare_query_value(self, op, value):
return self.to_mongo(value)
def validate(self, value):
@@ -231,3 +395,70 @@ class ReferenceField(BaseField):
def lookup_member(self, member_name):
return self.document_type._fields.get(member_name)
class GenericReferenceField(BaseField):
"""A reference to *any* :class:`~mongoengine.document.Document` subclass
that will be automatically dereferenced on access (lazily).
.. versionadded:: 0.3
"""
def __get__(self, instance, owner):
if instance is None:
return self
value = instance._data.get(self.name)
if isinstance(value, (dict, pymongo.son.SON)):
instance._data[self.name] = self.dereference(value)
return super(GenericReferenceField, self).__get__(instance, owner)
def dereference(self, value):
doc_cls = get_document(value['_cls'])
reference = value['_ref']
doc = _get_db().dereference(reference)
if doc is not None:
doc = doc_cls._from_son(doc)
return doc
def to_mongo(self, document):
id_field_name = document.__class__._meta['id_field']
id_field = document.__class__._fields[id_field_name]
if isinstance(document, Document):
# We need the id from the saved object to create the DBRef
id_ = document.id
if id_ is None:
raise ValidationError('You can only reference documents once '
'they have been saved to the database')
else:
id_ = document
id_ = id_field.to_mongo(id_)
collection = document._meta['collection']
ref = pymongo.dbref.DBRef(collection, id_)
return {'_cls': document.__class__.__name__, '_ref': ref}
def prepare_query_value(self, op, value):
return self.to_mongo(value)['_ref']
class BinaryField(BaseField):
"""A binary data field.
"""
def __init__(self, max_bytes=None, **kwargs):
self.max_bytes = max_bytes
super(BinaryField, self).__init__(**kwargs)
def to_mongo(self, value):
return pymongo.binary.Binary(value)
def to_python(self, value):
return str(value)
def validate(self, value):
assert isinstance(value, str)
if self.max_bytes is not None and len(value) > self.max_bytes:
raise ValidationError('Binary value is too long')


@@ -1,16 +1,25 @@
from connection import _get_db
import pymongo
import re
import copy
__all__ = ['queryset_manager', 'Q', 'InvalidQueryError',
__all__ = ['queryset_manager', 'Q', 'InvalidQueryError',
'InvalidCollectionError']
# The maximum number of items to display in a QuerySet.__repr__
REPR_OUTPUT_SIZE = 20
class DoesNotExist(Exception):
pass
class MultipleObjectsReturned(Exception):
pass
class InvalidQueryError(Exception):
pass
@@ -19,27 +28,32 @@ class OperationError(Exception):
pass
RE_TYPE = type(re.compile(''))
class Q(object):
OR = '||'
AND = '&&'
OPERATORS = {
'eq': 'this.%(field)s == %(value)s',
'neq': 'this.%(field)s != %(value)s',
'ne': 'this.%(field)s != %(value)s',
'gt': 'this.%(field)s > %(value)s',
'gte': 'this.%(field)s >= %(value)s',
'lt': 'this.%(field)s < %(value)s',
'lte': 'this.%(field)s <= %(value)s',
'lte': 'this.%(field)s <= %(value)s',
'in': 'this.%(field)s.indexOf(%(value)s) != -1',
'nin': 'this.%(field)s.indexOf(%(value)s) == -1',
'in': '%(value)s.indexOf(this.%(field)s) != -1',
'nin': '%(value)s.indexOf(this.%(field)s) == -1',
'mod': '%(field)s %% %(value)s',
'all': ('%(value)s.every(function(a){'
'return this.%(field)s.indexOf(a) != -1 })'),
'size': 'this.%(field)s.length == %(value)s',
'exists': 'this.%(field)s != null',
'regex_eq': '%(value)s.test(this.%(field)s)',
'regex_ne': '!%(value)s.test(this.%(field)s)',
}
def __init__(self, **query):
self.query = [query]
@@ -82,43 +96,64 @@ class Q(object):
for j, (op, value) in enumerate(value.items()):
# Create a custom variable name for this operator
op_value_name = '%so%s' % (value_name, j)
# Construct the JS that uses this op
value, operation_js = self._build_op_js(op, key, value,
op_value_name)
# Update the js scope with the value for this op
js_scope[op_value_name] = value
# Construct the JS that uses this op
operation_js = Q.OPERATORS[op.strip('$')] % {
'field': key,
'value': op_value_name
}
js.append(operation_js)
else:
js_scope[value_name] = value
# Construct the JS for this field
field_js = Q.OPERATORS[op.strip('$')] % {
'field': key,
'value': value_name
}
value, field_js = self._build_op_js(op, key, value, value_name)
js_scope[value_name] = value
js.append(field_js)
return ' && '.join(js)
def _build_op_js(self, op, key, value, value_name):
"""Substitute the values in to the correct chunk of Javascript.
"""
if isinstance(value, RE_TYPE):
# Regexes are handled specially
if op.strip('$') == 'ne':
op_js = Q.OPERATORS['regex_ne']
else:
op_js = Q.OPERATORS['regex_eq']
else:
op_js = Q.OPERATORS[op.strip('$')]
# Comparing two ObjectIds in Javascript doesn't work..
if isinstance(value, pymongo.objectid.ObjectId):
value = str(value)
# Perform the substitution
operation_js = op_js % {
'field': key,
'value': value_name
}
return value, operation_js
class QuerySet(object):
"""A set of results returned from a query. Wraps a MongoDB cursor,
"""A set of results returned from a query. Wraps a MongoDB cursor,
providing :class:`~mongoengine.Document` objects as the results.
"""
def __init__(self, document, collection):
self._document = document
self._collection_obj = collection
self._accessed_collection = False
self._query = {}
self._where_clauses = []
self._where_clause = None
self._loaded_fields = []
self._ordering = []
# If inheritance is allowed, only return instances and instances of
# subclasses of the class being used
if document._meta.get('allow_inheritance'):
self._query = {'_types': self._document._class_name}
self._cursor_obj = None
self._limit = None
self._skip = None
def ensure_index(self, key_or_list):
"""Ensure that the given indexes are in place.
@@ -126,14 +161,19 @@ class QuerySet(object):
construct a multi-field index); keys may be prefixed with a **+**
or a **-** to determine the index ordering
"""
index_list = QuerySet._build_index_spec(self._document, key_or_list)
self._collection.ensure_index(index_list)
return self
@classmethod
def _build_index_spec(cls, doc_cls, key_or_list):
"""Build a PyMongo index spec from a MongoEngine index spec.
"""
if isinstance(key_or_list, basestring):
key_or_list = [key_or_list]
index_list = []
# If _types is being used, prepend it to every specified index
if self._document._meta.get('allow_inheritance'):
index_list.append(('_types', 1))
use_types = doc_cls._meta.get('allow_inheritance', True)
for key in key_or_list:
# Get direction from + or -
direction = pymongo.ASCENDING
@@ -141,25 +181,46 @@ class QuerySet(object):
direction = pymongo.DESCENDING
if key.startswith(("+", "-")):
key = key[1:]
# Use real field name
key = QuerySet._translate_field_name(self._document, key)
# Use real field name, do it manually because we need field
# objects for the next part (list field checking)
parts = key.split('.')
fields = QuerySet._lookup_field(doc_cls, parts)
parts = [field.db_field for field in fields]
key = '.'.join(parts)
index_list.append((key, direction))
self._collection.ensure_index(index_list)
return self
def __call__(self, *q_objs, **query):
"""Filter the selected documents by calling the
:class:`~mongoengine.QuerySet` with a query.
# Check if a list field is being used, don't use _types if it is
if use_types and not all(f._index_with_types for f in fields):
use_types = False
:param q_objs: :class:`~mongoengine.Q` objects to be used in the query
# If _types is being used, prepend it to every specified index
if doc_cls._meta.get('allow_inheritance') and use_types:
index_list.insert(0, ('_types', 1))
return index_list
def __call__(self, q_obj=None, **query):
"""Filter the selected documents by calling the
:class:`~mongoengine.queryset.QuerySet` with a query.
:param q_obj: a :class:`~mongoengine.queryset.Q` object to be used in
the query; the :class:`~mongoengine.queryset.QuerySet` is filtered
multiple times with different :class:`~mongoengine.queryset.Q`
objects, only the last one will be used
:param query: Django-style query keyword arguments
"""
for q in q_objs:
self._where_clauses.append(q.as_js(self._document))
if q_obj:
self._where_clause = q_obj.as_js(self._document)
query = QuerySet._transform_query(_doc_cls=self._document, **query)
self._query.update(query)
return self
def filter(self, *q_objs, **query):
"""An alias of :meth:`~mongoengine.queryset.QuerySet.__call__`
"""
return self.__call__(*q_objs, **query)
@property
def _collection(self):
"""Property that returns the collection object. This allows us to
@@ -171,7 +232,8 @@ class QuerySet(object):
# Ensure document-defined indexes are created
if self._document._meta['indexes']:
for key_or_list in self._document._meta['indexes']:
self.ensure_index(key_or_list)
#self.ensure_index(key_or_list)
self._collection.ensure_index(key_or_list)
# Ensure indexes created by uniqueness constraints
for index in self._document._meta['unique_indexes']:
@@ -184,16 +246,20 @@ class QuerySet(object):
@property
def _cursor(self):
if not self._cursor_obj:
self._cursor_obj = self._collection.find(self._query)
if self._cursor_obj is None:
cursor_args = {}
if self._loaded_fields:
cursor_args = {'fields': self._loaded_fields}
self._cursor_obj = self._collection.find(self._query,
**cursor_args)
# Apply where clauses to cursor
for js in self._where_clauses:
self._cursor_obj.where(js)
if self._where_clause:
self._cursor_obj.where(self._where_clause)
# apply default ordering
if self._document._meta['ordering']:
self.order_by(*self._document._meta['ordering'])
return self._cursor_obj
@classmethod
@@ -223,38 +289,45 @@ class QuerySet(object):
"""Translate a field attribute name to a database field name.
"""
parts = field.split(sep)
parts = [f.name for f in QuerySet._lookup_field(doc_cls, parts)]
parts = [f.db_field for f in QuerySet._lookup_field(doc_cls, parts)]
return '.'.join(parts)
@classmethod
def _transform_query(cls, _doc_cls=None, **query):
"""Transform a query from Django-style format to Mongo format.
"""
operators = ['neq', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod',
operators = ['ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod',
'all', 'size', 'exists']
match_operators = ['contains', 'icontains', 'startswith',
'istartswith', 'endswith', 'iendswith']
mongo_query = {}
for key, value in query.items():
parts = key.split('__')
# Check for an operator and transform to mongo-style if there is
op = None
if parts[-1] in operators:
if parts[-1] in operators + match_operators:
op = parts.pop()
if _doc_cls:
# Switch field names to proper names [set in Field(name='foo')]
fields = QuerySet._lookup_field(_doc_cls, parts)
parts = [field.name for field in fields]
parts = [field.db_field for field in fields]
# Convert value to proper value
field = fields[-1]
if op in (None, 'neq', 'gt', 'gte', 'lt', 'lte'):
value = field.prepare_query_value(value)
singular_ops = [None, 'ne', 'gt', 'gte', 'lt', 'lte']
singular_ops += match_operators
if op in singular_ops:
value = field.prepare_query_value(op, value)
elif op in ('in', 'nin', 'all'):
# 'in', 'nin' and 'all' require a list of values
value = [field.prepare_query_value(v) for v in value]
value = [field.prepare_query_value(op, v) for v in value]
if op:
if field.__class__.__name__ == 'GenericReferenceField':
parts.append('_ref')
if op and op not in match_operators:
value = {'$' + op: value}
key = '.'.join(parts)
@@ -265,6 +338,50 @@ class QuerySet(object):
return mongo_query
def get(self, *q_objs, **query):
"""Retrieve the the matching object raising
:class:`~mongoengine.queryset.MultipleObjectsReturned` or
:class:`~mongoengine.queryset.DoesNotExist` exceptions if multiple or
no results are found.
.. versionadded:: 0.3
"""
self.__call__(*q_objs, **query)
count = self.count()
if count == 1:
return self[0]
elif count > 1:
message = u'%d items returned, instead of 1' % count
raise MultipleObjectsReturned(message)
else:
raise DoesNotExist('Document not found')
def get_or_create(self, *q_objs, **query):
"""Retreive unique object or create, if it doesn't exist. Raises
:class:`~mongoengine.queryset.MultipleObjectsReturned` if multiple
results are found. A new document will be created if the document
doesn't exists; a dictionary of default values for the new document
may be provided as a keyword argument called :attr:`defaults`.
.. versionadded:: 0.3
"""
defaults = query.get('defaults', {})
if 'defaults' in query:
del query['defaults']
self.__call__(*q_objs, **query)
count = self.count()
if count == 0:
query.update(defaults)
doc = self._document(**query)
doc.save()
return doc
elif count == 1:
return self.first()
else:
message = u'%d items returned, instead of 1' % count
raise MultipleObjectsReturned(message)
def first(self):
"""Retrieve the first object matching the query.
"""
@@ -287,26 +404,138 @@ class QuerySet(object):
result = self._document._from_son(result)
return result
def in_bulk(self, object_ids):
"""Retrieve a set of documents by their ids.
:param object_ids: a list or tuple of ``ObjectId``\ s
:rtype: dict of ObjectIds as keys and collection-specific
Document subclasses as values.
.. versionadded:: 0.3
"""
doc_map = {}
docs = self._collection.find({'_id': {'$in': object_ids}})
for doc in docs:
doc_map[doc['_id']] = self._document._from_son(doc)
return doc_map
def next(self):
"""Wrap the result in a :class:`~mongoengine.Document` object.
"""
return self._document._from_son(self._cursor.next())
try:
if self._limit == 0:
raise StopIteration
return self._document._from_son(self._cursor.next())
except StopIteration, e:
self.rewind()
raise e
def rewind(self):
"""Rewind the cursor to its unevaluated state.
.. versionadded:: 0.3
"""
self._cursor.rewind()
def count(self):
"""Count the selected elements in the query.
"""
return self._cursor.count()
if self._limit == 0:
return 0
return self._cursor.count(with_limit_and_skip=True)
def __len__(self):
return self.count()
def map_reduce(self, map_f, reduce_f, finalize_f=None, limit=None,
scope=None, keep_temp=False):
"""Perform a map/reduce query using the current query spec
and ordering. While ``map_reduce`` respects ``QuerySet`` chaining,
it must be the last call made, as it does not return a maleable
``QuerySet``.
See the :meth:`~mongoengine.tests.QuerySetTest.test_map_reduce`
and :meth:`~mongoengine.tests.QuerySetTest.test_map_advanced`
tests in ``tests.queryset.QuerySetTest`` for usage examples.
:param map_f: map function, as :class:`~pymongo.code.Code` or string
:param reduce_f: reduce function, as
:class:`~pymongo.code.Code` or string
:param finalize_f: finalize function, an optional function that
performs any post-reduction processing.
:param scope: values to insert into map/reduce global scope. Optional.
:param limit: number of objects from current query to provide
to map/reduce method
:param keep_temp: keep temporary table (boolean, default ``True``)
Returns an iterator yielding
:class:`~mongoengine.document.MapReduceDocument`.
.. note:: Map/Reduce requires server version **>= 1.1.1**. The PyMongo
:meth:`~pymongo.collection.Collection.map_reduce` helper requires
PyMongo version **>= 1.2**.
.. versionadded:: 0.3
"""
from document import MapReduceDocument
if not hasattr(self._collection, "map_reduce"):
raise NotImplementedError("Requires MongoDB >= 1.1.1")
map_f_scope = {}
if isinstance(map_f, pymongo.code.Code):
map_f_scope = map_f.scope
map_f = str(map_f)
map_f = pymongo.code.Code(self._sub_js_fields(map_f), map_f_scope)
reduce_f_scope = {}
if isinstance(reduce_f, pymongo.code.Code):
reduce_f_scope = reduce_f.scope
reduce_f = str(reduce_f)
reduce_f_code = self._sub_js_fields(reduce_f)
reduce_f = pymongo.code.Code(reduce_f_code, reduce_f_scope)
mr_args = {'query': self._query, 'keeptemp': keep_temp}
if finalize_f:
finalize_f_scope = {}
if isinstance(finalize_f, pymongo.code.Code):
finalize_f_scope = finalize_f.scope
finalize_f = str(finalize_f)
finalize_f_code = self._sub_js_fields(finalize_f)
finalize_f = pymongo.code.Code(finalize_f_code, finalize_f_scope)
mr_args['finalize'] = finalize_f
if scope:
mr_args['scope'] = scope
if limit:
mr_args['limit'] = limit
results = self._collection.map_reduce(map_f, reduce_f, **mr_args)
results = results.find()
if self._ordering:
results = results.sort(self._ordering)
for doc in results:
yield MapReduceDocument(self._document, self._collection,
doc['_id'], doc['value'])
def limit(self, n):
"""Limit the number of returned documents to `n`. This may also be
achieved using array-slicing syntax (e.g. ``User.objects[:5]``).
:param n: the maximum number of objects to return
"""
self._cursor.limit(n)
if n == 0:
self._cursor.limit(1)
else:
self._cursor.limit(n)
self._limit = n
# Return self to allow chaining
return self
@@ -317,6 +546,7 @@ class QuerySet(object):
:param n: the number of objects to skip before returning results
"""
self._cursor.skip(n)
self._skip = n
return self
def __getitem__(self, key):
@@ -324,13 +554,48 @@ class QuerySet(object):
"""
# Slice provided
if isinstance(key, slice):
self._cursor_obj = self._cursor[key]
try:
self._cursor_obj = self._cursor[key]
self._skip, self._limit = key.start, key.stop
except IndexError, err:
# PyMongo raises an error if key.start == key.stop, catch it,
# bin it, kill it.
start = key.start or 0
if start >= 0 and key.stop >= 0 and key.step is None:
if start == key.stop:
self.limit(0)
self._skip, self._limit = key.start, key.stop - start
return self
raise err
# Allow further QuerySet modifications to be performed
return self
# Integer index provided
elif isinstance(key, int):
return self._document._from_son(self._cursor[key])
def only(self, *fields):
"""Load only a subset of this document's fields. ::
post = BlogPost.objects(...).only("title")
:param fields: fields to include
.. versionadded:: 0.3
"""
self._loaded_fields = []
for field in fields:
if '.' in field:
raise InvalidQueryError('Subfields cannot be used as '
'arguments to QuerySet.only')
# Translate field name
field = QuerySet._lookup_field(self._document, field)[-1].db_field
self._loaded_fields.append(field)
# _cls is needed for polymorphism
if self._document._meta.get('allow_inheritance'):
self._loaded_fields += ['_cls']
return self
def order_by(self, *keys):
"""Order the :class:`~mongoengine.queryset.QuerySet` by the keys. The
order may be specified by prepending each of the keys by a + or a -.
@@ -346,13 +611,14 @@ class QuerySet(object):
direction = pymongo.DESCENDING
if key[0] in ('-', '+'):
key = key[1:]
key_list.append((key, direction))
self._ordering = key_list
self._cursor.sort(key_list)
return self
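A brief ordering sketch, assuming a hypothetical `BlogPost` document; prefix a key with `+` for ascending order or `-` for descending order, as the docstring describes:

    # Ascending by title, breaking ties by most recent publish date.
    posts = BlogPost.objects.order_by('+title', '-published_date')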
def explain(self, format=False):
"""Return an explain plan record for the
"""Return an explain plan record for the
:class:`~mongoengine.queryset.QuerySet`\ 's cursor.
:param format: format the plan before returning it
@@ -363,7 +629,7 @@ class QuerySet(object):
import pprint
plan = pprint.pformat(plan)
return plan
def delete(self, safe=False):
"""Delete the documents matched by the query.
@@ -375,7 +641,7 @@ class QuerySet(object):
def _transform_update(cls, _doc_cls=None, **update):
"""Transform an update spec from Django-style format to Mongo format.
"""
operators = ['set', 'unset', 'inc', 'dec', 'push', 'push_all', 'pull',
'pull_all']
mongo_update = {}
@@ -398,14 +664,14 @@ class QuerySet(object):
if _doc_cls:
# Switch field names to proper names [set in Field(name='foo')]
fields = QuerySet._lookup_field(_doc_cls, parts)
parts = [field.name for field in fields]
parts = [field.db_field for field in fields]
# Convert value to proper value
field = fields[-1]
if op in (None, 'set', 'unset', 'push', 'pull'):
value = field.prepare_query_value(value)
value = field.prepare_query_value(op, value)
elif op in ('pushAll', 'pullAll'):
value = [field.prepare_query_value(v) for v in value]
value = [field.prepare_query_value(op, v) for v in value]
key = '.'.join(parts)
@@ -420,7 +686,7 @@ class QuerySet(object):
return mongo_update
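For reference, a sketch of how the Django-style keyword arguments accepted by update() and update_one() map onto MongoDB modifiers via _transform_update (field names and values are illustrative; with a document class supplied, keys are also translated to their db_field names):

    QuerySet._transform_update(set__name='Ross')      # {'$set': {'name': 'Ross'}}
    QuerySet._transform_update(inc__hits=1)           # {'$inc': {'hits': 1}}
    QuerySet._transform_update(push__tags='mongodb')  # {'$push': {'tags': 'mongodb'}}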
def update(self, safe_update=True, **update):
def update(self, safe_update=True, upsert=False, **update):
"""Perform an atomic update on the fields matched by the query.
:param safe: check if the operation succeeded before returning
@@ -434,13 +700,14 @@ class QuerySet(object):
update = QuerySet._transform_update(self._document, **update)
try:
self._collection.update(self._query, update, safe=safe_update,
multi=True)
upsert=upsert, multi=True)
except pymongo.errors.OperationFailure, err:
if str(err) == 'multi not coded yet':
raise OperationError('update() method requires MongoDB 1.1.3+')
raise OperationError('Update failed (%s)' % str(err))
if unicode(err) == u'multi not coded yet':
message = u'update() method requires MongoDB 1.1.3+'
raise OperationError(message)
raise OperationError(u'Update failed (%s)' % unicode(err))
def update_one(self, safe_update=True, **update):
def update_one(self, safe_update=True, upsert=False, **update):
"""Perform an atomic update on first field matched by the query.
:param safe: check if the operation succeeded before returning
@@ -454,7 +721,7 @@ class QuerySet(object):
# as the default may change to 'True'
if pymongo.version >= '1.1.1':
self._collection.update(self._query, update, safe=safe_update,
multi=False)
upsert=upsert, multi=False)
else:
# Older versions of PyMongo don't support 'multi'
self._collection.update(self._query, update, safe=safe_update)
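A sketch of the new `upsert` argument added to update() and update_one() in this changeset; `BlogPost` is assumed for illustration:

    # Increments the hit counter on the matching post, or inserts a new
    # document with title 'Post #1' if no post matches the query.
    BlogPost.objects(title='Post #1').update_one(upsert=True, inc__hits=1)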
@@ -464,29 +731,59 @@ class QuerySet(object):
def __iter__(self):
return self
def _sub_js_fields(self, code):
"""When fields are specified with [~fieldname] syntax, where
*fieldname* is the Python name of a field, *fieldname* will be
substituted for the MongoDB name of the field (specified using the
:attr:`name` keyword argument in a field's constructor).
"""
def field_sub(match):
# Extract just the field name, and look up the field objects
field_name = match.group(1).split('.')
fields = QuerySet._lookup_field(self._document, field_name)
# Substitute the correct name for the field into the javascript
return '["%s"]' % fields[-1].db_field
return re.sub('\[\s*~([A-z_][A-z_0-9.]+?)\s*\]', field_sub, code)
def exec_js(self, code, *fields, **options):
"""Execute a Javascript function on the server. A list of fields may be
provided, which will be translated to their correct names and supplied
as the arguments to the function. A few extra variables are added to
the function's scope: ``collection``, which is the name of the
collection in use; ``query``, which is an object representing the
current query; and ``options``, which is an object containing any
options specified as keyword arguments.
As fields in MongoEngine may use different names in the database (set
using the :attr:`db_field` keyword argument to a :class:`Field`
constructor), a mechanism exists for replacing MongoEngine field names
with the database field names in Javascript code. When accessing a
field, use square-bracket notation, and prefix the MongoEngine field
name with a tilde (~).
:param code: a string of Javascript code to execute
:param fields: fields that you will be using in your function, which
will be passed in to your function as arguments
:param options: options that you want available to the function
(accessed in Javascript through the ``options`` object)
"""
code = self._sub_js_fields(code)
fields = [QuerySet._translate_field_name(self._document, f)
for f in fields]
collection = self._document._meta['collection']
scope = {
'collection': collection,
'query': self._query,
'options': options or {},
}
query = self._query
if self._where_clause:
query['$where'] = self._where_clause
scope['query'] = query
code = pymongo.code.Code(code, scope=scope)
db = _get_db()
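A short exec_js sketch combining the `collection` and `query` scope variables with the `[~fieldname]` substitution described above; `BlogPost` and its `hits` field are assumed (compare test_exec_js_query and test_exec_js_field_sub later in this changeset). The `[~hits]` placeholder is rewritten to the field's db_field name before the code is evaluated on the server:

    total_hits = BlogPost.objects(published=True).exec_js("""
        function() {
            var total = 0;
            db[collection].find(query).forEach(function(doc) {
                total += doc[~hits];
            });
            return total;
        }
    """)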
@@ -533,7 +830,7 @@ class QuerySet(object):
def item_frequencies(self, list_field, normalize=False):
"""Returns a dictionary of all items present in a list field across
the whole queried set of documents, and their corresponding frequency.
This is useful for generating tag clouds, or searching documents.
:param list_field: the list field to use
:param normalize: normalize the results so they add to 1.0
@@ -563,11 +860,15 @@ class QuerySet(object):
return self.exec_js(freq_func, list_field, normalize=normalize)
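A usage sketch for item_frequencies, assuming a BlogPost document with a `tags` list field; the returned counts are illustrative:

    frequencies = BlogPost.objects.item_frequencies('tags')
    # e.g. {u'music': 2.0, u'film': 3.0, u'photography': 1.0}
    # With normalize=True each value is divided by the total so they sum to 1.0.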
def __repr__(self):
data = list(self[:REPR_OUTPUT_SIZE + 1])
limit = REPR_OUTPUT_SIZE + 1
if self._limit is not None and self._limit < limit:
limit = self._limit
data = list(self[self._skip:limit])
if len(data) > REPR_OUTPUT_SIZE:
data[-1] = "...(remaining elements truncated)..."
return repr(data)
class InvalidCollectionError(Exception):
pass
@@ -579,7 +880,7 @@ class QuerySetManager(object):
self._collection = None
def __get__(self, instance, owner):
"""Descriptor for instantiating a new QuerySet object when
"""Descriptor for instantiating a new QuerySet object when
Document.objects is accessed.
"""
if instance is not None:
@@ -598,7 +899,7 @@ class QuerySetManager(object):
if collection in db.collection_names():
self._collection = db[collection]
# The collection already exists, check if its capped
# options match the specified capped options
options = self._collection.options()
if options.get('max') != max_documents or \
@@ -614,18 +915,27 @@ class QuerySetManager(object):
self._collection = db.create_collection(collection, opts)
else:
self._collection = db[collection]
# owner is the document that contains the QuerySetManager
queryset = QuerySet(owner, self._collection)
if self._manager_func:
queryset = self._manager_func(queryset)
if self._manager_func.func_code.co_argcount == 1:
queryset = self._manager_func(queryset)
else:
queryset = self._manager_func(owner, queryset)
return queryset
def queryset_manager(func):
"""Decorator that allows you to define custom QuerySet managers on
"""Decorator that allows you to define custom QuerySet managers on
:class:`~mongoengine.Document` classes. The manager must be a function that
accepts a :class:`~mongoengine.queryset.QuerySet` as its only argument, and
returns a :class:`~mongoengine.queryset.QuerySet`, probably the same one
but modified in some way.
accepts a :class:`~mongoengine.Document` class as its first argument, and a
:class:`~mongoengine.queryset.QuerySet` as its second argument. The
function should return a :class:`~mongoengine.queryset.QuerySet`, probably
the same one that was passed in, but modified in some way.
"""
if func.func_code.co_argcount == 1:
import warnings
msg = 'Methods decorated with queryset_manager should take 2 arguments'
warnings.warn(msg, DeprecationWarning)
return QuerySetManager(func)
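A sketch of the new two-argument manager signature that the deprecation warning above refers to (the one-argument form still works but is deprecated); `BlogPost` is assumed, mirroring the test_filter_chaining test later in this changeset:

    class BlogPost(Document):
        is_published = BooleanField()

        @queryset_manager
        def published(doc_cls, queryset):
            return queryset(is_published=True)

    # BlogPost.published behaves like Document.objects but is pre-filtered to
    # published posts, and can be chained further, e.g.
    # BlogPost.published().filter(published_date__lt=some_date)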

View File

@@ -1,5 +1,5 @@
import unittest
import datetime
from datetime import datetime
import pymongo
from mongoengine import *
@@ -156,6 +156,20 @@ class DocumentTest(unittest.TestCase):
class Employee(self.Person):
meta = {'allow_inheritance': False}
self.assertRaises(ValueError, create_employee_class)
# Test the same for embedded documents
class Comment(EmbeddedDocument):
content = StringField()
meta = {'allow_inheritance': False}
def create_special_comment():
class SpecialComment(Comment):
pass
self.assertRaises(ValueError, create_special_comment)
comment = Comment(content='test')
self.assertFalse('_cls' in comment.to_mongo())
self.assertFalse('_types' in comment.to_mongo())
def test_collection_name(self):
"""Ensure that a collection with a specified name may be used.
@@ -185,7 +199,7 @@ class DocumentTest(unittest.TestCase):
"""Ensure that capped collections work properly.
"""
class Log(Document):
date = DateTimeField(default=datetime.datetime.now)
date = DateTimeField(default=datetime.now)
meta = {
'max_documents': 10,
'max_size': 90000,
@@ -211,7 +225,7 @@ class DocumentTest(unittest.TestCase):
# Check that the document cannot be redefined with different options
def recreate_log_document():
class Log(Document):
date = DateTimeField(default=datetime.datetime.now)
date = DateTimeField(default=datetime.now)
meta = {
'max_documents': 11,
}
@@ -225,11 +239,13 @@ class DocumentTest(unittest.TestCase):
"""Ensure that indexes are used when meta[indexes] is specified.
"""
class BlogPost(Document):
date = DateTimeField(name='addDate', default=datetime.datetime.now)
date = DateTimeField(db_field='addDate', default=datetime.now)
category = StringField()
tags = ListField(StringField())
meta = {
'indexes': [
'-date',
'tags',
('category', '-date')
],
}
@@ -237,7 +253,8 @@ class DocumentTest(unittest.TestCase):
BlogPost.drop_collection()
info = BlogPost.objects._collection.index_information()
self.assertEqual(len(info), 4) # _id, types, '-date', ('cat', 'date')
# _id, types, '-date', 'tags', ('cat', 'date')
self.assertEqual(len(info), 5)
# Indexes are lazy so use list() to perform query
list(BlogPost.objects)
@@ -245,6 +262,8 @@ class DocumentTest(unittest.TestCase):
self.assertTrue([('_types', 1), ('category', 1), ('addDate', -1)]
in info.values())
self.assertTrue([('_types', 1), ('addDate', -1)] in info.values())
# tags is a list field so it shouldn't have _types in the index
self.assertTrue([('tags', 1)] in info.values())
class ExtendedBlogPost(BlogPost):
title = StringField()
@@ -278,7 +297,7 @@ class DocumentTest(unittest.TestCase):
self.assertRaises(OperationError, post2.save)
class Date(EmbeddedDocument):
year = IntField(name='yr')
year = IntField(db_field='yr')
class BlogPost(Document):
title = StringField()
@@ -309,7 +328,7 @@ class DocumentTest(unittest.TestCase):
User.drop_collection()
self.assertEqual(User._fields['username'].name, '_id')
self.assertEqual(User._fields['username'].db_field, '_id')
self.assertEqual(User._meta['id_field'], 'username')
def create_invalid_user():
@@ -386,7 +405,26 @@ class DocumentTest(unittest.TestCase):
self.assertTrue('content' in Comment._fields)
self.assertFalse('id' in Comment._fields)
self.assertFalse(hasattr(Comment, '_meta'))
self.assertFalse('collection' in Comment._meta)
def test_embedded_document_validation(self):
"""Ensure that embedded documents may be validated.
"""
class Comment(EmbeddedDocument):
date = DateTimeField()
content = StringField(required=True)
comment = Comment()
self.assertRaises(ValidationError, comment.validate)
comment.content = 'test'
comment.validate()
comment.date = 4
self.assertRaises(ValidationError, comment.validate)
comment.date = datetime.now()
comment.validate()
def test_save(self):
"""Ensure that a document may be saved in the database.
@@ -399,7 +437,7 @@ class DocumentTest(unittest.TestCase):
person_obj = collection.find_one({'name': 'Test User'})
self.assertEqual(person_obj['name'], 'Test User')
self.assertEqual(person_obj['age'], 30)
self.assertEqual(str(person_obj['_id']), person.id)
self.assertEqual(person_obj['_id'], person.id)
def test_delete(self):
"""Ensure that document may be deleted using the delete method.

View File

@@ -1,5 +1,8 @@
import unittest
import datetime
from decimal import Decimal
import pymongo
from mongoengine import *
from mongoengine.connection import _get_db
@@ -79,6 +82,19 @@ class FieldTest(unittest.TestCase):
person.name = 'Shorter name'
person.validate()
def test_url_validation(self):
"""Ensure that URLFields validate urls properly.
"""
class Link(Document):
url = URLField()
link = Link()
link.url = 'google'
self.assertRaises(ValidationError, link.validate)
link.url = 'http://www.google.com:8080'
link.validate()
def test_int_validation(self):
"""Ensure that invalid values cannot be assigned to int fields.
"""
@@ -106,12 +122,32 @@ class FieldTest(unittest.TestCase):
person.height = 1.89
person.validate()
person.height = 2
person.height = '2.0'
self.assertRaises(ValidationError, person.validate)
person.height = 0.01
self.assertRaises(ValidationError, person.validate)
person.height = 4.0
self.assertRaises(ValidationError, person.validate)
def test_decimal_validation(self):
"""Ensure that invalid values cannot be assigned to decimal fields.
"""
class Person(Document):
height = DecimalField(min_value=Decimal('0.1'),
max_value=Decimal('3.5'))
person = Person()
person.height = Decimal('1.89')
person.validate()
person.height = '2.0'
person.validate()
person.height = 0.01
self.assertRaises(ValidationError, person.validate)
person.height = Decimal('0.01')
self.assertRaises(ValidationError, person.validate)
person.height = Decimal('4.0')
self.assertRaises(ValidationError, person.validate)
def test_boolean_validation(self):
"""Ensure that invalid values cannot be assigned to boolean fields.
@@ -176,6 +212,28 @@ class FieldTest(unittest.TestCase):
post.comments = 'yay'
self.assertRaises(ValidationError, post.validate)
def test_dict_validation(self):
"""Ensure that dict types work as expected.
"""
class BlogPost(Document):
info = DictField()
post = BlogPost()
post.info = 'my post'
self.assertRaises(ValidationError, post.validate)
post.info = ['test', 'test']
self.assertRaises(ValidationError, post.validate)
post.info = {'$title': 'test'}
self.assertRaises(ValidationError, post.validate)
post.info = {'the.title': 'test'}
self.assertRaises(ValidationError, post.validate)
post.info = {'title': 'test'}
post.validate()
def test_embedded_document_validation(self):
"""Ensure that invalid embedded documents cannot be assigned to
embedded document fields.
@@ -184,7 +242,7 @@ class FieldTest(unittest.TestCase):
content = StringField()
class PersonPreferences(EmbeddedDocument):
food = StringField()
food = StringField(required=True)
number = IntField()
class Person(Document):
@@ -195,9 +253,14 @@ class FieldTest(unittest.TestCase):
person.preferences = 'My Preferences'
self.assertRaises(ValidationError, person.validate)
# Check that only the right embedded doc works
person.preferences = Comment(content='Nice blog post...')
self.assertRaises(ValidationError, person.validate)
# Check that the embedded doc is valid
person.preferences = PersonPreferences()
self.assertRaises(ValidationError, person.validate)
person.preferences = PersonPreferences(food='Cheese', number=47)
self.assertEqual(person.preferences.food, 'Cheese')
person.validate()
@@ -258,7 +321,235 @@ class FieldTest(unittest.TestCase):
User.drop_collection()
BlogPost.drop_collection()
def test_list_item_dereference(self):
"""Ensure that DBRef items in ListFields are dereferenced.
"""
class User(Document):
name = StringField()
class Group(Document):
members = ListField(ReferenceField(User))
User.drop_collection()
Group.drop_collection()
user1 = User(name='user1')
user1.save()
user2 = User(name='user2')
user2.save()
group = Group(members=[user1, user2])
group.save()
group_obj = Group.objects.first()
self.assertEqual(group_obj.members[0].name, user1.name)
self.assertEqual(group_obj.members[1].name, user2.name)
User.drop_collection()
Group.drop_collection()
def test_recursive_reference(self):
"""Ensure that ReferenceFields can reference their own documents.
"""
class Employee(Document):
name = StringField()
boss = ReferenceField('self')
bill = Employee(name='Bill Lumbergh')
bill.save()
peter = Employee(name='Peter Gibbons', boss=bill)
peter.save()
peter = Employee.objects.with_id(peter.id)
self.assertEqual(peter.boss, bill)
def test_undefined_reference(self):
"""Ensure that ReferenceFields may reference undefined Documents.
"""
class Product(Document):
name = StringField()
company = ReferenceField('Company')
class Company(Document):
name = StringField()
ten_gen = Company(name='10gen')
ten_gen.save()
mongodb = Product(name='MongoDB', company=ten_gen)
mongodb.save()
obj = Product.objects(company=ten_gen).first()
self.assertEqual(obj, mongodb)
self.assertEqual(obj.company, ten_gen)
def test_reference_query_conversion(self):
"""Ensure that ReferenceFields can be queried using objects and values
of the type of the primary key of the referenced object.
"""
class Member(Document):
user_num = IntField(primary_key=True)
class BlogPost(Document):
title = StringField()
author = ReferenceField(Member)
Member.drop_collection()
BlogPost.drop_collection()
m1 = Member(user_num=1)
m1.save()
m2 = Member(user_num=2)
m2.save()
post1 = BlogPost(title='post 1', author=m1)
post1.save()
post2 = BlogPost(title='post 2', author=m2)
post2.save()
post = BlogPost.objects(author=m1).first()
self.assertEqual(post.id, post1.id)
post = BlogPost.objects(author=m2).first()
self.assertEqual(post.id, post2.id)
Member.drop_collection()
BlogPost.drop_collection()
def test_generic_reference(self):
"""Ensure that a GenericReferenceField properly dereferences items.
"""
class Link(Document):
title = StringField()
meta = {'allow_inheritance': False}
class Post(Document):
title = StringField()
class Bookmark(Document):
bookmark_object = GenericReferenceField()
Link.drop_collection()
Post.drop_collection()
Bookmark.drop_collection()
link_1 = Link(title="Pitchfork")
link_1.save()
post_1 = Post(title="Behind the Scenes of the Pavement Reunion")
post_1.save()
bm = Bookmark(bookmark_object=post_1)
bm.save()
bm = Bookmark.objects(bookmark_object=post_1).first()
self.assertEqual(bm.bookmark_object, post_1)
self.assertTrue(isinstance(bm.bookmark_object, Post))
bm.bookmark_object = link_1
bm.save()
bm = Bookmark.objects(bookmark_object=link_1).first()
self.assertEqual(bm.bookmark_object, link_1)
self.assertTrue(isinstance(bm.bookmark_object, Link))
Link.drop_collection()
Post.drop_collection()
Bookmark.drop_collection()
def test_generic_reference_list(self):
"""Ensure that a ListField properly dereferences generic references.
"""
class Link(Document):
title = StringField()
class Post(Document):
title = StringField()
class User(Document):
bookmarks = ListField(GenericReferenceField())
Link.drop_collection()
Post.drop_collection()
User.drop_collection()
link_1 = Link(title="Pitchfork")
link_1.save()
post_1 = Post(title="Behind the Scenes of the Pavement Reunion")
post_1.save()
user = User(bookmarks=[post_1, link_1])
user.save()
user = User.objects(bookmarks__all=[post_1, link_1]).first()
self.assertEqual(user.bookmarks[0], post_1)
self.assertEqual(user.bookmarks[1], link_1)
Link.drop_collection()
Post.drop_collection()
User.drop_collection()
def test_binary_fields(self):
"""Ensure that binary fields can be stored and retrieved.
"""
class Attachment(Document):
content_type = StringField()
blob = BinaryField()
BLOB = '\xe6\x00\xc4\xff\x07'
MIME_TYPE = 'application/octet-stream'
Attachment.drop_collection()
attachment = Attachment(content_type=MIME_TYPE, blob=BLOB)
attachment.save()
attachment_1 = Attachment.objects().first()
self.assertEqual(MIME_TYPE, attachment_1.content_type)
self.assertEqual(BLOB, attachment_1.blob)
Attachment.drop_collection()
def test_binary_validation(self):
"""Ensure that invalid values cannot be assigned to binary fields.
"""
class Attachment(Document):
blob = BinaryField()
class AttachmentRequired(Document):
blob = BinaryField(required=True)
class AttachmentSizeLimit(Document):
blob = BinaryField(max_bytes=4)
Attachment.drop_collection()
AttachmentRequired.drop_collection()
AttachmentSizeLimit.drop_collection()
attachment = Attachment()
attachment.validate()
attachment.blob = 2
self.assertRaises(ValidationError, attachment.validate)
attachment_required = AttachmentRequired()
self.assertRaises(ValidationError, attachment_required.validate)
attachment_required.blob = '\xe6\x00\xc4\xff\x07'
attachment_required.validate()
attachment_size_limit = AttachmentSizeLimit(blob='\xe6\x00\xc4\xff\x07')
self.assertRaises(ValidationError, attachment_size_limit.validate)
attachment_size_limit.blob = '\xe6\x00\xc4\xff'
attachment_size_limit.validate()
Attachment.drop_collection()
AttachmentRequired.drop_collection()
AttachmentSizeLimit.drop_collection()
if __name__ == '__main__':
unittest.main()

View File

@@ -1,13 +1,17 @@
# -*- coding: utf-8 -*-
import unittest
import pymongo
from datetime import datetime
from datetime import datetime, timedelta
from mongoengine.queryset import QuerySet
from mongoengine.queryset import (QuerySet, MultipleObjectsReturned,
DoesNotExist)
from mongoengine import *
class QuerySetTest(unittest.TestCase):
def setUp(self):
connect(db='mongoenginetest')
@@ -20,7 +24,7 @@ class QuerySetTest(unittest.TestCase):
"""Ensure that a QuerySet is correctly initialised by QuerySetManager.
"""
self.assertTrue(isinstance(self.Person.objects, QuerySet))
self.assertEqual(self.Person.objects._collection.name(),
self.assertEqual(self.Person.objects._collection.name,
self.Person._meta['collection'])
self.assertTrue(isinstance(self.Person.objects._collection,
pymongo.collection.Collection))
@@ -30,15 +34,15 @@ class QuerySetTest(unittest.TestCase):
"""
self.assertEqual(QuerySet._transform_query(name='test', age=30),
{'name': 'test', 'age': 30})
self.assertEqual(QuerySet._transform_query(age__lt=30),
{'age': {'$lt': 30}})
self.assertEqual(QuerySet._transform_query(age__gt=20, age__lt=50),
{'age': {'$gt': 20, '$lt': 50}})
self.assertEqual(QuerySet._transform_query(age=20, age__gt=50),
{'age': 20})
self.assertEqual(QuerySet._transform_query(friend__age__gte=30),
{'friend.age': {'$gte': 30}})
self.assertEqual(QuerySet._transform_query(name__exists=True),
{'name': {'$exists': True}})
def test_find(self):
@@ -58,7 +62,7 @@ class QuerySetTest(unittest.TestCase):
results = list(people)
self.assertTrue(isinstance(results[0], self.Person))
self.assertTrue(isinstance(results[0].id, (pymongo.objectid.ObjectId,
str, unicode)))
self.assertEqual(results[0].name, "User A")
self.assertEqual(results[0].age, 20)
self.assertEqual(results[1].name, "User B")
@@ -101,6 +105,9 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual(len(people), 1)
self.assertEqual(people[0].name, 'User B')
people = list(self.Person.objects[1:1])
self.assertEqual(len(people), 0)
def test_find_one(self):
"""Ensure that a query using find_one returns a valid result.
"""
@@ -130,11 +137,160 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual(person.name, "User B")
self.assertRaises(IndexError, self.Person.objects.__getitem__, 2)
# Find a document using just the object id
person = self.Person.objects.with_id(person1.id)
self.assertEqual(person.name, "User A")
def test_find_only_one(self):
"""Ensure that a query using ``get`` returns at most one result.
"""
# Try retrieving when no objects exist
self.assertRaises(DoesNotExist, self.Person.objects.get)
person1 = self.Person(name="User A", age=20)
person1.save()
person2 = self.Person(name="User B", age=30)
person2.save()
# Retrieve the first person from the database
self.assertRaises(MultipleObjectsReturned, self.Person.objects.get)
# Use a query to filter the people found to just person2
person = self.Person.objects.get(age=30)
self.assertEqual(person.name, "User B")
person = self.Person.objects.get(age__lt=30)
self.assertEqual(person.name, "User A")
def test_get_or_create(self):
"""Ensure that ``get_or_create`` returns one result or creates a new
document.
"""
person1 = self.Person(name="User A", age=20)
person1.save()
person2 = self.Person(name="User B", age=30)
person2.save()
# Retrieve the first person from the database
self.assertRaises(MultipleObjectsReturned,
self.Person.objects.get_or_create)
# Use a query to filter the people found to just person2
person = self.Person.objects.get_or_create(age=30)
self.assertEqual(person.name, "User B")
person = self.Person.objects.get_or_create(age__lt=30)
self.assertEqual(person.name, "User A")
# Try retrieving when no objects exist - new doc should be created
self.Person.objects.get_or_create(age=50, defaults={'name': 'User C'})
person = self.Person.objects.get(age=50)
self.assertEqual(person.name, "User C")
def test_repeated_iteration(self):
"""Ensure that QuerySet rewinds itself one iteration finishes.
"""
self.Person(name='Person 1').save()
self.Person(name='Person 2').save()
queryset = self.Person.objects
people1 = [person for person in queryset]
people2 = [person for person in queryset]
self.assertEqual(people1, people2)
def test_regex_query_shortcuts(self):
"""Ensure that contains, startswith, endswith, etc work.
"""
person = self.Person(name='Guido van Rossum')
person.save()
# Test contains
obj = self.Person.objects(name__contains='van').first()
self.assertEqual(obj, person)
obj = self.Person.objects(name__contains='Van').first()
self.assertEqual(obj, None)
obj = self.Person.objects(Q(name__contains='van')).first()
self.assertEqual(obj, person)
obj = self.Person.objects(Q(name__contains='Van')).first()
self.assertEqual(obj, None)
# Test icontains
obj = self.Person.objects(name__icontains='Van').first()
self.assertEqual(obj, person)
obj = self.Person.objects(Q(name__icontains='Van')).first()
self.assertEqual(obj, person)
# Test startswith
obj = self.Person.objects(name__startswith='Guido').first()
self.assertEqual(obj, person)
obj = self.Person.objects(name__startswith='guido').first()
self.assertEqual(obj, None)
obj = self.Person.objects(Q(name__startswith='Guido')).first()
self.assertEqual(obj, person)
obj = self.Person.objects(Q(name__startswith='guido')).first()
self.assertEqual(obj, None)
# Test istartswith
obj = self.Person.objects(name__istartswith='guido').first()
self.assertEqual(obj, person)
obj = self.Person.objects(Q(name__istartswith='guido')).first()
self.assertEqual(obj, person)
# Test endswith
obj = self.Person.objects(name__endswith='Rossum').first()
self.assertEqual(obj, person)
obj = self.Person.objects(name__endswith='rossuM').first()
self.assertEqual(obj, None)
obj = self.Person.objects(Q(name__endswith='Rossum')).first()
self.assertEqual(obj, person)
obj = self.Person.objects(Q(name__endswith='rossuM')).first()
self.assertEqual(obj, None)
# Test iendswith
obj = self.Person.objects(name__iendswith='rossuM').first()
self.assertEqual(obj, person)
obj = self.Person.objects(Q(name__iendswith='rossuM')).first()
self.assertEqual(obj, person)
def test_filter_chaining(self):
"""Ensure filters can be chained together.
"""
from datetime import datetime
class BlogPost(Document):
title = StringField()
is_published = BooleanField()
published_date = DateTimeField()
@queryset_manager
def published(doc_cls, queryset):
return queryset(is_published=True)
blog_post_1 = BlogPost(title="Blog Post #1",
is_published = True,
published_date=datetime(2010, 1, 5, 0, 0 ,0))
blog_post_2 = BlogPost(title="Blog Post #2",
is_published = True,
published_date=datetime(2010, 1, 6, 0, 0 ,0))
blog_post_3 = BlogPost(title="Blog Post #3",
is_published = True,
published_date=datetime(2010, 1, 7, 0, 0 ,0))
blog_post_1.save()
blog_post_2.save()
blog_post_3.save()
# find all published blog posts before 2010-01-07
published_posts = BlogPost.published()
published_posts = published_posts.filter(
published_date__lt=datetime(2010, 1, 7, 0, 0 ,0))
self.assertEqual(published_posts.count(), 2)
BlogPost.drop_collection()
def test_ordering(self):
"""Ensure default ordering is applied and can be overridden.
"""
@@ -148,28 +304,61 @@ class QuerySetTest(unittest.TestCase):
BlogPost.drop_collection()
blog_post_1 = BlogPost(title="Blog Post #1",
published_date=datetime(2010, 1, 5, 0, 0 ,0))
blog_post_2 = BlogPost(title="Blog Post #2",
published_date=datetime(2010, 1, 6, 0, 0 ,0))
blog_post_3 = BlogPost(title="Blog Post #3",
published_date=datetime(2010, 1, 7, 0, 0 ,0))
blog_post_1.save()
blog_post_2.save()
blog_post_3.save()
# get the "first" BlogPost using default ordering
# from BlogPost.meta.ordering
latest_post = BlogPost.objects.first()
self.assertEqual(latest_post.title, "Blog Post #3")
# override default ordering, order BlogPosts by "published_date"
first_post = BlogPost.objects.order_by("+published_date").first()
self.assertEqual(first_post.title, "Blog Post #1")
BlogPost.drop_collection()
def test_only(self):
"""Ensure that QuerySet.only only returns the requested fields.
"""
person = self.Person(name='test', age=25)
person.save()
obj = self.Person.objects.only('name').get()
self.assertEqual(obj.name, person.name)
self.assertEqual(obj.age, None)
obj = self.Person.objects.only('age').get()
self.assertEqual(obj.name, None)
self.assertEqual(obj.age, person.age)
obj = self.Person.objects.only('name', 'age').get()
self.assertEqual(obj.name, person.name)
self.assertEqual(obj.age, person.age)
# Check polymorphism still works
class Employee(self.Person):
salary = IntField(db_field='wage')
employee = Employee(name='test employee', age=40, salary=30000)
employee.save()
obj = self.Person.objects(id=employee.id).only('age').get()
self.assertTrue(isinstance(obj, Employee))
# Check field names are looked up properly
obj = Employee.objects(id=employee.id).only('salary').get()
self.assertEqual(obj.salary, employee.salary)
self.assertEqual(obj.name, None)
def test_find_embedded(self):
"""Ensure that an embedded document is properly returned from a query.
"""
@@ -189,10 +378,28 @@ class QuerySetTest(unittest.TestCase):
result = BlogPost.objects.first()
self.assertTrue(isinstance(result.author, User))
self.assertEqual(result.author.name, 'Test User')
BlogPost.drop_collection()
def test_find_dict_item(self):
"""Ensure that DictField items may be found.
"""
class BlogPost(Document):
info = DictField()
BlogPost.drop_collection()
post = BlogPost(info={'title': 'test'})
post.save()
post_obj = BlogPost.objects(info__title='test').first()
self.assertEqual(post_obj.id, post.id)
BlogPost.drop_collection()
def test_q(self):
"""Ensure that Q objects may be used to query for documents.
"""
class BlogPost(Document):
publish_date = DateTimeField()
published = BooleanField()
@@ -217,6 +424,11 @@ class QuerySetTest(unittest.TestCase):
post6 = BlogPost(published=False)
post6.save()
# Check ObjectId lookup works
obj = BlogPost.objects(id=post1.id).first()
self.assertEqual(obj, post1)
# Check Q object combination
date = datetime(2010, 1, 10)
q = BlogPost.objects(Q(publish_date__lte=date) | Q(published=True))
posts = [post.id for post in q]
@@ -228,6 +440,132 @@ class QuerySetTest(unittest.TestCase):
BlogPost.drop_collection()
# Check the 'in' operator
self.Person(name='user1', age=20).save()
self.Person(name='user2', age=20).save()
self.Person(name='user3', age=30).save()
self.Person(name='user4', age=40).save()
self.assertEqual(len(self.Person.objects(Q(age__in=[20]))), 2)
self.assertEqual(len(self.Person.objects(Q(age__in=[20, 30]))), 3)
def test_q_regex(self):
"""Ensure that Q objects can be queried using regexes.
"""
person = self.Person(name='Guido van Rossum')
person.save()
import re
obj = self.Person.objects(Q(name=re.compile('^Gui'))).first()
self.assertEqual(obj, person)
obj = self.Person.objects(Q(name=re.compile('^gui'))).first()
self.assertEqual(obj, None)
obj = self.Person.objects(Q(name=re.compile('^gui', re.I))).first()
self.assertEqual(obj, person)
obj = self.Person.objects(Q(name__ne=re.compile('^bob'))).first()
self.assertEqual(obj, person)
obj = self.Person.objects(Q(name__ne=re.compile('^Gui'))).first()
self.assertEqual(obj, None)
def test_exec_js_query(self):
"""Ensure that queries are properly formed for use in exec_js.
"""
class BlogPost(Document):
hits = IntField()
published = BooleanField()
BlogPost.drop_collection()
post1 = BlogPost(hits=1, published=False)
post1.save()
post2 = BlogPost(hits=1, published=True)
post2.save()
post3 = BlogPost(hits=1, published=True)
post3.save()
js_func = """
function(hitsField) {
var count = 0;
db[collection].find(query).forEach(function(doc) {
count += doc[hitsField];
});
return count;
}
"""
# Ensure that normal queries work
c = BlogPost.objects(published=True).exec_js(js_func, 'hits')
self.assertEqual(c, 2)
c = BlogPost.objects(published=False).exec_js(js_func, 'hits')
self.assertEqual(c, 1)
# Ensure that Q object queries work
c = BlogPost.objects(Q(published=True)).exec_js(js_func, 'hits')
self.assertEqual(c, 2)
c = BlogPost.objects(Q(published=False)).exec_js(js_func, 'hits')
self.assertEqual(c, 1)
BlogPost.drop_collection()
def test_exec_js_field_sub(self):
"""Ensure that field substitutions occur properly in exec_js functions.
"""
class Comment(EmbeddedDocument):
content = StringField(db_field='body')
class BlogPost(Document):
name = StringField(db_field='doc-name')
comments = ListField(EmbeddedDocumentField(Comment),
db_field='cmnts')
BlogPost.drop_collection()
comments1 = [Comment(content='cool'), Comment(content='yay')]
post1 = BlogPost(name='post1', comments=comments1)
post1.save()
comments2 = [Comment(content='nice stuff')]
post2 = BlogPost(name='post2', comments=comments2)
post2.save()
code = """
function getComments() {
var comments = [];
db[collection].find(query).forEach(function(doc) {
var docComments = doc[~comments];
for (var i = 0; i < docComments.length; i++) {
comments.push({
'document': doc[~name],
'comment': doc[~comments][i][~comments.content]
});
}
});
return comments;
}
"""
sub_code = BlogPost.objects._sub_js_fields(code)
code_chunks = ['doc["cmnts"];', 'doc["doc-name"],',
'doc["cmnts"][i]["body"]']
for chunk in code_chunks:
self.assertTrue(chunk in sub_code)
results = BlogPost.objects.exec_js(code)
expected_results = [
{u'comment': u'cool', u'document': u'post1'},
{u'comment': u'yay', u'document': u'post1'},
{u'comment': u'nice stuff', u'document': u'post2'},
]
self.assertEqual(results, expected_results)
BlogPost.drop_collection()
def test_delete(self):
"""Ensure that documents are properly deleted from the database.
"""
@@ -293,16 +631,220 @@ class QuerySetTest(unittest.TestCase):
names = [p.name for p in self.Person.objects.order_by('age')]
self.assertEqual(names, ['User A', 'User C', 'User B'])
ages = [p.age for p in self.Person.objects.order_by('-name')]
self.assertEqual(ages, [30, 40, 20])
def test_map_reduce(self):
"""Ensure map/reduce is both mapping and reducing.
"""
class BlogPost(Document):
title = StringField()
tags = ListField(StringField(), db_field='post-tag-list')
BlogPost.drop_collection()
BlogPost(title="Post #1", tags=['music', 'film', 'print']).save()
BlogPost(title="Post #2", tags=['music', 'film']).save()
BlogPost(title="Post #3", tags=['film', 'photography']).save()
map_f = """
function() {
this[~tags].forEach(function(tag) {
emit(tag, 1);
});
}
"""
reduce_f = """
function(key, values) {
var total = 0;
for(var i=0; i<values.length; i++) {
total += values[i];
}
return total;
}
"""
# run a map/reduce operation spanning all posts
results = BlogPost.objects.map_reduce(map_f, reduce_f)
results = list(results)
self.assertEqual(len(results), 4)
music = filter(lambda r: r.key == "music", results)[0]
self.assertEqual(music.value, 2)
film = filter(lambda r: r.key == "film", results)[0]
self.assertEqual(film.value, 3)
BlogPost.drop_collection()
def test_map_reduce_with_custom_object_ids(self):
"""Ensure that QuerySet.map_reduce works properly with custom
primary keys.
"""
class BlogPost(Document):
title = StringField(primary_key=True)
tags = ListField(StringField())
post1 = BlogPost(title="Post #1", tags=["mongodb", "mongoengine"])
post2 = BlogPost(title="Post #2", tags=["django", "mongodb"])
post3 = BlogPost(title="Post #3", tags=["hitchcock films"])
post1.save()
post2.save()
post3.save()
self.assertEqual(BlogPost._fields['title'].db_field, '_id')
self.assertEqual(BlogPost._meta['id_field'], 'title')
map_f = """
function() {
emit(this._id, 1);
}
"""
# reduce to a list of tag ids and counts
reduce_f = """
function(key, values) {
var total = 0;
for(var i=0; i<values.length; i++) {
total += values[i];
}
return total;
}
"""
results = BlogPost.objects.map_reduce(map_f, reduce_f)
results = list(results)
self.assertEqual(results[0].object, post1)
self.assertEqual(results[1].object, post2)
self.assertEqual(results[2].object, post3)
BlogPost.drop_collection()
def test_map_reduce_finalize(self):
"""Ensure that map, reduce, and finalize run and introduce "scope"
by simulating "hotness" ranking with Reddit algorithm.
"""
from time import mktime
class Link(Document):
title = StringField(db_field='bpTitle')
up_votes = IntField()
down_votes = IntField()
submitted = DateTimeField(db_field='sTime')
Link.drop_collection()
now = datetime.utcnow()
# Note: Test data taken from a custom Reddit homepage on
# Fri, 12 Feb 2010 14:36:00 -0600. The expected "hotness" ranking
# happens to match the order of insertion below, but the ranking is
# computed from votes and submission time, not from insertion order.
Link(title = "Google Buzz auto-followed a woman's abusive ex ...",
up_votes = 1079,
down_votes = 553,
submitted = now-timedelta(hours=4)).save()
Link(title = "We did it! Barbie is a computer engineer.",
up_votes = 481,
down_votes = 124,
submitted = now-timedelta(hours=2)).save()
Link(title = "This Is A Mosquito Getting Killed By A Laser",
up_votes = 1446,
down_votes = 530,
submitted=now-timedelta(hours=13)).save()
Link(title = "Arabic flashcards land physics student in jail.",
up_votes = 215,
down_votes = 105,
submitted = now-timedelta(hours=6)).save()
Link(title = "The Burger Lab: Presenting, the Flood Burger",
up_votes = 48,
down_votes = 17,
submitted = now-timedelta(hours=5)).save()
Link(title="How to see polarization with the naked eye",
up_votes = 74,
down_votes = 13,
submitted = now-timedelta(hours=10)).save()
map_f = """
function() {
emit(this[~id], {up_delta: this[~up_votes] - this[~down_votes],
sub_date: this[~submitted].getTime() / 1000})
}
"""
reduce_f = """
function(key, values) {
data = values[0];
x = data.up_delta;
// calculate time diff between reddit epoch and submission
sec_since_epoch = data.sub_date - reddit_epoch;
// calculate 'Y'
if(x > 0) {
y = 1;
} else if (x == 0) {
y = 0;
} else {
y = -1;
}
// calculate 'Z', the maximal value
if(Math.abs(x) >= 1) {
z = Math.abs(x);
} else {
z = 1;
}
return {x: x, y: y, z: z, t_s: sec_since_epoch};
}
"""
finalize_f = """
function(key, value) {
// f(sec_since_epoch,y,z) =
// log10(z) + ((y*sec_since_epoch) / 45000)
z_10 = Math.log(value.z) / Math.log(10);
weight = z_10 + ((value.y * value.t_s) / 45000);
return weight;
}
"""
# provide the reddit epoch (used for ranking) as a variable available
# to all phases of the map/reduce operation: map, reduce, and finalize.
reddit_epoch = mktime(datetime(2005, 12, 8, 7, 46, 43).timetuple())
scope = {'reddit_epoch': reddit_epoch}
# run a map/reduce operation across all links. ordering is set
# to "-value", which orders the "weight" value returned from
# "finalize_f" in descending order.
results = Link.objects.order_by("-value")
results = results.map_reduce(map_f,
reduce_f,
finalize_f=finalize_f,
scope=scope)
results = list(results)
# assert troublesome Buzz article is ranked 1st
self.assertTrue(results[0].object.title.startswith("Google Buzz"))
# assert laser vision is ranked last
self.assertTrue(results[-1].object.title.startswith("How to see"))
Link.drop_collection()
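For reference, the weight computed by reduce_f and finalize_f above boils down to the following plain-Python formula; the helper name is an assumption, included only as a sanity check for the map/reduce output:

    import math

    def hot_weight(up_votes, down_votes, seconds_since_reddit_epoch):
        # Mirrors finalize_f: log10(z) + (y * t_s / 45000)
        x = up_votes - down_votes
        y = 1 if x > 0 else (0 if x == 0 else -1)
        z = max(abs(x), 1)
        return math.log10(z) + (y * seconds_since_reddit_epoch) / 45000.0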
def test_item_frequencies(self):
"""Ensure that item frequencies are properly generated from lists.
"""
class BlogPost(Document):
hits = IntField()
tags = ListField(StringField(), name='blogTags')
tags = ListField(StringField(), db_field='blogTags')
BlogPost.drop_collection()
@@ -364,7 +906,7 @@ class QuerySetTest(unittest.TestCase):
tags = ListField(StringField())
@queryset_manager
def music_posts(queryset):
def music_posts(doc_cls, queryset):
return queryset(tags='music')
BlogPost.drop_collection()
@@ -387,26 +929,26 @@ class QuerySetTest(unittest.TestCase):
"""Ensure that the correct field name is used when querying.
"""
class Comment(EmbeddedDocument):
content = StringField(name='commentContent')
content = StringField(db_field='commentContent')
class BlogPost(Document):
title = StringField(name='postTitle')
title = StringField(db_field='postTitle')
comments = ListField(EmbeddedDocumentField(Comment),
name='postComments')
db_field='postComments')
BlogPost.drop_collection()
data = {'title': 'Post 1', 'comments': [Comment(content='test')]}
BlogPost(**data).save()
self.assertTrue('postTitle' in
BlogPost.objects(title=data['title'])._query)
self.assertFalse('title' in
BlogPost.objects(title=data['title'])._query)
self.assertEqual(len(BlogPost.objects(title=data['title'])), 1)
self.assertTrue('postComments.commentContent' in
BlogPost.objects(comments__content='test')._query)
self.assertEqual(len(BlogPost.objects(comments__content='test')), 1)
@@ -427,7 +969,7 @@ class QuerySetTest(unittest.TestCase):
post.save()
# Test that query may be performed by providing a document as a value
# while using a ReferenceField's name - the document should be
# converted to an DBRef, which is legal, unlike a Document object
post_obj = BlogPost.objects(author=person).first()
self.assertEqual(post.id, post_obj.id)
@@ -438,6 +980,30 @@ class QuerySetTest(unittest.TestCase):
BlogPost.drop_collection()
def test_update_value_conversion(self):
"""Ensure that values used in updates are converted before use.
"""
class Group(Document):
members = ListField(ReferenceField(self.Person))
Group.drop_collection()
user1 = self.Person(name='user1')
user1.save()
user2 = self.Person(name='user2')
user2.save()
group = Group()
group.save()
Group.objects(id=group.id).update(set__members=[user1, user2])
group.reload()
self.assertTrue(len(group.members) == 2)
self.assertEqual(group.members[0].name, user1.name)
self.assertEqual(group.members[1].name, user2.name)
Group.drop_collection()
def test_types_index(self):
"""Ensure that and index is used when '_types' is being used in a
@@ -466,13 +1032,50 @@ class QuerySetTest(unittest.TestCase):
BlogPost.drop_collection()
def test_bulk(self):
"""Ensure bulk querying by object id returns a proper dict.
"""
class BlogPost(Document):
title = StringField()
BlogPost.drop_collection()
post_1 = BlogPost(title="Post #1")
post_2 = BlogPost(title="Post #2")
post_3 = BlogPost(title="Post #3")
post_4 = BlogPost(title="Post #4")
post_5 = BlogPost(title="Post #5")
post_1.save()
post_2.save()
post_3.save()
post_4.save()
post_5.save()
ids = [post_1.id, post_2.id, post_5.id]
objects = BlogPost.objects.in_bulk(ids)
self.assertEqual(len(objects), 3)
self.assertTrue(post_1.id in objects)
self.assertTrue(post_2.id in objects)
self.assertTrue(post_5.id in objects)
self.assertTrue(objects[post_1.id].title == post_1.title)
self.assertTrue(objects[post_2.id].title == post_2.title)
self.assertTrue(objects[post_5.id].title == post_5.title)
BlogPost.drop_collection()
def tearDown(self):
self.Person.drop_collection()
class QTest(unittest.TestCase):
def test_or_and(self):
"""Ensure that Q objects may be combined correctly.
"""
q1 = Q(name='test')
q2 = Q(age__gte=18)
@@ -493,8 +1096,8 @@ class QTest(unittest.TestCase):
examples = [
({'name': 'test'}, 'this.name == i0f0', {'i0f0': 'test'}),
({'age': {'$gt': 18}}, 'this.age > i0f0o0', {'i0f0o0': 18}),
({'name': 'test', 'age': {'$gt': 18, '$lte': 65}},
'this.age <= i0f0o0 && this.age > i0f0o1 && this.name == i0f1',
{'i0f0o0': 65, 'i0f0o1': 18, 'i0f1': 'test'}),
]
for item, js, scope in examples: