Compare commits

38 Commits

21b7d8f8ea, 3357b55fbf, f01add9ef5, ce69428cc6, 1818cf7114, b375c41586,
d85ee4e051, cfc394963f, e7380e3676, 597ef8b947, 484bc1e6f0, afd416c84e,
84d7987108, ec927bdd63, df7d4cbc47, dc51362f0b, da2d282cf6, 3b37bf4794,
42a58dda57, 4d695a3544, 45080d3fd1, 9195d96705, 54d276f6a7, 2a7fc03e79,
eb3e6963fa, 960aea2fd4, ef5815e4a5, b7e8108edd, d48296eacc, e0a546000d,
4c93e2945c, a6d64b2010, 2e74c93878, ccb4827ec9, bb4444f54d, 8ad0df41a0,
aa9cba38c4, 12a7fc1af1
@@ -27,6 +27,8 @@ Querying

.. autoclass:: mongoengine.queryset.QuerySet
   :members:

.. automethod:: mongoengine.queryset.QuerySet.__call__

.. autofunction:: mongoengine.queryset.queryset_manager

Fields
@@ -2,13 +2,43 @@

Changelog
=========

Changes in v0.2.2
=================
- Fixed bug that prevented indexes from being used on ``ListField``\ s
- ``Document.filter()`` added as an alias to ``Document.__call__()``
- ``validate()`` may now be used on ``EmbeddedDocument``\ s

Changes in v0.2.1
=================
- Added a MongoEngine backend for Django sessions
- Added ``force_insert`` to ``Document.save()``
- Improved querying syntax for ``ListField`` and ``EmbeddedDocumentField``
- Added support for user-defined primary keys (``_id`` in MongoDB)

Changes in v0.2
===============
- Added ``Q`` class for building advanced queries
- Added ``QuerySet`` methods for atomic updates to documents
- Fields may now specify ``unique=True`` to enforce uniqueness across a
  collection
- Added option for default document ordering
- Fixed bug in index definitions

Changes in v0.1.3
=================
- Added Django authentication backend
- Added ``Document.meta`` support for indexes, which are ensured just before
  querying takes place
- A few minor bugfixes


Changes in v0.1.2
=================
- Query values may be processed before being used in queries
- Made connections lazy
- Fixed bug in Document dictionary-style access
- Added ``BooleanField``
- Added ``Document.reload()`` method


Changes in v0.1.1
docs/django.rst (new file, 46 lines)
@@ -0,0 +1,46 @@
=============================
Using MongoEngine with Django
=============================

Connecting
==========
In your **settings.py** file, ignore the standard database settings (unless you
also plan to use the ORM in your project), and instead call
:func:`~mongoengine.connect` somewhere in the settings module.

Authentication
==============
MongoEngine includes a Django authentication backend, which uses MongoDB. The
:class:`~mongoengine.django.auth.User` model is a MongoEngine
:class:`~mongoengine.Document`, but implements most of the methods and
attributes that the standard Django :class:`User` model does - so the two are
moderately compatible. Using this backend will allow you to store users in
MongoDB but still use much of the Django authentication infrastructure (such as
the :func:`login_required` decorator and the :func:`authenticate` function). To
enable the MongoEngine auth backend, add the following to your **settings.py**
file::

    AUTHENTICATION_BACKENDS = (
        'mongoengine.django.auth.MongoEngineBackend',
    )

The :mod:`~mongoengine.django.auth` module also contains a
:func:`~mongoengine.django.auth.get_user` helper function that takes a user's
:attr:`id` and returns a :class:`~mongoengine.django.auth.User` object.

.. versionadded:: 0.1.3
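
A minimal sketch of how the backend might be used -- the credentials and the
choice of storing the id in the session are illustrative, not part of
MongoEngine::

    from django.contrib.auth import authenticate
    from mongoengine.django.auth import get_user

    # authenticate() delegates to MongoEngineBackend behind the scenes
    user = authenticate(username='bob', password='s3cret')
    if user is not None:
        user_id = str(user.id)    # store this in a session or signed cookie

    # later, turn the stored id back into a User (or AnonymousUser)
    current_user = get_user(user_id)
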
Sessions
========
Django allows the use of different backend stores for its sessions. MongoEngine
provides a MongoDB-based session backend for Django, which allows you to use
sessions in your Django application with just MongoDB. To enable the MongoEngine
session backend, ensure that your settings module has
``'django.contrib.sessions.middleware.SessionMiddleware'`` in the
``MIDDLEWARE_CLASSES`` field and ``'django.contrib.sessions'`` in your
``INSTALLED_APPS``. From there, all you need to do is add the following line
into your settings module::

    SESSION_ENGINE = 'mongoengine.django.sessions'

.. versionadded:: 0.2.1
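
Once that is set, session access in your views is unchanged; the data is simply
persisted to MongoDB (in the ``django_session`` collection). For example
(illustrative)::

    # inside any view
    request.session['visits'] = request.session.get('visits', 0) + 1
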
docs/guide/connecting.rst (new file, 20 lines)
@@ -0,0 +1,20 @@
.. _guide-connecting:

=====================
Connecting to MongoDB
=====================
To connect to a running instance of :program:`mongod`, use the
:func:`~mongoengine.connect` function. The first argument is the name of the
database to connect to. If the database does not exist, it will be created. If
the database requires authentication, :attr:`username` and :attr:`password`
arguments may be provided::

    from mongoengine import connect
    connect('project1', username='webapp', password='pwd123')

By default, MongoEngine assumes that the :program:`mongod` instance is running
on **localhost** on port **27017**. If MongoDB is running elsewhere, you may
provide :attr:`host` and :attr:`port` arguments to
:func:`~mongoengine.connect`::

    connect('project1', host='192.168.1.35', port=12345)
docs/guide/defining-documents.rst (new file, 238 lines)
@@ -0,0 +1,238 @@
==================
Defining documents
==================
In MongoDB, a **document** is roughly equivalent to a **row** in an RDBMS. When
working with relational databases, rows are stored in **tables**, which have a
strict **schema** that the rows follow. MongoDB stores documents in
**collections** rather than tables - the principal difference is that no schema
is enforced at a database level.

Defining a document's schema
============================
MongoEngine allows you to define schemata for documents, as this helps to reduce
coding errors and allows for utility methods to be defined on fields that may
be present.

To define a schema for a document, create a class that inherits from
:class:`~mongoengine.Document`. Fields are specified by adding **field
objects** as class attributes to the document class::

    from mongoengine import *
    import datetime

    class Page(Document):
        title = StringField(max_length=200, required=True)
        date_modified = DateTimeField(default=datetime.datetime.now)

Fields
======
By default, fields are not required. To make a field mandatory, set the
:attr:`required` keyword argument of a field to ``True``. Fields may also have
validation constraints available (such as :attr:`max_length` in the example
above). Fields may also take default values, which will be used if a value is
not provided. Default values may optionally be a callable, which will be called
to retrieve the value (such as in the above example). The field types available
are as follows (a short example using several of them appears after the list):

* :class:`~mongoengine.StringField`
* :class:`~mongoengine.IntField`
* :class:`~mongoengine.FloatField`
* :class:`~mongoengine.DateTimeField`
* :class:`~mongoengine.ListField`
* :class:`~mongoengine.ObjectIdField`
* :class:`~mongoengine.EmbeddedDocumentField`
* :class:`~mongoengine.ReferenceField`
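
A hypothetical document mixing a few of these field types (the ``Employee``
class is purely illustrative, and reuses the ``import datetime`` from the
example above)::

    class Employee(Document):
        name = StringField(required=True, max_length=100)
        age = IntField()
        salary = FloatField(default=0.0)
        hired = DateTimeField(default=datetime.datetime.now)
        skills = ListField(StringField(max_length=30))
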
List fields
^^^^^^^^^^^
MongoDB allows the storage of lists of items. To add a list of items to a
:class:`~mongoengine.Document`, use the :class:`~mongoengine.ListField` field
type. :class:`~mongoengine.ListField` takes another field object as its first
argument, which specifies the type of elements that may be stored within the
list::

    class Page(Document):
        tags = ListField(StringField(max_length=50))

Embedded documents
^^^^^^^^^^^^^^^^^^
MongoDB has the ability to embed documents within other documents. Schemata may
be defined for these embedded documents, just as they may be for regular
documents. To create an embedded document, just define a document as usual, but
inherit from :class:`~mongoengine.EmbeddedDocument` rather than
:class:`~mongoengine.Document`::

    class Comment(EmbeddedDocument):
        content = StringField()

To embed the document within another document, use the
:class:`~mongoengine.EmbeddedDocumentField` field type, providing the embedded
document class as the first argument::

    class Page(Document):
        comments = ListField(EmbeddedDocumentField(Comment))

    comment1 = Comment(content='Good work!')
    comment2 = Comment(content='Nice article!')
    page = Page(comments=[comment1, comment2])

Reference fields
^^^^^^^^^^^^^^^^
References to other documents in the database may be stored using the
:class:`~mongoengine.ReferenceField`. Pass in another document class as the
first argument to the constructor, then simply assign document objects to the
field::

    class User(Document):
        name = StringField()

    class Page(Document):
        content = StringField()
        author = ReferenceField(User)

    john = User(name="John Smith")
    john.save()

    post = Page(content="Test Page")
    post.author = john
    post.save()

The :class:`User` object is automatically turned into a reference behind the
scenes, and dereferenced when the :class:`Page` object is retrieved.
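
For example (an illustrative sketch continuing from the snippet above), the
reference is followed transparently on attribute access::

    page = Page.objects.first()
    print page.author.name      # prints 'John Smith'
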
Uniqueness constraints
^^^^^^^^^^^^^^^^^^^^^^
MongoEngine allows you to specify that a field should be unique across a
collection by providing ``unique=True`` to a :class:`~mongoengine.Field`\ 's
constructor. If you try to save a document that has the same value for a unique
field as a document that is already in the database, a
:class:`~mongoengine.OperationError` will be raised. You may also specify
multi-field uniqueness constraints by using :attr:`unique_with`, which may be
either a single field name, or a list or tuple of field names::

    class User(Document):
        username = StringField(unique=True)
        first_name = StringField()
        last_name = StringField(unique_with='first_name')
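
A quick sketch of what this looks like in practice (the documents here are
illustrative, and assume the unique index has already been ensured)::

    User(username='bob', first_name='Bob', last_name='Smith').save()
    try:
        User(username='bob', first_name='Robert', last_name='Jones').save()
    except OperationError:
        print 'username is already taken'
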
Document collections
====================
Document classes that inherit **directly** from :class:`~mongoengine.Document`
will have their own **collection** in the database. The name of the collection
is by default the name of the class, converted to lowercase (so in the example
above, the collection would be called `page`). If you need to change the name
of the collection (e.g. to use MongoEngine with an existing database), then
create a class dictionary attribute called :attr:`meta` on your document, and
set :attr:`collection` to the name of the collection that you want your
document class to use::

    class Page(Document):
        title = StringField(max_length=200, required=True)
        meta = {'collection': 'cmsPage'}

Capped collections
^^^^^^^^^^^^^^^^^^
A :class:`~mongoengine.Document` may use a **Capped Collection** by specifying
:attr:`max_documents` and :attr:`max_size` in the :attr:`meta` dictionary.
:attr:`max_documents` is the maximum number of documents that may be stored in
the collection, and :attr:`max_size` is the maximum size of the collection in
bytes. If :attr:`max_size` is not specified and :attr:`max_documents` is,
:attr:`max_size` defaults to 10000000 bytes (10MB). The following example shows
a :class:`Log` document that will be limited to 1000 entries and 2MB of disk
space::

    class Log(Document):
        ip_address = StringField()
        meta = {'max_documents': 1000, 'max_size': 2000000}

Indexes
=======
You can specify indexes on collections to make querying faster. This is done
by creating a list of index specifications called :attr:`indexes` in the
:attr:`~mongoengine.Document.meta` dictionary, where an index specification may
either be a single field name, or a tuple containing multiple field names. A
direction may be specified on fields by prefixing the field name with a **+**
or a **-** sign. Note that direction only matters on multi-field indexes. ::

    class Page(Document):
        title = StringField()
        rating = StringField()
        meta = {
            'indexes': ['title', ('title', '-rating')]
        }

Ordering
========
A default ordering can be specified for your
:class:`~mongoengine.queryset.QuerySet` using the :attr:`ordering` attribute of
:attr:`~mongoengine.Document.meta`. Ordering will be applied when the
:class:`~mongoengine.queryset.QuerySet` is created, and can be overridden by
subsequent calls to :meth:`~mongoengine.queryset.QuerySet.order_by`. ::

    from datetime import datetime

    class BlogPost(Document):
        title = StringField()
        published_date = DateTimeField()

        meta = {
            'ordering': ['-published_date']
        }

    blog_post_1 = BlogPost(title="Blog Post #1")
    blog_post_1.published_date = datetime(2010, 1, 5, 0, 0, 0)

    blog_post_2 = BlogPost(title="Blog Post #2")
    blog_post_2.published_date = datetime(2010, 1, 6, 0, 0, 0)

    blog_post_3 = BlogPost(title="Blog Post #3")
    blog_post_3.published_date = datetime(2010, 1, 7, 0, 0, 0)

    blog_post_1.save()
    blog_post_2.save()
    blog_post_3.save()

    # get the "first" BlogPost using default ordering
    # from BlogPost.meta.ordering
    latest_post = BlogPost.objects.first()
    assert latest_post.title == "Blog Post #3"

    # override default ordering, order BlogPosts by "published_date"
    first_post = BlogPost.objects.order_by("+published_date").first()
    assert first_post.title == "Blog Post #1"

Document inheritance
====================
To create a specialised type of a :class:`~mongoengine.Document` you have
defined, you may subclass it and add any extra fields or methods you may need.
As this new class is not a direct subclass of
:class:`~mongoengine.Document`, it will not be stored in its own collection; it
will use the same collection as its superclass uses. This allows for more
convenient and efficient retrieval of related documents::

    # Stored in a collection named 'page'
    class Page(Document):
        title = StringField(max_length=200, required=True)

    # Also stored in the collection named 'page'
    class DatedPage(Page):
        date = DateTimeField()

Working with existing data
^^^^^^^^^^^^^^^^^^^^^^^^^^
To enable correct retrieval of documents involved in this kind of hierarchy,
two extra attributes are stored on each document in the database: :attr:`_cls`
and :attr:`_types`. These are hidden from the user through the MongoEngine
interface, but may not be present if you are trying to use MongoEngine with
an existing database. For this reason, you may disable this inheritance
mechanism, removing the dependency on :attr:`_cls` and :attr:`_types`, enabling
you to work with existing databases. To disable inheritance on a document
class, set :attr:`allow_inheritance` to ``False`` in the :attr:`meta`
dictionary::

    # Will work with data in an existing collection named 'cmsPage'
    class Page(Document):
        title = StringField(max_length=200, required=True)
        meta = {
            'collection': 'cmsPage',
            'allow_inheritance': False,
        }
docs/guide/document-instances.rst (new file, 65 lines)
@@ -0,0 +1,65 @@
==================
Document instances
==================
To create a new document object, create an instance of the relevant document
class, providing values for its fields as its constructor keyword arguments.
You may provide values for any of the fields on the document::

    >>> page = Page(title="Test Page")
    >>> page.title
    'Test Page'

You may also assign values to the document's fields using standard object
attribute syntax::

    >>> page.title = "Example Page"
    >>> page.title
    'Example Page'

Saving and deleting documents
-----------------------------
To save the document to the database, call the
:meth:`~mongoengine.Document.save` method. If the document does not exist in
the database, it will be created. If it does already exist, it will be
updated.

To delete a document, call the :meth:`~mongoengine.Document.delete` method.
Note that this will only work if the document exists in the database and has a
valid :attr:`id`.
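
For example (an illustrative sketch using the ``Page`` document from the
previous section)::

    >>> page = Page(title="Test Page")
    >>> page.save()           # insert the new document
    >>> page.title = "Test Page (updated)"
    >>> page.save()           # update the existing document
    >>> page.delete()         # remove it from the database
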
.. seealso::
   :ref:`guide-atomic-updates`

Document IDs
------------
Each document in the database has a unique id. This may be accessed through the
:attr:`id` attribute on :class:`~mongoengine.Document` objects. Usually, the id
will be generated automatically by the database server when the object is
saved, meaning that you may only access the :attr:`id` field once a document
has been saved::

    >>> page = Page(title="Test Page")
    >>> page.id
    >>> page.save()
    >>> page.id
    ObjectId('123456789abcdef000000000')

Alternatively, you may define one of your own fields to be the document's
"primary key" by providing ``primary_key=True`` as a keyword argument to a
field's constructor. Under the hood, MongoEngine will use this field as the
:attr:`id`; in fact :attr:`id` is actually aliased to your primary key field so
you may still use :attr:`id` to access the primary key if you want::

    >>> class User(Document):
    ...     email = StringField(primary_key=True)
    ...     name = StringField()
    ...
    >>> bob = User(email='bob@example.com', name='Bob')
    >>> bob.save()
    >>> bob.id == bob.email == 'bob@example.com'
    True

.. note::
   If you define your own primary key field, the field implicitly becomes
   required, so a :class:`ValidationError` will be thrown if you don't provide
   it.
docs/guide/index.rst (new file, 12 lines)
@@ -0,0 +1,12 @@
==========
User Guide
==========

.. toctree::
   :maxdepth: 2

   installing
   connecting
   defining-documents
   document-instances
   querying
docs/guide/installing.rst (new file, 31 lines)
@@ -0,0 +1,31 @@
======================
Installing MongoEngine
======================
To use MongoEngine, you will need to download `MongoDB <http://mongodb.org/>`_
and ensure it is running in an accessible location. You will also need
`PyMongo <http://api.mongodb.org/python>`_ to use MongoEngine, but if you
install MongoEngine using setuptools, then the dependencies will be handled for
you.

MongoEngine is available on PyPI, so to use it you can use
:program:`easy_install`:

.. code-block:: console

    # easy_install mongoengine

Alternatively, if you don't have setuptools installed, `download it from PyPI
<http://pypi.python.org/pypi/mongoengine/>`_ and run

.. code-block:: console

    # python setup.py install

To use the bleeding-edge version of MongoEngine, you can get the source from
`GitHub <http://github.com/hmarr/mongoengine/>`_ and install it as above:

.. code-block:: console

    # git clone git://github.com/hmarr/mongoengine
    # cd mongoengine
    # python setup.py install
docs/guide/querying.rst (new file, 196 lines)
@@ -0,0 +1,196 @@
=====================
Querying the database
=====================
:class:`~mongoengine.Document` classes have an :attr:`objects` attribute, which
is used for accessing the objects in the database associated with the class.
The :attr:`objects` attribute is actually a
:class:`~mongoengine.queryset.QuerySetManager`, which creates and returns a new
:class:`~mongoengine.queryset.QuerySet` object on access. The
:class:`~mongoengine.queryset.QuerySet` object may be iterated over to
fetch documents from the database::

    # Prints out the names of all the users in the database
    for user in User.objects:
        print user.name

Filtering queries
-----------------
The query may be filtered by calling the
:class:`~mongoengine.queryset.QuerySet` object with field lookup keyword
arguments. The keys in the keyword arguments correspond to fields on the
:class:`~mongoengine.Document` you are querying::

    # This will return a QuerySet that will only iterate over users whose
    # 'country' field is set to 'uk'
    uk_users = User.objects(country='uk')

Fields on embedded documents may also be referred to using field lookup syntax
by using a double-underscore in place of the dot in object attribute access
syntax::

    # This will return a QuerySet that will only iterate over pages that have
    # been written by a user whose 'country' field is set to 'uk'
    uk_pages = Page.objects(author__country='uk')

Querying lists
^^^^^^^^^^^^^^
On most fields, this syntax will look up documents where the field specified
matches the given value exactly, but when the field refers to a
:class:`~mongoengine.ListField`, a single item may be provided, in which case
lists that contain that item will be matched::

    class Page(Document):
        tags = ListField(StringField())

    # This will match all pages that have the word 'coding' as an item in the
    # 'tags' list
    Page.objects(tags='coding')

Query operators
---------------
Operators other than equality may also be used in queries; just attach the
operator name to a key with a double-underscore::

    # Only find users whose age is 18 or less
    young_users = User.objects(age__lte=18)

Available operators are as follows (a combined example appears after the
list):

* ``neq`` -- not equal to
* ``lt`` -- less than
* ``lte`` -- less than or equal to
* ``gt`` -- greater than
* ``gte`` -- greater than or equal to
* ``in`` -- value is in list (a list of values should be provided)
* ``nin`` -- value is not in list (a list of values should be provided)
* ``mod`` -- ``value % x == y``, where ``x`` and ``y`` are two provided values
* ``all`` -- every item in the list of values provided is in the array
* ``size`` -- the size of the array is the same as the value provided
* ``exists`` -- value for field exists
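
For instance, continuing with the ``User`` examples above (an illustrative
snippet; the ``age`` and ``country`` fields are assumed)::

    # Users who are 18 or older
    adults = User.objects(age__gte=18)

    # Users from one of the listed countries
    europeans = User.objects(country__in=['uk', 'de', 'fr'])

    # Users that have a 'country' field set at all
    with_country = User.objects(country__exists=True)
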
Limiting and skipping results
-----------------------------
Just as with traditional ORMs, you may limit the number of results returned, or
skip a number of results in your query.
:meth:`~mongoengine.queryset.QuerySet.limit` and
:meth:`~mongoengine.queryset.QuerySet.skip` methods are available on
:class:`~mongoengine.queryset.QuerySet` objects, but the preferred syntax for
achieving this is using array-slicing syntax::

    # Only the first 5 people
    users = User.objects[:5]

    # All except for the first 5 people
    users = User.objects[5:]

    # 5 users, starting from the 10th user found
    users = User.objects[10:15]

Aggregation
-----------
MongoDB provides some aggregation methods out of the box, but there are not as
many as you typically get with an RDBMS. MongoEngine provides a wrapper around
the built-in methods and provides some of its own, which are implemented as
JavaScript code that is executed on the database server.

Counting results
^^^^^^^^^^^^^^^^
Just as with limiting and skipping results, there is a method on
:class:`~mongoengine.queryset.QuerySet` objects --
:meth:`~mongoengine.queryset.QuerySet.count` -- but there is also a more
Pythonic way of achieving this::

    num_users = len(User.objects)

Further aggregation
^^^^^^^^^^^^^^^^^^^
You may sum over the values of a specific field on documents using
:meth:`~mongoengine.queryset.QuerySet.sum`::

    yearly_expense = Employee.objects.sum('salary')

.. note::
   If the field isn't present on a document, that document will be excluded
   from the sum.

To get the average (mean) of a field on a collection of documents, use
:meth:`~mongoengine.queryset.QuerySet.average`::

    mean_age = User.objects.average('age')

As MongoDB provides native lists, MongoEngine provides a helper method to get a
dictionary of the frequencies of items in lists across an entire collection --
:meth:`~mongoengine.queryset.QuerySet.item_frequencies`. An example of its use
would be generating "tag-clouds"::

    class Article(Document):
        tag = ListField(StringField())

    # After adding some tagged articles...
    tag_freqs = Article.objects.item_frequencies('tag', normalize=True)

    from operator import itemgetter
    top_tags = sorted(tag_freqs.items(), key=itemgetter(1), reverse=True)[:10]

Advanced queries
----------------
Sometimes calling a :class:`~mongoengine.queryset.QuerySet` object with keyword
arguments can't fully express the query you want to use -- for example if you
need to combine a number of constraints using *and* and *or*. This is made
possible in MongoEngine through the :class:`~mongoengine.queryset.Q` class.
A :class:`~mongoengine.queryset.Q` object represents part of a query, and
can be initialised using the same keyword-argument syntax you use to query
documents. To build a complex query, you may combine
:class:`~mongoengine.queryset.Q` objects using the ``&`` (and) and ``|`` (or)
operators. To use :class:`~mongoengine.queryset.Q` objects, pass them in
as positional arguments to :attr:`Document.objects` when you filter it by
calling it with keyword arguments::

    # Get published posts
    Post.objects(Q(published=True) | Q(publish_date__lte=datetime.now()))

    # Get top posts
    Post.objects((Q(featured=True) & Q(hits__gte=1000)) | Q(hits__gte=5000))

.. warning::
   Only use these advanced queries if absolutely necessary as they will execute
   significantly slower than regular queries. This is because they are not
   natively supported by MongoDB -- they are compiled to JavaScript and sent
   to the server for execution.

.. _guide-atomic-updates:

Atomic updates
--------------
Documents may be updated atomically by using the
:meth:`~mongoengine.queryset.QuerySet.update_one` and
:meth:`~mongoengine.queryset.QuerySet.update` methods on a
:meth:`~mongoengine.queryset.QuerySet`. There are several different "modifiers"
that you may use with these methods:

* ``set`` -- set a particular value
* ``unset`` -- delete a particular value (since MongoDB v1.3+)
* ``inc`` -- increment a value by a given amount
* ``dec`` -- decrement a value by a given amount
* ``push`` -- append a value to a list
* ``push_all`` -- append several values to a list
* ``pull`` -- remove a value from a list
* ``pull_all`` -- remove several values from a list

The syntax for atomic updates is similar to the querying syntax, but the
modifier comes before the field, not after it::

    >>> post = BlogPost(title='Test', page_views=0, tags=['database'])
    >>> post.save()
    >>> BlogPost.objects(id=post.id).update_one(inc__page_views=1)
    >>> post.reload()  # the document has been changed, so we need to reload it
    >>> post.page_views
    1
    >>> BlogPost.objects(id=post.id).update_one(set__title='Example Post')
    >>> post.reload()
    >>> post.title
    'Example Post'
    >>> BlogPost.objects(id=post.id).update_one(push__tags='nosql')
    >>> post.reload()
    >>> post.tags
    ['database', 'nosql']
@@ -1,5 +1,6 @@
==============================
MongoEngine User Documentation
==============================

MongoEngine is an Object-Document Mapper, written in Python for working with
MongoDB. To install it, simply run

@@ -14,8 +15,9 @@ The source is available on `GitHub <http://github.com/hmarr/mongoengine>`_.
   :maxdepth: 2

   tutorial
   guide/index
   apireference
   django
   changelog

Indices and tables
@@ -1,385 +0,0 @@
(The old single-page user guide is deleted in this change; its content is now split across the docs/guide/ pages shown above.)
@@ -12,7 +12,7 @@ __all__ = (document.__all__ + fields.__all__ + connection.__all__ +

__author__ = 'Harry Marr'

VERSION = (0, 1, 2)
VERSION = (0, 2, 2)

def get_version():
    version = '%s.%s' % (VERSION[0], VERSION[1])
@@ -1,4 +1,4 @@
from queryset import QuerySetManager
from queryset import QuerySet, QuerySetManager

import pymongo

@@ -11,11 +11,18 @@ class BaseField(object):
    """A base class for fields in a MongoDB document. Instances of this class
    may be added to subclasses of `Document` to define a document's schema.
    """

    # Fields may have _types inserted into indexes by default
    _index_with_types = True

    def __init__(self, name=None, required=False, default=None):
        self.name = name
        self.required = required
    def __init__(self, name=None, required=False, default=None, unique=False,
                 unique_with=None, primary_key=False):
        self.name = name if not primary_key else '_id'
        self.required = required or primary_key
        self.default = default
        self.unique = bool(unique or unique_with)
        self.unique_with = unique_with
        self.primary_key = primary_key

    def __get__(self, instance, owner):
        """Descriptor for retrieving a value from a field in a document. Do

@@ -65,11 +72,11 @@ class ObjectIdField(BaseField):
    """

    def to_python(self, value):
        return str(value)
        return unicode(value)

    def to_mongo(self, value):
        if not isinstance(value, pymongo.objectid.ObjectId):
            return pymongo.objectid.ObjectId(value)
            return pymongo.objectid.ObjectId(str(value))
        return value

    def prepare_query_value(self, value):

@@ -136,6 +143,9 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
        collection = name.lower()

        simple_class = True
        id_field = None
        base_indexes = []

        # Subclassed documents inherit collection from superclass
        for base in bases:
            if hasattr(base, '_meta') and 'collection' in base._meta:

@@ -149,13 +159,22 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
                simple_class = False
                collection = base._meta['collection']

                id_field = id_field or base._meta.get('id_field')
                base_indexes += base._meta.get('indexes', [])

        meta = {
            'collection': collection,
            'allow_inheritance': True,
            'max_documents': None,
            'max_size': None,
            'ordering': [],  # default ordering applied at runtime
            'indexes': [],  # indexes to be ensured at runtime
            'id_field': id_field,
        }

        # Apply document-defined meta options
        meta.update(attrs.get('meta', {}))

        # Only simple classes - direct subclasses of Document - may set
        # allow_inheritance to False
        if not simple_class and not meta['allow_inheritance']:

@@ -163,13 +182,59 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
                             '"allow_inheritance" to False')
        attrs['_meta'] = meta

        attrs['id'] = ObjectIdField(name='_id')

        # Set up collection manager, needs the class to have fields so use
        # DocumentMetaclass before instantiating CollectionManager object
        new_class = super_new(cls, name, bases, attrs)
        new_class.objects = QuerySetManager()

        user_indexes = [QuerySet._build_index_spec(new_class, spec)
                        for spec in meta['indexes']] + base_indexes
        new_class._meta['indexes'] = user_indexes

        unique_indexes = []
        for field_name, field in new_class._fields.items():
            # Generate a list of indexes needed by uniqueness constraints
            if field.unique:
                field.required = True
                unique_fields = [field_name]

                # Add any unique_with fields to the back of the index spec
                if field.unique_with:
                    if isinstance(field.unique_with, basestring):
                        field.unique_with = [field.unique_with]

                    # Convert unique_with field names to real field names
                    unique_with = []
                    for other_name in field.unique_with:
                        parts = other_name.split('.')
                        # Lookup real name
                        parts = QuerySet._lookup_field(new_class, parts)
                        name_parts = [part.name for part in parts]
                        unique_with.append('.'.join(name_parts))
                        # Unique field should be required
                        parts[-1].required = True
                    unique_fields += unique_with

                # Add the new index to the list
                index = [(f, pymongo.ASCENDING) for f in unique_fields]
                unique_indexes.append(index)

            # Check for custom primary key
            if field.primary_key:
                if not new_class._meta['id_field']:
                    new_class._meta['id_field'] = field_name
                    # Make 'Document.id' an alias to the real primary key field
                    new_class.id = field
                    #new_class._fields['id'] = field
                else:
                    raise ValueError('Cannot override primary key field')

        new_class._meta['unique_indexes'] = unique_indexes

        if not new_class._meta['id_field']:
            new_class._meta['id_field'] = 'id'
            new_class.id = new_class._fields['id'] = ObjectIdField(name='_id')

        return new_class


@@ -186,6 +251,25 @@ class BaseDocument(object):
            value = getattr(self, attr_name, None)
            setattr(self, attr_name, value)

    def validate(self):
        """Ensure that all fields' values are valid and that required fields
        are present.
        """
        # Get a list of tuples of field names and their current values
        fields = [(field, getattr(self, name))
                  for name, field in self._fields.items()]

        # Ensure that each field is matched to a valid value
        for field, value in fields:
            if value is not None:
                try:
                    field.validate(value)
                except (ValueError, AttributeError, AssertionError), e:
                    raise ValidationError('Invalid value for field of type "' +
                                          field.__class__.__name__ + '"')
            elif field.required:
                raise ValidationError('Field "%s" is required' % field.name)

    @classmethod
    def _get_subclasses(cls):
        """Return a dictionary of all subclasses (found recursively).

@@ -202,9 +286,7 @@ class BaseDocument(object):
        return all_subclasses

    def __iter__(self):
        # Use _data rather than _fields as iterator only looks at names so
        # values don't need to be converted to Python types
        return iter(self._data)
        return iter(self._fields)

    def __getitem__(self, name):
        """Dictionary-style field access, return a field's value if present.

@@ -234,6 +316,18 @@ class BaseDocument(object):
    def __len__(self):
        return len(self._data)

    def __repr__(self):
        try:
            u = unicode(self)
        except (UnicodeEncodeError, UnicodeDecodeError):
            u = '[Bad Unicode data]'
        return u'<%s: %s>' % (self.__class__.__name__, u)

    def __str__(self):
        if hasattr(self, '__unicode__'):
            return unicode(self).encode('utf-8')
        return '%s object' % self.__class__.__name__

    def to_mongo(self):
        """Return data dictionary ready for use with MongoDB.
        """
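
A quick illustrative sketch of how the new ``validate()`` method behaves (the
``Person`` class and the imports are assumptions, not part of this diff)::

    from mongoengine import Document, StringField
    from mongoengine.base import ValidationError

    class Person(Document):
        name = StringField(required=True, max_length=50)

    person = Person()          # 'name' is required but missing
    try:
        person.validate()
    except ValidationError:
        print 'document failed validation'
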
0
mongoengine/django/__init__.py
Normal file
0
mongoengine/django/__init__.py
Normal file
99
mongoengine/django/auth.py
Normal file
99
mongoengine/django/auth.py
Normal file
@@ -0,0 +1,99 @@
|
||||
from mongoengine import *
|
||||
|
||||
from django.utils.hashcompat import md5_constructor, sha_constructor
|
||||
from django.utils.encoding import smart_str
|
||||
from django.contrib.auth.models import AnonymousUser
|
||||
|
||||
import datetime
|
||||
|
||||
REDIRECT_FIELD_NAME = 'next'
|
||||
|
||||
def get_hexdigest(algorithm, salt, raw_password):
|
||||
raw_password, salt = smart_str(raw_password), smart_str(salt)
|
||||
if algorithm == 'md5':
|
||||
return md5_constructor(salt + raw_password).hexdigest()
|
||||
elif algorithm == 'sha1':
|
||||
return sha_constructor(salt + raw_password).hexdigest()
|
||||
raise ValueError('Got unknown password algorithm type in password')
|
||||
|
||||
|
||||
class User(Document):
|
||||
"""A User document that aims to mirror most of the API specified by Django
|
||||
at http://docs.djangoproject.com/en/dev/topics/auth/#users
|
||||
"""
|
||||
username = StringField(max_length=30, required=True)
|
||||
first_name = StringField(max_length=30)
|
||||
last_name = StringField(max_length=30)
|
||||
email = StringField()
|
||||
password = StringField(max_length=128)
|
||||
is_staff = BooleanField(default=False)
|
||||
is_active = BooleanField(default=True)
|
||||
is_superuser = BooleanField(default=False)
|
||||
last_login = DateTimeField(default=datetime.datetime.now)
|
||||
|
||||
def get_full_name(self):
|
||||
"""Returns the users first and last names, separated by a space.
|
||||
"""
|
||||
full_name = u'%s %s' % (self.first_name or '', self.last_name or '')
|
||||
return full_name.strip()
|
||||
|
||||
def is_anonymous(self):
|
||||
return False
|
||||
|
||||
def is_authenticated(self):
|
||||
return True
|
||||
|
||||
def set_password(self, raw_password):
|
||||
"""Sets the user's password - always use this rather than directly
|
||||
assigning to :attr:`~mongoengine.django.auth.User.password` as the
|
||||
password is hashed before storage.
|
||||
"""
|
||||
from random import random
|
||||
algo = 'sha1'
|
||||
salt = get_hexdigest(algo, str(random()), str(random()))[:5]
|
||||
hash = get_hexdigest(algo, salt, raw_password)
|
||||
self.password = '%s$%s$%s' % (algo, salt, hash)
|
||||
|
||||
def check_password(self, raw_password):
|
||||
"""Checks the user's password against a provided password - always use
|
||||
this rather than directly comparing to
|
||||
:attr:`~mongoengine.django.auth.User.password` as the password is
|
||||
hashed before storage.
|
||||
"""
|
||||
algo, salt, hash = self.password.split('$')
|
||||
return hash == get_hexdigest(algo, salt, raw_password)
|
||||
|
||||
@classmethod
|
||||
def create_user(cls, username, password, email=None):
|
||||
"""Create (and save) a new user with the given username, password and
|
||||
email address.
|
||||
"""
|
||||
user = User(username=username, email=email)
|
||||
user.set_password(password)
|
||||
user.save()
|
||||
return user
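A brief sketch of how the password helpers above fit together (assuming :func:`~mongoengine.connect` has already been called); the stored value has the form ``algo$salt$hash``::

    user = User.create_user('bob', 's3cret', email='bob@example.com')
    assert user.password.startswith('sha1$')   # algorithm, salt and hash joined by '$'
    assert user.check_password('s3cret')
    assert not user.check_password('wrong')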
|
||||
|
||||
|
||||
class MongoEngineBackend(object):
|
||||
"""Authenticate using MongoEngine and mongoengine.django.auth.User.
|
||||
"""
|
||||
|
||||
def authenticate(self, username=None, password=None):
|
||||
user = User.objects(username=username).first()
|
||||
if user:
|
||||
if password and user.check_password(password):
|
||||
return user
|
||||
return None
|
||||
|
||||
def get_user(self, user_id):
|
||||
return User.objects.with_id(user_id)
|
||||
|
||||
|
||||
def get_user(userid):
|
||||
"""Returns a User object from an id (User.id). Django's equivalent takes
|
||||
request, but taking an id instead leaves it up to the developer to store
|
||||
the id in any way they want (session, signed cookie, etc.)
|
||||
"""
|
||||
if not userid:
|
||||
return AnonymousUser()
|
||||
return MongoEngineBackend().get_user(userid) or AnonymousUser()
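To make Django use this backend, point ``AUTHENTICATION_BACKENDS`` at it in **settings.py** (a sketch of the expected wiring)::

    AUTHENTICATION_BACKENDS = (
        'mongoengine.django.auth.MongoEngineBackend',
    )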
|
63
mongoengine/django/sessions.py
Normal file
@@ -0,0 +1,63 @@
|
||||
from django.contrib.sessions.backends.base import SessionBase, CreateError
|
||||
from django.core.exceptions import SuspiciousOperation
|
||||
from django.utils.encoding import force_unicode
|
||||
|
||||
from mongoengine.document import Document
|
||||
from mongoengine import fields
|
||||
from mongoengine.queryset import OperationError
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
class MongoSession(Document):
|
||||
session_key = fields.StringField(primary_key=True, max_length=40)
|
||||
session_data = fields.StringField()
|
||||
expire_date = fields.DateTimeField()
|
||||
|
||||
meta = {'collection': 'django_session', 'allow_inheritance': False}
|
||||
|
||||
|
||||
class SessionStore(SessionBase):
|
||||
"""A MongoEngine-based session store for Django.
|
||||
"""
|
||||
|
||||
def load(self):
|
||||
try:
|
||||
s = MongoSession.objects(session_key=self.session_key,
|
||||
expire_date__gt=datetime.now())[0]
|
||||
return self.decode(force_unicode(s.session_data))
|
||||
except (IndexError, SuspiciousOperation):
|
||||
self.create()
|
||||
return {}
|
||||
|
||||
def exists(self, session_key):
|
||||
return bool(MongoSession.objects(session_key=session_key).first())
|
||||
|
||||
def create(self):
|
||||
while True:
|
||||
self.session_key = self._get_new_session_key()
|
||||
try:
|
||||
self.save(must_create=True)
|
||||
except CreateError:
|
||||
continue
|
||||
self.modified = True
|
||||
self._session_cache = {}
|
||||
return
|
||||
|
||||
def save(self, must_create=False):
|
||||
s = MongoSession(session_key=self.session_key)
|
||||
s.session_data = self.encode(self._get_session(no_load=must_create))
|
||||
s.expire_date = self.get_expiry_date()
|
||||
try:
|
||||
s.save(force_insert=must_create, safe=True)
|
||||
except OperationError:
|
||||
if must_create:
|
||||
raise CreateError
|
||||
raise
|
||||
|
||||
def delete(self, session_key=None):
|
||||
if session_key is None:
|
||||
if self.session_key is None:
|
||||
return
|
||||
session_key = self.session_key
|
||||
MongoSession.objects(session_key=session_key).delete()
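To use this session store, set ``SESSION_ENGINE`` in **settings.py** (assuming the module is importable as shown)::

    SESSION_ENGINE = 'mongoengine.django.sessions'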
|
@@ -1,9 +1,12 @@
|
||||
from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument,
|
||||
ValidationError)
|
||||
from queryset import OperationError
|
||||
from connection import _get_db
|
||||
|
||||
import pymongo
|
||||
|
||||
__all__ = ['Document', 'EmbeddedDocument']
|
||||
|
||||
__all__ = ['Document', 'EmbeddedDocument', 'ValidationError', 'OperationError']
|
||||
|
||||
|
||||
class EmbeddedDocument(BaseDocument):
|
||||
@@ -44,53 +47,65 @@ class Document(BaseDocument):
|
||||
maximum size of the collection in bytes. If :attr:`max_size` is not
|
||||
specified and :attr:`max_documents` is, :attr:`max_size` defaults to
|
||||
10000000 bytes (10MB).
|
||||
|
||||
Indexes may be created by specifying :attr:`indexes` in the :attr:`meta`
|
||||
dictionary. The value should be a list of field names or tuples of field
|
||||
names. Index direction may be specified by prefixing the field names with
|
||||
a **+** or **-** sign.
|
||||
"""
|
||||
|
||||
__metaclass__ = TopLevelDocumentMetaclass
|
||||
|
||||
def save(self):
|
||||
def save(self, safe=True, force_insert=False):
|
||||
"""Save the :class:`~mongoengine.Document` to the database. If the
|
||||
document already exists, it will be updated, otherwise it will be
|
||||
created.
|
||||
|
||||
If ``safe=True`` and the operation is unsuccessful, an
|
||||
:class:`~mongoengine.OperationError` will be raised.
|
||||
|
||||
:param safe: check if the operation succeeded before returning
|
||||
:param force_insert: only try to create a new document, don't allow
|
||||
updates of existing documents
|
||||
"""
|
||||
self.validate()
|
||||
object_id = self.__class__.objects._collection.save(self.to_mongo())
|
||||
self.id = self._fields['id'].to_python(object_id)
|
||||
doc = self.to_mongo()
|
||||
try:
|
||||
collection = self.__class__.objects._collection
|
||||
if force_insert:
|
||||
object_id = collection.insert(doc, safe=safe)
|
||||
else:
|
||||
object_id = collection.save(doc, safe=safe)
|
||||
except pymongo.errors.OperationFailure, err:
|
||||
message = 'Could not save document (%s)'
|
||||
if 'duplicate key' in str(err):
|
||||
message = 'Tried to save duplicate unique keys (%s)'
|
||||
raise OperationError(message % str(err))
|
||||
id_field = self._meta['id_field']
|
||||
self[id_field] = self._fields[id_field].to_python(object_id)
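A short sketch of how the new ``safe``/``force_insert`` behaviour surfaces to callers (assuming a ``BlogPost`` document with a ``slug`` field marked ``unique=True``, as in the tests further down)::

    post = BlogPost(title='Hello', slug='hello')
    post.save()                        # insert, checked because safe=True

    dupe = BlogPost(title='Copy', slug='hello')
    try:
        dupe.save(force_insert=True)   # duplicate unique key, never an update
    except OperationError, err:
        print 'could not save:', err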
|
||||
|
||||
def delete(self):
|
||||
def delete(self, safe=False):
|
||||
"""Delete the :class:`~mongoengine.Document` from the database. This
|
||||
will only take effect if the document has been previously saved.
|
||||
|
||||
:param safe: check if the operation succeeded before returning
|
||||
"""
|
||||
object_id = self._fields['id'].to_mongo(self.id)
|
||||
self.__class__.objects(id=object_id).delete()
|
||||
id_field = self._meta['id_field']
|
||||
object_id = self._fields[id_field].to_mongo(self[id_field])
|
||||
try:
|
||||
self.__class__.objects(**{id_field: object_id}).delete(safe=safe)
|
||||
except pymongo.errors.OperationFailure, err:
|
||||
raise OperationError('Could not delete document (%s)' % str(err))
|
||||
|
||||
def reload(self):
|
||||
"""Reloads all attributes from the database.
|
||||
|
||||
.. versionadded:: 0.1.2
|
||||
"""
|
||||
#object_id = self._fields['id'].to_mongo(self.id)
|
||||
#obj = self.__class__.objects(id=object_id).first()
|
||||
obj = self.__class__.objects(id=self.id).first()
|
||||
id_field = self._meta['id_field']
|
||||
obj = self.__class__.objects(**{id_field: self[id_field]}).first()
|
||||
for field in self._fields:
|
||||
setattr(self, field, getattr(obj, field))
|
||||
|
||||
def validate(self):
|
||||
"""Ensure that all fields' values are valid and that required fields
|
||||
are present.
|
||||
"""
|
||||
# Get a list of tuples of field names and their current values
|
||||
fields = [(field, getattr(self, name))
|
||||
for name, field in self._fields.items()]
|
||||
|
||||
# Ensure that each field is matched to a valid value
|
||||
for field, value in fields:
|
||||
if value is not None:
|
||||
try:
|
||||
field.validate(value)
|
||||
except (ValueError, AttributeError, AssertionError), e:
|
||||
raise ValidationError('Invalid value for field of type "' +
|
||||
field.__class__.__name__ + '"')
|
||||
elif field.required:
|
||||
raise ValidationError('Field "%s" is required' % field.name)
|
||||
setattr(self, field, obj[field])
|
||||
|
||||
@classmethod
|
||||
def drop_collection(cls):
|
||||
|
@@ -82,6 +82,8 @@ class FloatField(BaseField):
|
||||
|
||||
class BooleanField(BaseField):
|
||||
"""A boolean field type.
|
||||
|
||||
.. versionadded:: 0.1.2
|
||||
"""
|
||||
|
||||
def to_python(self, value):
|
||||
@@ -131,12 +133,18 @@ class EmbeddedDocumentField(BaseField):
|
||||
def lookup_member(self, member_name):
|
||||
return self.document._fields.get(member_name)
|
||||
|
||||
def prepare_query_value(self, value):
|
||||
return self.to_mongo(value)
|
||||
|
||||
|
||||
class ListField(BaseField):
|
||||
"""A list field that wraps a standard field, allowing multiple instances
|
||||
of the field to be used as a list in the database.
|
||||
"""
|
||||
|
||||
# ListFields cannot be indexed with _types - MongoDB doesn't support this
|
||||
_index_with_types = False
|
||||
|
||||
def __init__(self, field, **kwargs):
|
||||
if not isinstance(field, BaseField):
|
||||
raise ValidationError('Argument to ListField constructor must be '
|
||||
@@ -163,6 +171,9 @@ class ListField(BaseField):
|
||||
raise ValidationError('All items in a list field must be of the '
|
||||
'specified type')
|
||||
|
||||
def prepare_query_value(self, value):
|
||||
return self.field.to_mongo(value)
|
||||
|
||||
def lookup_member(self, member_name):
|
||||
return self.field.lookup_member(member_name)
|
||||
|
||||
|
@@ -1,15 +1,106 @@
|
||||
from connection import _get_db
|
||||
|
||||
import pymongo
|
||||
import copy
|
||||
|
||||
|
||||
__all__ = ['queryset_manager', 'InvalidQueryError', 'InvalidCollectionError']
|
||||
__all__ = ['queryset_manager', 'Q', 'InvalidQueryError',
|
||||
'InvalidCollectionError']
|
||||
|
||||
# The maximum number of items to display in a QuerySet.__repr__
|
||||
REPR_OUTPUT_SIZE = 20
|
||||
|
||||
|
||||
class InvalidQueryError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class OperationError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class Q(object):
|
||||
|
||||
OR = '||'
|
||||
AND = '&&'
|
||||
OPERATORS = {
|
||||
'eq': 'this.%(field)s == %(value)s',
|
||||
'neq': 'this.%(field)s != %(value)s',
|
||||
'gt': 'this.%(field)s > %(value)s',
|
||||
'gte': 'this.%(field)s >= %(value)s',
|
||||
'lt': 'this.%(field)s < %(value)s',
|
||||
'lte': 'this.%(field)s <= %(value)s',
|
||||
'in': 'this.%(field)s.indexOf(%(value)s) != -1',
|
||||
'nin': 'this.%(field)s.indexOf(%(value)s) == -1',
|
||||
'mod': '%(field)s %% %(value)s',
|
||||
'all': ('%(value)s.every(function(a){'
|
||||
'return this.%(field)s.indexOf(a) != -1 })'),
|
||||
'size': 'this.%(field)s.length == %(value)s',
|
||||
'exists': 'this.%(field)s != null',
|
||||
}
|
||||
|
||||
def __init__(self, **query):
|
||||
self.query = [query]
|
||||
|
||||
def _combine(self, other, op):
|
||||
obj = Q()
|
||||
obj.query = ['('] + copy.deepcopy(self.query) + [op]
|
||||
obj.query += copy.deepcopy(other.query) + [')']
|
||||
return obj
|
||||
|
||||
def __or__(self, other):
|
||||
return self._combine(other, self.OR)
|
||||
|
||||
def __and__(self, other):
|
||||
return self._combine(other, self.AND)
|
||||
|
||||
def as_js(self, document):
|
||||
js = []
|
||||
js_scope = {}
|
||||
for i, item in enumerate(self.query):
|
||||
if isinstance(item, dict):
|
||||
item_query = QuerySet._transform_query(document, **item)
|
||||
# item_query values will either be a value or a dict
|
||||
js.append(self._item_query_as_js(item_query, js_scope, i))
|
||||
else:
|
||||
js.append(item)
|
||||
return pymongo.code.Code(' '.join(js), js_scope)
|
||||
|
||||
def _item_query_as_js(self, item_query, js_scope, item_num):
|
||||
# item_query will be in one of the following forms
|
||||
# {'age': 25, 'name': 'Test'}
|
||||
# {'age': {'$lt': 25}, 'name': {'$in': ['Test', 'Example']}
|
||||
# {'age': {'$lt': 25, '$gt': 18}}
|
||||
js = []
|
||||
for i, (key, value) in enumerate(item_query.items()):
|
||||
op = 'eq'
|
||||
# Construct a variable name for the value in the JS
|
||||
value_name = 'i%sf%s' % (item_num, i)
|
||||
if isinstance(value, dict):
|
||||
# Multiple operators for this field
|
||||
for j, (op, value) in enumerate(value.items()):
|
||||
# Create a custom variable name for this operator
|
||||
op_value_name = '%so%s' % (value_name, j)
|
||||
# Update the js scope with the value for this op
|
||||
js_scope[op_value_name] = value
|
||||
# Construct the JS that uses this op
|
||||
operation_js = Q.OPERATORS[op.strip('$')] % {
|
||||
'field': key,
|
||||
'value': op_value_name
|
||||
}
|
||||
js.append(operation_js)
|
||||
else:
|
||||
js_scope[value_name] = value
|
||||
# Construct the JS for this field
|
||||
field_js = Q.OPERATORS[op.strip('$')] % {
|
||||
'field': key,
|
||||
'value': value_name
|
||||
}
|
||||
js.append(field_js)
|
||||
return ' && '.join(js)
|
||||
|
||||
|
||||
class QuerySet(object):
|
||||
"""A set of results returned from a query. Wraps a MongoDB cursor,
|
||||
providing :class:`~mongoengine.Document` objects as the results.
|
||||
@@ -17,39 +108,117 @@ class QuerySet(object):
|
||||
|
||||
def __init__(self, document, collection):
|
||||
self._document = document
|
||||
self._collection = collection
|
||||
self._collection_obj = collection
|
||||
self._accessed_collection = False
|
||||
self._query = {}
|
||||
self._where_clauses = []
|
||||
|
||||
# If inheritance is allowed, only return instances and instances of
|
||||
# subclasses of the class being used
|
||||
if document._meta.get('allow_inheritance'):
|
||||
self._query = {'_types': self._document._class_name}
|
||||
self._cursor_obj = None
|
||||
|
||||
def ensure_index(self, key_or_list, direction=None):
|
||||
def ensure_index(self, key_or_list):
|
||||
"""Ensure that the given indexes are in place.
|
||||
|
||||
:param key_or_list: a single index key or a list of index keys (to
|
||||
construct a multi-field index); keys may be prefixed with a **+**
|
||||
or a **-** to determine the index ordering
|
||||
"""
|
||||
if isinstance(key_or_list, basestring):
|
||||
# single-field indexes needn't specify a direction
|
||||
if key_or_list.startswith("-"):
|
||||
key_or_list = key_or_list[1:]
|
||||
self._collection.ensure_index(key_or_list)
|
||||
elif isinstance(key_or_list, (list, tuple)):
|
||||
print key_or_list
|
||||
self._collection.ensure_index(key_or_list)
|
||||
index_list = QuerySet._build_index_spec(self._document, key_or_list)
|
||||
self._collection.ensure_index(index_list)
|
||||
return self
|
||||
|
||||
def __call__(self, **query):
|
||||
"""Filter the selected documents by calling the
|
||||
:class:`~mongoengine.QuerySet` with a query.
|
||||
@classmethod
|
||||
def _build_index_spec(cls, doc_cls, key_or_list):
|
||||
"""Build a PyMongo index spec from a MongoEngine index spec.
|
||||
"""
|
||||
if isinstance(key_or_list, basestring):
|
||||
key_or_list = [key_or_list]
|
||||
|
||||
index_list = []
|
||||
use_types = doc_cls._meta.get('allow_inheritance', True)
|
||||
for key in key_or_list:
|
||||
# Get direction from + or -
|
||||
direction = pymongo.ASCENDING
|
||||
if key.startswith("-"):
|
||||
direction = pymongo.DESCENDING
|
||||
if key.startswith(("+", "-")):
|
||||
key = key[1:]
|
||||
|
||||
# Use real field name, do it manually because we need field
|
||||
# objects for the next part (list field checking)
|
||||
parts = key.split('.')
|
||||
fields = QuerySet._lookup_field(doc_cls, parts)
|
||||
parts = [field.name for field in fields]
|
||||
key = '.'.join(parts)
|
||||
index_list.append((key, direction))
|
||||
|
||||
# Check if a list field is being used, don't use _types if it is
|
||||
if use_types and not all(f._index_with_types for f in fields):
|
||||
use_types = False
|
||||
|
||||
# If _types is being used, prepend it to every specified index
|
||||
if doc_cls._meta.get('allow_inheritance') and use_types:
|
||||
index_list.insert(0, ('_types', 1))
|
||||
|
||||
return index_list
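For illustration (the field names follow the index tests below), a meta spec and the PyMongo index specs it becomes::

    class BlogPost(Document):
        date = DateTimeField()
        category = StringField()
        tags = ListField(StringField())
        meta = {'indexes': ['-date', ('category', '-date'), 'tags']}

    # _build_index_spec(BlogPost, ('category', '-date'))
    #   -> [('_types', 1), ('category', 1), ('date', -1)]
    # _build_index_spec(BlogPost, 'tags')
    #   -> [('tags', 1)]   # tags is a ListField, so _types is left out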
|
||||
|
||||
def __call__(self, *q_objs, **query):
|
||||
"""Filter the selected documents by calling the
|
||||
:class:`~mongoengine.queryset.QuerySet` with a query.
|
||||
|
||||
:param q_objs: :class:`~mongoengine.queryset.Q` objects to be used in
|
||||
the query
|
||||
:param query: Django-style query keyword arguments
|
||||
"""
|
||||
for q in q_objs:
|
||||
self._where_clauses.append(q.as_js(self._document))
|
||||
query = QuerySet._transform_query(_doc_cls=self._document, **query)
|
||||
self._query.update(query)
|
||||
return self
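For example (assuming a ``Person`` document with ``name`` and ``age`` fields, as in the test suite), ``Q`` objects and keyword filters may be combined in a single call::

    people = Person.objects(Q(age__gte=18) | Q(name='User A'))
    adults = Person.objects(Q(age__gte=18), name='User A')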
|
||||
|
||||
def filter(self, *q_objs, **query):
|
||||
"""An alias of :meth:`~mongoengine.queryset.QuerySet.__call__`
|
||||
"""
|
||||
return self.__call__(*q_objs, **query)
|
||||
|
||||
@property
|
||||
def _collection(self):
|
||||
"""Property that returns the collection object. This allows us to
|
||||
perform operations only if the collection is accessed.
|
||||
"""
|
||||
if not self._accessed_collection:
|
||||
self._accessed_collection = True
|
||||
|
||||
# Ensure document-defined indexes are created
|
||||
if self._document._meta['indexes']:
|
||||
for key_or_list in self._document._meta['indexes']:
|
||||
#self.ensure_index(key_or_list)
|
||||
self._collection.ensure_index(key_or_list)
|
||||
|
||||
# Ensure indexes created by uniqueness constraints
|
||||
for index in self._document._meta['unique_indexes']:
|
||||
self._collection.ensure_index(index, unique=True)
|
||||
|
||||
# If _types is being used (for polymorphism), it needs an index
|
||||
if '_types' in self._query:
|
||||
self._collection.ensure_index('_types')
|
||||
return self._collection_obj
|
||||
|
||||
@property
|
||||
def _cursor(self):
|
||||
if not self._cursor_obj:
|
||||
self._cursor_obj = self._collection.find(self._query)
|
||||
# Apply where clauses to cursor
|
||||
for js in self._where_clauses:
|
||||
self._cursor_obj.where(js)
|
||||
|
||||
# apply default ordering
|
||||
if self._document._meta['ordering']:
|
||||
self.order_by(*self._document._meta['ordering'])
|
||||
|
||||
return self._cursor_obj
|
||||
|
||||
@classmethod
|
||||
@@ -75,10 +244,12 @@ class QuerySet(object):
|
||||
return fields
|
||||
|
||||
@classmethod
|
||||
def _translate_field_name(cls, doc_cls, parts):
|
||||
def _translate_field_name(cls, doc_cls, field, sep='.'):
|
||||
"""Translate a field attribute name to a database field name.
|
||||
"""
|
||||
return [field.name for field in QuerySet._lookup_field(doc_cls, parts)]
|
||||
parts = field.split(sep)
|
||||
parts = [f.name for f in QuerySet._lookup_field(doc_cls, parts)]
|
||||
return '.'.join(parts)
|
||||
|
||||
@classmethod
|
||||
def _transform_query(cls, _doc_cls=None, **query):
|
||||
@@ -130,11 +301,13 @@ class QuerySet(object):
|
||||
|
||||
def with_id(self, object_id):
|
||||
"""Retrieve the object matching the id provided.
|
||||
"""
|
||||
if not isinstance(object_id, pymongo.objectid.ObjectId):
|
||||
object_id = pymongo.objectid.ObjectId(object_id)
|
||||
|
||||
result = self._collection.find_one(object_id)
|
||||
:param object_id: the value for the id of the document to look up
|
||||
"""
|
||||
id_field = self._document._meta['id_field']
|
||||
object_id = self._document._fields[id_field].to_mongo(object_id)
|
||||
|
||||
result = self._collection.find_one({'_id': object_id})
|
||||
if result is not None:
|
||||
result = self._document._from_son(result)
|
||||
return result
|
||||
@@ -155,6 +328,8 @@ class QuerySet(object):
|
||||
def limit(self, n):
|
||||
"""Limit the number of returned documents to `n`. This may also be
|
||||
achieved using array-slicing syntax (e.g. ``User.objects[:5]``).
|
||||
|
||||
:param n: the maximum number of objects to return
|
||||
"""
|
||||
self._cursor.limit(n)
|
||||
# Return self to allow chaining
|
||||
@@ -163,6 +338,8 @@ class QuerySet(object):
|
||||
def skip(self, n):
|
||||
"""Skip `n` documents before returning the results. This may also be
|
||||
achieved using array-slicing syntax (e.g. ``User.objects[5:]``).
|
||||
|
||||
:param n: the number of objects to skip before returning results
|
||||
"""
|
||||
self._cursor.skip(n)
|
||||
return self
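Both methods return the ``QuerySet`` so calls chain; slicing (handled by ``__getitem__`` below) is an equivalent spelling::

    page = Person.objects.skip(5).limit(5)
    page = Person.objects[5:10]    # same documents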
|
||||
@@ -172,7 +349,16 @@ class QuerySet(object):
|
||||
"""
|
||||
# Slice provided
|
||||
if isinstance(key, slice):
|
||||
self._cursor_obj = self._cursor[key]
|
||||
try:
|
||||
self._cursor_obj = self._cursor[key]
|
||||
except IndexError, err:
|
||||
# PyMongo raises an error if key.start == key.stop, catch it,
|
||||
# bin it, kill it.
|
||||
if key.start >= 0 and key.stop >= 0 and key.step is None:
|
||||
if key.start == key.stop:
|
||||
self.limit(0)
|
||||
return self
|
||||
raise err
|
||||
# Allow further QuerySet modifications to be performed
|
||||
return self
|
||||
# Integer index provided
|
||||
@@ -183,6 +369,9 @@ class QuerySet(object):
|
||||
"""Order the :class:`~mongoengine.queryset.QuerySet` by the keys. The
|
||||
order may be specified by prepending each of the keys by a + or a -.
|
||||
Ascending order is assumed.
|
||||
|
||||
:param keys: fields to order the query results by; keys may be
|
||||
prefixed with **+** or **-** to determine the ordering direction
|
||||
"""
|
||||
key_list = []
|
||||
for key in keys:
|
||||
@@ -199,17 +388,112 @@ class QuerySet(object):
|
||||
def explain(self, format=False):
|
||||
"""Return an explain plan record for the
|
||||
:class:`~mongoengine.queryset.QuerySet`\ 's cursor.
|
||||
|
||||
:param format: format the plan before returning it
|
||||
"""
|
||||
|
||||
plan = self._cursor.explain()
|
||||
if format:
|
||||
import pprint
|
||||
plan = pprint.pformat(plan)
|
||||
return plan
|
||||
|
||||
def delete(self):
|
||||
def delete(self, safe=False):
|
||||
"""Delete the documents matched by the query.
|
||||
|
||||
:param safe: check if the operation succeeded before returning
|
||||
"""
|
||||
self._collection.remove(self._query)
|
||||
self._collection.remove(self._query, safe=safe)
|
||||
|
||||
@classmethod
|
||||
def _transform_update(cls, _doc_cls=None, **update):
|
||||
"""Transform an update spec from Django-style format to Mongo format.
|
||||
"""
|
||||
operators = ['set', 'unset', 'inc', 'dec', 'push', 'push_all', 'pull',
|
||||
'pull_all']
|
||||
|
||||
mongo_update = {}
|
||||
for key, value in update.items():
|
||||
parts = key.split('__')
|
||||
# Check for an operator and transform to mongo-style if there is
|
||||
op = None
|
||||
if parts[0] in operators:
|
||||
op = parts.pop(0)
|
||||
# Convert Pythonic names to Mongo equivalents
|
||||
if op in ('push_all', 'pull_all'):
|
||||
op = op.replace('_all', 'All')
|
||||
elif op == 'dec':
|
||||
# Support decrement by flipping a positive value's sign
|
||||
# and using 'inc'
|
||||
op = 'inc'
|
||||
if value > 0:
|
||||
value = -value
|
||||
|
||||
if _doc_cls:
|
||||
# Switch field names to proper names [set in Field(name='foo')]
|
||||
fields = QuerySet._lookup_field(_doc_cls, parts)
|
||||
parts = [field.name for field in fields]
|
||||
|
||||
# Convert value to proper value
|
||||
field = fields[-1]
|
||||
if op in (None, 'set', 'unset', 'push', 'pull'):
|
||||
value = field.prepare_query_value(value)
|
||||
elif op in ('pushAll', 'pullAll'):
|
||||
value = [field.prepare_query_value(v) for v in value]
|
||||
|
||||
key = '.'.join(parts)
|
||||
|
||||
if op:
|
||||
value = {key: value}
|
||||
key = '$' + op
|
||||
|
||||
if op is None or key not in mongo_update:
|
||||
mongo_update[key] = value
|
||||
elif key in mongo_update and isinstance(mongo_update[key], dict):
|
||||
mongo_update[key].update(value)
|
||||
|
||||
return mongo_update
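A rough sketch of the transformation (hypothetical field names); each ``op__field`` keyword becomes the matching ``$op`` modifier::

    QuerySet._transform_update(BlogPost, set__title='Hi', inc__hits=1,
                               push__tags='mongo')
    # -> {'$set': {'title': 'Hi'}, '$inc': {'hits': 1}, '$push': {'tags': 'mongo'}}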
|
||||
|
||||
def update(self, safe_update=True, **update):
|
||||
"""Perform an atomic update on the fields matched by the query.
|
||||
|
||||
:param safe: check if the operation succeeded before returning
|
||||
:param update: Django-style update keyword arguments
|
||||
|
||||
.. versionadded:: 0.2
|
||||
"""
|
||||
if pymongo.version < '1.1.1':
|
||||
raise OperationError('update() method requires PyMongo 1.1.1+')
|
||||
|
||||
update = QuerySet._transform_update(self._document, **update)
|
||||
try:
|
||||
self._collection.update(self._query, update, safe=safe_update,
|
||||
multi=True)
|
||||
except pymongo.errors.OperationFailure, err:
|
||||
if str(err) == 'multi not coded yet':
|
||||
raise OperationError('update() method requires MongoDB 1.1.3+')
|
||||
raise OperationError('Update failed (%s)' % str(err))
|
||||
|
||||
def update_one(self, safe_update=True, **update):
|
||||
"""Perform an atomic update on first field matched by the query.
|
||||
|
||||
:param safe: check if the operation succeeded before returning
|
||||
:param update: Django-style update keyword arguments
|
||||
|
||||
.. versionadded:: 0.2
|
||||
"""
|
||||
update = QuerySet._transform_update(self._document, **update)
|
||||
try:
|
||||
# Explicitly provide 'multi=False' to newer versions of PyMongo
|
||||
# as the default may change to 'True'
|
||||
if pymongo.version >= '1.1.1':
|
||||
self._collection.update(self._query, update, safe=safe_update,
|
||||
multi=False)
|
||||
else:
|
||||
# Older versions of PyMongo don't support 'multi'
|
||||
self._collection.update(self._query, update, safe=safe_update)
|
||||
except pymongo.errors.OperationFailure, e:
|
||||
raise OperationError('Update failed [%s]' % str(e))
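In use (mirroring the atomic update tests below)::

    BlogPost.objects(title='Test Post').update(set__hits=10)
    BlogPost.objects(title='Test Post').update_one(inc__hits=1)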
|
||||
|
||||
def __iter__(self):
|
||||
return self
|
||||
@@ -222,6 +506,12 @@ class QuerySet(object):
|
||||
collection in use; ``query``, which is an object representing the
|
||||
current query; and ``options``, which is an object containing any
|
||||
options specified as keyword arguments.
|
||||
|
||||
:param code: a string of Javascript code to execute
|
||||
:param fields: fields that you will be using in your function, which
|
||||
will be passed in to your function as arguments
|
||||
:param options: options that you want available to the function
|
||||
(accessed in Javascript through the ``options`` object)
|
||||
"""
|
||||
fields = [QuerySet._translate_field_name(self._document, f)
|
||||
for f in fields]
|
||||
@@ -238,6 +528,9 @@ class QuerySet(object):
|
||||
|
||||
def sum(self, field):
|
||||
"""Sum over the values of the specified field.
|
||||
|
||||
:param field: the field to sum over; use dot-notation to refer to
|
||||
embedded document fields
|
||||
"""
|
||||
sum_func = """
|
||||
function(sumField) {
|
||||
@@ -252,6 +545,9 @@ class QuerySet(object):
|
||||
|
||||
def average(self, field):
|
||||
"""Average over the values of the specified field.
|
||||
|
||||
:param field: the field to average over; use dot-notation to refer to
|
||||
embedded document fields
|
||||
"""
|
||||
average_func = """
|
||||
function(averageField) {
|
||||
@@ -272,6 +568,9 @@ class QuerySet(object):
|
||||
"""Returns a dictionary of all items present in a list field across
|
||||
the whole queried set of documents, and their corresponding frequency.
|
||||
This is useful for generating tag clouds, or searching documents.
|
||||
|
||||
:param list_field: the list field to use
|
||||
:param normalize: normalize the results so they add to 1.0
|
||||
"""
|
||||
freq_func = """
|
||||
function(listField) {
|
||||
@@ -297,6 +596,11 @@ class QuerySet(object):
|
||||
"""
|
||||
return self.exec_js(freq_func, list_field, normalize=normalize)
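For instance, building a simple tag cloud from a hypothetical ``BlogPost.tags`` list field::

    freqs = BlogPost.objects.item_frequencies('tags', normalize=True)
    # e.g. {u'mongodb': 0.4, u'python': 0.35, u'django': 0.25}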
|
||||
|
||||
def __repr__(self):
|
||||
data = list(self[:REPR_OUTPUT_SIZE + 1])
|
||||
if len(data) > REPR_OUTPUT_SIZE:
|
||||
data[-1] = "...(remaining elements truncated)..."
|
||||
return repr(data)
|
||||
|
||||
class InvalidCollectionError(Exception):
|
||||
pass
|
||||
|
@@ -221,6 +221,123 @@ class DocumentTest(unittest.TestCase):
|
||||
|
||||
Log.drop_collection()
|
||||
|
||||
def test_indexes(self):
|
||||
"""Ensure that indexes are used when meta[indexes] is specified.
|
||||
"""
|
||||
class BlogPost(Document):
|
||||
date = DateTimeField(name='addDate', default=datetime.datetime.now)
|
||||
category = StringField()
|
||||
tags = ListField(StringField())
|
||||
meta = {
|
||||
'indexes': [
|
||||
'-date',
|
||||
'tags',
|
||||
('category', '-date')
|
||||
],
|
||||
}
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
info = BlogPost.objects._collection.index_information()
|
||||
# _id, '_types', '-date', 'tags', ('category', '-date')
|
||||
self.assertEqual(len(info), 5)
|
||||
|
||||
# Indexes are lazy so use list() to perform query
|
||||
list(BlogPost.objects)
|
||||
info = BlogPost.objects._collection.index_information()
|
||||
self.assertTrue([('_types', 1), ('category', 1), ('addDate', -1)]
|
||||
in info.values())
|
||||
self.assertTrue([('_types', 1), ('addDate', -1)] in info.values())
|
||||
# tags is a list field so it shouldn't have _types in the index
|
||||
self.assertTrue([('tags', 1)] in info.values())
|
||||
|
||||
class ExtendedBlogPost(BlogPost):
|
||||
title = StringField()
|
||||
meta = {'indexes': ['title']}
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
list(ExtendedBlogPost.objects)
|
||||
info = ExtendedBlogPost.objects._collection.index_information()
|
||||
self.assertTrue([('_types', 1), ('category', 1), ('addDate', -1)]
|
||||
in info.values())
|
||||
self.assertTrue([('_types', 1), ('addDate', -1)] in info.values())
|
||||
self.assertTrue([('_types', 1), ('title', 1)] in info.values())
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
def test_unique(self):
|
||||
"""Ensure that uniqueness constraints are applied to fields.
|
||||
"""
|
||||
class BlogPost(Document):
|
||||
title = StringField()
|
||||
slug = StringField(unique=True)
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
post1 = BlogPost(title='test1', slug='test')
|
||||
post1.save()
|
||||
|
||||
# Two posts with the same slug is not allowed
|
||||
post2 = BlogPost(title='test2', slug='test')
|
||||
self.assertRaises(OperationError, post2.save)
|
||||
|
||||
class Date(EmbeddedDocument):
|
||||
year = IntField(name='yr')
|
||||
|
||||
class BlogPost(Document):
|
||||
title = StringField()
|
||||
date = EmbeddedDocumentField(Date)
|
||||
slug = StringField(unique_with='date.year')
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
post1 = BlogPost(title='test1', date=Date(year=2009), slug='test')
|
||||
post1.save()
|
||||
|
||||
# day is different so won't raise exception
|
||||
post2 = BlogPost(title='test2', date=Date(year=2010), slug='test')
|
||||
post2.save()
|
||||
|
||||
# Now there will be two docs with the same slug and the same day: fail
|
||||
post3 = BlogPost(title='test3', date=Date(year=2010), slug='test')
|
||||
self.assertRaises(OperationError, post3.save)
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
def test_custom_id_field(self):
|
||||
"""Ensure that documents may be created with custom primary keys.
|
||||
"""
|
||||
class User(Document):
|
||||
username = StringField(primary_key=True)
|
||||
name = StringField()
|
||||
|
||||
User.drop_collection()
|
||||
|
||||
self.assertEqual(User._fields['username'].name, '_id')
|
||||
self.assertEqual(User._meta['id_field'], 'username')
|
||||
|
||||
def create_invalid_user():
|
||||
User(name='test').save() # no primary key field
|
||||
self.assertRaises(ValidationError, create_invalid_user)
|
||||
|
||||
def define_invalid_user():
|
||||
class EmailUser(User):
|
||||
email = StringField(primary_key=True)
|
||||
self.assertRaises(ValueError, define_invalid_user)
|
||||
|
||||
user = User(username='test', name='test user')
|
||||
user.save()
|
||||
|
||||
user_obj = User.objects.first()
|
||||
self.assertEqual(user_obj.id, 'test')
|
||||
|
||||
user_son = User.objects._collection.find_one()
|
||||
self.assertEqual(user_son['_id'], 'test')
|
||||
self.assertTrue('username' not in user_son['_id'])
|
||||
|
||||
User.drop_collection()
|
||||
|
||||
def test_creation(self):
|
||||
"""Ensure that document may be created using keyword arguments.
|
||||
"""
|
||||
@@ -275,6 +392,25 @@ class DocumentTest(unittest.TestCase):
|
||||
self.assertTrue('content' in Comment._fields)
|
||||
self.assertFalse('id' in Comment._fields)
|
||||
self.assertFalse(hasattr(Comment, '_meta'))
|
||||
|
||||
def test_embedded_document_validation(self):
|
||||
"""Ensure that embedded documents may be validated.
|
||||
"""
|
||||
class Comment(EmbeddedDocument):
|
||||
date = DateTimeField()
|
||||
content = StringField(required=True)
|
||||
|
||||
comment = Comment()
|
||||
self.assertRaises(ValidationError, comment.validate)
|
||||
|
||||
comment.content = 'test'
|
||||
comment.validate()
|
||||
|
||||
comment.date = 4
|
||||
self.assertRaises(ValidationError, comment.validate)
|
||||
|
||||
comment.date = datetime.datetime.now()
|
||||
comment.validate()
|
||||
|
||||
def test_save(self):
|
||||
"""Ensure that a document may be saved in the database.
|
||||
|
@@ -231,6 +231,9 @@ class FieldTest(unittest.TestCase):
|
||||
content = StringField()
|
||||
author = ReferenceField(User)
|
||||
|
||||
User.drop_collection()
|
||||
BlogPost.drop_collection()
|
||||
|
||||
self.assertRaises(ValidationError, ReferenceField, EmbeddedDocument)
|
||||
|
||||
user = User(name='Test User')
|
||||
|
@@ -1,5 +1,6 @@
|
||||
import unittest
|
||||
import pymongo
|
||||
from datetime import datetime
|
||||
|
||||
from mongoengine.queryset import QuerySet
|
||||
from mongoengine import *
|
||||
@@ -16,7 +17,7 @@ class QuerySetTest(unittest.TestCase):
|
||||
self.Person = Person
|
||||
|
||||
def test_initialisation(self):
|
||||
"""Ensure that CollectionManager is correctly initialised.
|
||||
"""Ensure that a QuerySet is correctly initialised by QuerySetManager.
|
||||
"""
|
||||
self.assertTrue(isinstance(self.Person.objects, QuerySet))
|
||||
self.assertEqual(self.Person.objects._collection.name(),
|
||||
@@ -48,6 +49,9 @@ class QuerySetTest(unittest.TestCase):
|
||||
person2 = self.Person(name="User B", age=30)
|
||||
person2.save()
|
||||
|
||||
q1 = Q(name='test')
|
||||
q2 = Q(age__gte=18)
|
||||
|
||||
# Find all people in the collection
|
||||
people = self.Person.objects
|
||||
self.assertEqual(len(people), 2)
|
||||
@@ -131,6 +135,77 @@ class QuerySetTest(unittest.TestCase):
|
||||
person = self.Person.objects.with_id(person1.id)
|
||||
self.assertEqual(person.name, "User A")
|
||||
|
||||
def test_filter_chaining(self):
|
||||
"""Ensure filters can be chained together.
|
||||
"""
|
||||
from datetime import datetime
|
||||
|
||||
class BlogPost(Document):
|
||||
title = StringField()
|
||||
is_published = BooleanField()
|
||||
published_date = DateTimeField()
|
||||
|
||||
@queryset_manager
|
||||
def published(queryset):
|
||||
return queryset(is_published=True)
|
||||
|
||||
blog_post_1 = BlogPost(title="Blog Post #1",
                       is_published=True,
                       published_date=datetime(2010, 1, 5, 0, 0, 0))
blog_post_2 = BlogPost(title="Blog Post #2",
                       is_published=True,
                       published_date=datetime(2010, 1, 6, 0, 0, 0))
blog_post_3 = BlogPost(title="Blog Post #3",
                       is_published=True,
                       published_date=datetime(2010, 1, 7, 0, 0, 0))
|
||||
|
||||
blog_post_1.save()
|
||||
blog_post_2.save()
|
||||
blog_post_3.save()
|
||||
|
||||
# find all published blog posts before 2010-01-07
|
||||
published_posts = BlogPost.published()
|
||||
published_posts = published_posts.filter(
|
||||
published_date__lt=datetime(2010, 1, 7, 0, 0, 0))
|
||||
self.assertEqual(published_posts.count(), 2)
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
def test_ordering(self):
|
||||
"""Ensure default ordering is applied and can be overridden.
|
||||
"""
|
||||
class BlogPost(Document):
|
||||
title = StringField()
|
||||
published_date = DateTimeField()
|
||||
|
||||
meta = {
|
||||
'ordering': ['-published_date']
|
||||
}
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
blog_post_1 = BlogPost(title="Blog Post #1",
                       published_date=datetime(2010, 1, 5, 0, 0, 0))
blog_post_2 = BlogPost(title="Blog Post #2",
                       published_date=datetime(2010, 1, 6, 0, 0, 0))
blog_post_3 = BlogPost(title="Blog Post #3",
                       published_date=datetime(2010, 1, 7, 0, 0, 0))
|
||||
|
||||
blog_post_1.save()
|
||||
blog_post_2.save()
|
||||
blog_post_3.save()
|
||||
|
||||
# get the "first" BlogPost using default ordering
|
||||
# from BlogPost.meta.ordering
|
||||
latest_post = BlogPost.objects.first()
|
||||
self.assertEqual(latest_post.title, "Blog Post #3")
|
||||
|
||||
# override default ordering, order BlogPosts by "published_date"
|
||||
first_post = BlogPost.objects.order_by("+published_date").first()
|
||||
self.assertEqual(first_post.title, "Blog Post #1")
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
def test_find_embedded(self):
|
||||
"""Ensure that an embedded document is properly returned from a query.
|
||||
"""
|
||||
@@ -141,6 +216,8 @@ class QuerySetTest(unittest.TestCase):
|
||||
content = StringField()
|
||||
author = EmbeddedDocumentField(User)
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
post = BlogPost(content='Had a good coffee today...')
|
||||
post.author = User(name='Test User')
|
||||
post.save()
|
||||
@@ -151,6 +228,42 @@ class QuerySetTest(unittest.TestCase):
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
def test_q(self):
|
||||
class BlogPost(Document):
|
||||
publish_date = DateTimeField()
|
||||
published = BooleanField()
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
post1 = BlogPost(publish_date=datetime(2010, 1, 8), published=False)
|
||||
post1.save()
|
||||
|
||||
post2 = BlogPost(publish_date=datetime(2010, 1, 15), published=True)
|
||||
post2.save()
|
||||
|
||||
post3 = BlogPost(published=True)
|
||||
post3.save()
|
||||
|
||||
post4 = BlogPost(publish_date=datetime(2010, 1, 8))
|
||||
post4.save()
|
||||
|
||||
post5 = BlogPost(publish_date=datetime(2010, 1, 15))
|
||||
post5.save()
|
||||
|
||||
post6 = BlogPost(published=False)
|
||||
post6.save()
|
||||
|
||||
date = datetime(2010, 1, 10)
|
||||
q = BlogPost.objects(Q(publish_date__lte=date) | Q(published=True))
|
||||
posts = [post.id for post in q]
|
||||
|
||||
published_posts = (post1, post2, post3, post4)
|
||||
self.assertTrue(all(obj.id in posts for obj in published_posts))
|
||||
|
||||
self.assertFalse(any(obj.id in posts for obj in [post5, post6]))
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
def test_delete(self):
|
||||
"""Ensure that documents are properly deleted from the database.
|
||||
"""
|
||||
@@ -166,6 +279,41 @@ class QuerySetTest(unittest.TestCase):
|
||||
self.Person.objects.delete()
|
||||
self.assertEqual(len(self.Person.objects), 0)
|
||||
|
||||
def test_update(self):
|
||||
"""Ensure that atomic updates work properly.
|
||||
"""
|
||||
class BlogPost(Document):
|
||||
title = StringField()
|
||||
hits = IntField()
|
||||
tags = ListField(StringField())
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
post = BlogPost(title="Test Post", hits=5, tags=['test'])
|
||||
post.save()
|
||||
|
||||
BlogPost.objects.update(set__hits=10)
|
||||
post.reload()
|
||||
self.assertEqual(post.hits, 10)
|
||||
|
||||
BlogPost.objects.update_one(inc__hits=1)
|
||||
post.reload()
|
||||
self.assertEqual(post.hits, 11)
|
||||
|
||||
BlogPost.objects.update_one(dec__hits=1)
|
||||
post.reload()
|
||||
self.assertEqual(post.hits, 10)
|
||||
|
||||
BlogPost.objects.update(push__tags='mongo')
|
||||
post.reload()
|
||||
self.assertTrue('mongo' in post.tags)
|
||||
|
||||
BlogPost.objects.update_one(push_all__tags=['db', 'nosql'])
|
||||
post.reload()
|
||||
self.assertTrue('db' in post.tags and 'nosql' in post.tags)
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
def test_order_by(self):
|
||||
"""Ensure that QuerySets may be ordered.
|
||||
"""
|
||||
@@ -326,9 +474,69 @@ class QuerySetTest(unittest.TestCase):
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
|
||||
def test_types_index(self):
"""Ensure that an index is used when '_types' is being used in a
query.
|
||||
"""
|
||||
class BlogPost(Document):
|
||||
date = DateTimeField()
|
||||
meta = {'indexes': ['-date']}
|
||||
|
||||
# Indexes are lazy so use list() to perform query
|
||||
list(BlogPost.objects)
|
||||
info = BlogPost.objects._collection.index_information()
|
||||
self.assertTrue([('_types', 1)] in info.values())
|
||||
self.assertTrue([('_types', 1), ('date', -1)] in info.values())
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
class BlogPost(Document):
|
||||
title = StringField()
|
||||
meta = {'allow_inheritance': False}
|
||||
|
||||
# _types is not used on objects where allow_inheritance is False
|
||||
list(BlogPost.objects)
|
||||
info = BlogPost.objects._collection.index_information()
|
||||
self.assertFalse([('_types', 1)] in info.values())
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
def tearDown(self):
|
||||
self.Person.drop_collection()
|
||||
|
||||
|
||||
class QTest(unittest.TestCase):
|
||||
|
||||
def test_or_and(self):
|
||||
q1 = Q(name='test')
|
||||
q2 = Q(age__gte=18)
|
||||
|
||||
query = ['(', {'name': 'test'}, '||', {'age__gte': 18}, ')']
|
||||
self.assertEqual((q1 | q2).query, query)
|
||||
|
||||
query = ['(', {'name': 'test'}, '&&', {'age__gte': 18}, ')']
|
||||
self.assertEqual((q1 & q2).query, query)
|
||||
|
||||
query = ['(', '(', {'name': 'test'}, '&&', {'age__gte': 18}, ')', '||',
|
||||
{'name': 'example'}, ')']
|
||||
self.assertEqual((q1 & q2 | Q(name='example')).query, query)
|
||||
|
||||
def test_item_query_as_js(self):
"""Ensure that the _item_query_as_js utility method works properly.
|
||||
"""
|
||||
q = Q()
|
||||
examples = [
|
||||
({'name': 'test'}, 'this.name == i0f0', {'i0f0': 'test'}),
|
||||
({'age': {'$gt': 18}}, 'this.age > i0f0o0', {'i0f0o0': 18}),
|
||||
({'name': 'test', 'age': {'$gt': 18, '$lte': 65}},
|
||||
'this.age <= i0f0o0 && this.age > i0f0o1 && this.name == i0f1',
|
||||
{'i0f0o0': 65, 'i0f0o1': 18, 'i0f1': 'test'}),
|
||||
]
|
||||
for item, js, scope in examples:
|
||||
test_scope = {}
|
||||
self.assertEqual(q._item_query_as_js(item, test_scope, 0), js)
|
||||
self.assertEqual(scope, test_scope)
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()