Compare commits

185 commits:

83c11a9834 5c912b930e 1b17fb0ae7 d83e67c121 ae39ed94c9
1e51180d42 87ba69d02e 8879d5560b c1621ee39c b0aa98edb4
a7a2fe0216 8e50f5fa3c 31793520bf 0b6b0368c5 d1d30a9280
420c6f2d1e 34f06c4971 9cc4bbd49d f66b312869 2405ba8708
a91b6bff8b 450dc11a68 1ce2f84ce5 f55b241cfa 34d08ce8ef
4f5aa8c43b 27b375060d cbfdc401f7 b58bf3e0ce 1fff7e9aca
494b981b13 dd93995bd0 b3bb4add9c d305e71c27 0d92baa670
7a1b110f62 db8df057ce 5d8ffded40 07f3e5356d 1ece62f960
056c604dc3 2d08eec093 614b590551 6d90ce250a ea31846a19
e6317776c1 efeaba39a4 1a97dfd479 9fecf2b303 3d0d2f48ad
581605e0e2 45d3a7f6ff 7ca2ea0766 89220c142b c73ce3d220
b0f127af4e 766d54795f bd41c6eea4 2435786713 9e7ea64bd2
89a6eee6af 2ec1476e50 2d9b581f34 5bb63f645b a856c7cc37
26db9d8a9d 8060179f6d 77ebd87fed e4bc92235d 27a4d83ce8
ece9b902f8 65a2f8a68b 9c212306b8 1fdc7ce6bb 0b22c140c5
944aa45459 c9842ba13a 8840680303 376b9b1316 54bb1cb3d9
43468b474e 28a957c684 ec5ddbf391 bab186e195 bc7e874476
97114b5948 45e015d71d 0ff6531953 ba298c3cfc 0479bea40b
a536097804 bbefd0fdf9 2aa8b04c21 aeebdfec51 debfcdf498
5c4b33e8e6 eb54037b66 f48af8db3b 97c5b957dd 95e7397803
43a989978a 27734a7c26 dd786d6fc4 be1c28fc45 20e41b3523
e07ecc5cf8 3360b72531 233b13d670 5bcbb4fdaa dbe2f5f2b8
ca8b58d66d f80f0b416f d7765511ee 0240a09056 ab15c4eec9
4ce1ba81a6 530440b333 b80fda36af 42d24263ef 1e2797e7ce
f7075766fc 5647ca70bb 2b8aa6bafc 410443471c 0bb9781b91
2769d6d7ca 120b9433c2 605092bd88 a4a8c94374 0e93f6c0db
aa2add39ad a928047147 c474ca0f13 88dc64653e 5f4b70f3a9
51b429e5b0 360624eb6e d9d2291837 cbdf816232 2d71eb8a18
64d2532ce9 0376910f33 6d503119a1 bfae93e57e 49a66ba81a
a1d43fecd9 d0e42a4798 2a34358abc fd2bb8ea45 98e5daa0e0
ad2e119282 c20c30d8d1 66d215c9c1 46e088d379 bbdd15161a
ea9dc8cfb8 6bd2ccc9bf 56327c6b58 712e8a51e4 421f324f9e
8fe4a70299 3af6d0dbfd e2bef076d3 1bf9f28f4b f1e7b97a93
8cfe13ad90 0f420abc8e 3b5b715567 520051af25 7e376b40bb
fd18a48608 64860c6287 58635b24ba 3ec9dfc108 bd1572f11a
540a0cc59c 83eb4f6b16 95c58bd793 65591c7727 737cbf5f60
4c67cbb4b7 ed2cc2a60b 61411bb259 fcdb0eff8f 30d9347272

.gitignore (vendored, 3 changes)

@@ -13,4 +13,5 @@ env/
 .settings
 .project
 .pydevproject
-tests/bugfix.py
+tests/test_bugfix.py
+htmlcov/

.travis.yml (new file, 12 additions)

@@ -0,0 +1,12 @@
+# http://travis-ci.org/#!/MongoEngine/mongoengine
+language: python
+python:
+    - 2.6
+    - 2.7
+install:
+    - sudo apt-get install zlib1g zlib1g-dev
+    - sudo ln -s /usr/lib/i386-linux-gnu/libz.so /usr/lib/
+    - pip install PIL --use-mirrors ; true
+    - python setup.py install
+script:
+    - python setup.py test

AUTHORS (17 additions)

@@ -97,3 +97,20 @@ that much better:
 * Shalabh Aggarwal
 * Chris Williams
 * Robert Kajic
+* Jacob Peddicord
+* Nils Hasenbanck
+* mostlystatic
+* Greg Banks
+* swashbuckler
+* Adam Reeve
+* Anthony Nemitz
+* deignacio
+* shaunduncan
+* Meir Kriheli
+* Andrey Fedoseev
+* aparajita
+* Tristan Escalada
+* Alexander Koshelev
+* Jaime Irurzun
+* Alexandre González
+* Thomas Steinacher

README.rst

@@ -5,6 +5,9 @@ MongoEngine
 :Author: Harry Marr (http://github.com/hmarr)
 :Maintainer: Ross Lawley (http://github.com/rozza)
 
+.. image:: https://secure.travis-ci.org/MongoEngine/mongoengine.png?branch=master
+  :target: http://travis-ci.org/MongoEngine/mongoengine
+
 About
 =====
 MongoEngine is a Python Object-Document Mapper for working with MongoDB.

@@ -22,7 +25,7 @@ setup.py install``.
 
 Dependencies
 ============
-- pymongo 1.1+
+- pymongo 2.1.1+
 - sphinx (optional - for documentation generation)
 
 Examples

@@ -96,3 +99,4 @@ Contributing
 The source is available on `GitHub <http://github.com/MongoEngine/mongoengine>`_ - to
 contribute to the project, fork it on GitHub and send a pull request, all
 contributions and suggestions are welcome!
+

docs/apireference.rst

@@ -31,6 +31,9 @@ Documents
 .. autoclass:: mongoengine.document.MapReduceDocument
   :members:
 
+.. autoclass:: mongoengine.ValidationError
+  :members:
+
 Querying
 ========
 

@@ -44,25 +47,28 @@ Querying
 Fields
 ======
 
-.. autoclass:: mongoengine.StringField
-.. autoclass:: mongoengine.URLField
-.. autoclass:: mongoengine.EmailField
-.. autoclass:: mongoengine.IntField
-.. autoclass:: mongoengine.FloatField
-.. autoclass:: mongoengine.DecimalField
-.. autoclass:: mongoengine.DateTimeField
+.. autoclass:: mongoengine.BinaryField
+.. autoclass:: mongoengine.BooleanField
 .. autoclass:: mongoengine.ComplexDateTimeField
-.. autoclass:: mongoengine.ListField
-.. autoclass:: mongoengine.SortedListField
+.. autoclass:: mongoengine.DateTimeField
+.. autoclass:: mongoengine.DecimalField
 .. autoclass:: mongoengine.DictField
+.. autoclass:: mongoengine.DynamicField
+.. autoclass:: mongoengine.EmailField
+.. autoclass:: mongoengine.EmbeddedDocumentField
+.. autoclass:: mongoengine.FileField
+.. autoclass:: mongoengine.FloatField
+.. autoclass:: mongoengine.GenericEmbeddedDocumentField
+.. autoclass:: mongoengine.GenericReferenceField
+.. autoclass:: mongoengine.GeoPointField
+.. autoclass:: mongoengine.ImageField
+.. autoclass:: mongoengine.IntField
+.. autoclass:: mongoengine.ListField
 .. autoclass:: mongoengine.MapField
 .. autoclass:: mongoengine.ObjectIdField
 .. autoclass:: mongoengine.ReferenceField
-.. autoclass:: mongoengine.GenericReferenceField
-.. autoclass:: mongoengine.EmbeddedDocumentField
-.. autoclass:: mongoengine.GenericEmbeddedDocumentField
-.. autoclass:: mongoengine.BooleanField
-.. autoclass:: mongoengine.FileField
-.. autoclass:: mongoengine.BinaryField
-.. autoclass:: mongoengine.GeoPointField
 .. autoclass:: mongoengine.SequenceField
+.. autoclass:: mongoengine.SortedListField
+.. autoclass:: mongoengine.StringField
+.. autoclass:: mongoengine.URLField
+.. autoclass:: mongoengine.UUIDField

docs/changelog.rst

@@ -2,7 +2,117 @@
 Changelog
 =========
 
-Changes in 0.6.x
+Changes in 0.6.15
+=================
+- Updated validation error message
+- Added support for null / zero / false values in item_frequencies
+- Fixed cascade save edge case
+- Fixed geo index creation through reference fields
+- Added support for args / kwargs when using @queryset_manager
+- Deref list custom id fix
+
+Changes in 0.6.14
+=================
+- Fixed error dict with nested validation
+- Fixed Int/Float fields and not equals None
+- Exclude tests from installation
+- Allow tuples for index meta
+- Fixed use of str in instance checks
+- Fixed unicode support in transform update
+- Added support for add_to_set and each
+
+Changes in 0.6.13
+================
+- Fixed EmbeddedDocument db_field validation issue
+- Fixed StringField unicode issue
+- Fixes __repr__ modifying the cursor
+
+Changes in 0.6.12
+=================
+- Fixes scalar lookups for primary_key
+- Fixes error with _delta handling DBRefs
+
+Changes in 0.6.11
+==================
+- Fixed inconsistency handling None values field attrs
+- Fixed map_field embedded db_field issue
+- Fixed .save() _delta issue with DbRefs
+- Fixed Django TestCase
+- Added cmp to Embedded Document
+- Added PULL reverse_delete_rule
+- Fixed CASCADE delete bug
+- Fixed db_field data load error
+- Fixed recursive save with FileField
+
+Changes in 0.6.10
+=================
+- Fixed basedict / baselist to return super(..)
+- Promoted BaseDynamicField to DynamicField
+
+Changes in 0.6.9
+================
+- Fixed sparse indexes on inherited docs
+- Removed FileField auto deletion, needs more work maybe 0.7
+
+Changes in 0.6.8
+================
+- Fixed FileField losing reference when no default set
+- Removed possible race condition from FileField (grid_file)
+- Added assignment to save, can now do: b = MyDoc(**kwargs).save()
+- Added support for pull operations on nested EmbeddedDocuments
+- Added support for choices with GenericReferenceFields
+- Added support for choices with GenericEmbeddedDocumentFields
+- Fixed Django 1.4 sessions first save data loss
+- FileField now automatically delete files on .delete()
+- Fix for GenericReference to_mongo method
+- Fixed connection regression
+- Updated Django User document, now allows inheritance
+
+Changes in 0.6.7
+================
+- Fixed indexing on '_id' or 'pk' or 'id'
+- Invalid data from the DB now raises a InvalidDocumentError
+- Cleaned up the Validation Error - docs and code
+- Added meta `auto_create_index` so you can disable index creation
+- Added write concern options to inserts
+- Fixed typo in meta for index options
+- Bug fix Read preference now passed correctly
+- Added support for File like objects for GridFS
+- Fix for #473 - Dereferencing abstracts
+
+Changes in 0.6.6
+================
+- Django 1.4 fixed (finally)
+- Added tests for Django
+
+Changes in 0.6.5
+================
+- More Django updates
+
+Changes in 0.6.4
+================
+
+- Refactored connection / fixed replicasetconnection
+- Bug fix for unknown connection alias error message
+- Sessions support Django 1.3 and Django 1.4
+- Minor fix for ReferenceField
+
+Changes in 0.6.3
+================
+- Updated sessions for Django 1.4
+- Bug fix for updates where listfields contain embedded documents
+- Bug fix for collection naming and mixins
+
+Changes in 0.6.2
+================
+- Updated documentation for ReplicaSet connections
+- Hack round _types issue with SERVER-5247 - querying other arrays may also cause problems.
+
+Changes in 0.6.1
+================
+- Fix for replicaSet connections
+
+Changes in 0.6
 ================
 
 - Added FutureWarning to inherited classes not declaring 'allow_inheritance' as the default will change in 0.7
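
A quick illustration of the 0.6.8 entry "Added assignment to save": Document.save() now returns the document, so construction and persistence chain in one expression. A minimal sketch (the Page class and database name are illustrative, not from this changeset):

    from mongoengine import Document, StringField, connect

    connect('example_db')  # illustrative database name

    class Page(Document):
        title = StringField(required=True)

    # Before 0.6.8 save() returned None; it now returns the saved document:
    page = Page(title='Hello').save()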

docs/django.rst

@@ -2,19 +2,21 @@
 Using MongoEngine with Django
 =============================
 
+.. note :: Updated to support Django 1.4
+
 Connecting
 ==========
 In your **settings.py** file, ignore the standard database settings (unless you
 also plan to use the ORM in your project), and instead call
 :func:`~mongoengine.connect` somewhere in the settings module.
 
 Authentication
 ==============
 MongoEngine includes a Django authentication backend, which uses MongoDB. The
 :class:`~mongoengine.django.auth.User` model is a MongoEngine
 :class:`~mongoengine.Document`, but implements most of the methods and
 attributes that the standard Django :class:`User` model does - so the two are
 moderately compatible. Using this backend will allow you to store users in
 MongoDB but still use many of the Django authentication infrastucture (such as
 the :func:`login_required` decorator and the :func:`authenticate` function). To
 enable the MongoEngine auth backend, add the following to you **settings.py**

@@ -24,7 +26,7 @@ file::
     'mongoengine.django.auth.MongoEngineBackend',
 )
 
 The :mod:`~mongoengine.django.auth` module also contains a
 :func:`~mongoengine.django.auth.get_user` helper function, that takes a user's
 :attr:`id` and returns a :class:`~mongoengine.django.auth.User` object.
 

@@ -49,9 +51,9 @@ Storage
 =======
 With MongoEngine's support for GridFS via the :class:`~mongoengine.FileField`,
 it is useful to have a Django file storage backend that wraps this. The new
 storage module is called :class:`~mongoengine.django.storage.GridFSStorage`.
 Using it is very similar to using the default FileSystemStorage.::
 
     from mongoengine.django.storage import GridFSStorage
     fs = GridFSStorage()
 

docs/guide/connecting.rst

@@ -26,7 +26,12 @@ name - just supply the uri as the :attr:`host` to
 
     connect('project1', host='mongodb://localhost/database_name')
 
+ReplicaSets
+===========
+
+MongoEngine now supports :func:`~pymongo.replica_set_connection.ReplicaSetConnection`
+to use them please use a URI style connection and provide the `replicaSet` name in the
+connection kwargs.
+
 Multiple Databases
 ==================
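
The new ReplicaSets section is terse; a sketch of what it describes, assuming a replica set named rs0 on illustrative hosts:

    from mongoengine import connect

    # URI-style host plus the replicaSet kwarg, which MongoEngine hands
    # through to pymongo's ReplicaSetConnection
    connect('project1',
            host='mongodb://node1.example.com,node2.example.com/project1',
            replicaSet='rs0')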

docs/guide/defining-documents.rst

@@ -62,28 +62,31 @@ not provided. Default values may optionally be a callable, which will be called
 to retrieve the value (such as in the above example). The field types available
 are as follows:
 
-* :class:`~mongoengine.StringField`
-* :class:`~mongoengine.URLField`
-* :class:`~mongoengine.EmailField`
-* :class:`~mongoengine.IntField`
-* :class:`~mongoengine.FloatField`
-* :class:`~mongoengine.DecimalField`
-* :class:`~mongoengine.DateTimeField`
+* :class:`~mongoengine.BinaryField`
+* :class:`~mongoengine.BooleanField`
 * :class:`~mongoengine.ComplexDateTimeField`
-* :class:`~mongoengine.ListField`
-* :class:`~mongoengine.SortedListField`
+* :class:`~mongoengine.DateTimeField`
+* :class:`~mongoengine.DecimalField`
 * :class:`~mongoengine.DictField`
+* :class:`~mongoengine.DynamicField`
+* :class:`~mongoengine.EmailField`
+* :class:`~mongoengine.EmbeddedDocumentField`
+* :class:`~mongoengine.FileField`
+* :class:`~mongoengine.FloatField`
+* :class:`~mongoengine.GenericEmbeddedDocumentField`
+* :class:`~mongoengine.GenericReferenceField`
+* :class:`~mongoengine.GeoPointField`
+* :class:`~mongoengine.ImageField`
+* :class:`~mongoengine.IntField`
+* :class:`~mongoengine.ListField`
 * :class:`~mongoengine.MapField`
 * :class:`~mongoengine.ObjectIdField`
 * :class:`~mongoengine.ReferenceField`
-* :class:`~mongoengine.GenericReferenceField`
-* :class:`~mongoengine.EmbeddedDocumentField`
-* :class:`~mongoengine.GenericEmbeddedDocumentField`
-* :class:`~mongoengine.BooleanField`
-* :class:`~mongoengine.FileField`
-* :class:`~mongoengine.BinaryField`
-* :class:`~mongoengine.GeoPointField`
 * :class:`~mongoengine.SequenceField`
+* :class:`~mongoengine.SortedListField`
+* :class:`~mongoengine.StringField`
+* :class:`~mongoengine.URLField`
+* :class:`~mongoengine.UUIDField`
 
 Field arguments
 ---------------

@@ -98,7 +101,7 @@ arguments can be set on all fields:
 
 :attr:`required` (Default: False)
     If set to True and the field is not set on the document instance, a
-    :class:`~mongoengine.base.ValidationError` will be raised when the document is
+    :class:`~mongoengine.ValidationError` will be raised when the document is
     validated.
 
 :attr:`default` (Default: None)

@@ -289,6 +292,10 @@ Its value can take any of the following constants:
 :const:`mongoengine.CASCADE`
   Any object containing fields that are refererring to the object being deleted
   are deleted first.
+:const:`mongoengine.PULL`
+  Removes the reference to the object (using MongoDB's "pull" operation)
+  from any object's fields of
+  :class:`~mongoengine.ListField` (:class:`~mongoengine.ReferenceField`).
 
 .. warning::

docs/guide/document-instances.rst

@@ -91,5 +91,5 @@ is an alias to :attr:`id`::
 .. note::
 
    If you define your own primary key field, the field implicitly becomes
-   required, so a :class:`ValidationError` will be thrown if you don't provide
-   it.
+   required, so a :class:`~mongoengine.ValidationError` will be thrown if
+   you don't provide it.
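
The newly documented PULL rule in use, as a sketch (Author/Book are illustrative):

    from mongoengine import (Document, ListField, ReferenceField,
                             StringField, PULL)

    class Author(Document):
        name = StringField()

    class Book(Document):
        # Deleting an Author pulls its reference out of Book.authors
        # instead of leaving a dangling DBRef behind
        authors = ListField(ReferenceField(Author, reverse_delete_rule=PULL))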

docs/guide/gridfs.rst

@@ -65,7 +65,7 @@ Deleting stored files is achieved with the :func:`delete` method::
 
     marmot.photo.delete()
 
-.. note::
+.. warning::
 
     The FileField in a Document actually only stores the ID of a file in a
     separate GridFS collection. This means that deleting a document

docs/guide/signals.rst

@@ -5,15 +5,13 @@ Signals
 
 .. versionadded:: 0.5
 
-Signal support is provided by the excellent `blinker`_ library and
-will gracefully fall back if it is not available.
+.. note::
+
+  Signal support is provided by the excellent `blinker`_ library and
+  will gracefully fall back if it is not available.
 
 
-<<<<<<< HEAD
-The following document signals exist in MongoEngine and are pretty self explanatory:
-=======
 The following document signals exist in MongoEngine and are pretty self-explanatory:
->>>>>>> master
 
 * `mongoengine.signals.pre_init`
 * `mongoengine.signals.post_init`
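
For reference, connecting one of the listed signals looks like this (the handler is illustrative):

    from mongoengine import Document, StringField, signals

    class Post(Document):
        title = StringField()

    def pre_save_logger(sender, document, **kwargs):
        print 'about to save', document.title

    # Requires blinker; without it signal support degrades gracefully
    signals.pre_save.connect(pre_save_logger, sender=Post)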

docs/upgrade.rst

@@ -18,6 +18,8 @@ Document.objects.with_id - now raises an InvalidQueryError if used with a filter
 FutureWarning - A future warning has been added to all inherited classes that
 don't define `allow_inheritance` in their meta.
 
+You may need to update pyMongo to 2.0 for use with Sharding.
+
 0.4 to 0.5
 ===========
 

@@ -74,7 +76,7 @@ To upgrade use a Mixin class to set meta like so ::
     class MyAceDocument(Document, BaseMixin):
         pass
 
-    MyAceDocument._get_collection_name() == myacedocument
+    MyAceDocument._get_collection_name() == "myacedocument"
 
 Alternatively, you can rename your collections eg ::
 

mongoengine/__init__.py

@@ -12,7 +12,7 @@ from signals import *
 __all__ = (document.__all__ + fields.__all__ + connection.__all__ +
            queryset.__all__ + signals.__all__)
 
-VERSION = (0, 6, 1)
+VERSION = (0, 6, 15)
 
 
 def get_version():

mongoengine/base.py

@@ -1,4 +1,5 @@
 import warnings
+from collections import defaultdict
 
 from queryset import QuerySet, QuerySetManager
 from queryset import DoesNotExist, MultipleObjectsReturned

@@ -25,7 +26,15 @@ class InvalidDocumentError(Exception):
 
 class ValidationError(AssertionError):
     """Validation exception.
+
+    May represent an error validating a field or a
+    document containing fields with validation errors.
+
+    :ivar errors: A dictionary of errors for fields within this
+        document or list, or None if the error is for an
+        individual field.
     """
 
     errors = {}
     field_name = None
     _message = None

@@ -43,10 +52,12 @@ class ValidationError(AssertionError):
 
     def __getattribute__(self, name):
         message = super(ValidationError, self).__getattribute__(name)
-        if name == 'message' and self.field_name:
-            return message + ' ("%s")' % self.field_name
-        else:
-            return message
+        if name == 'message':
+            if self.field_name:
+                message = '%s' % message
+            if self.errors:
+                message = '%s(%s)' % (message, self._format_errors())
+        return message
 
     def _get_message(self):
         return self._message

@@ -57,6 +68,13 @@ class ValidationError(AssertionError):
     message = property(_get_message, _set_message)
 
     def to_dict(self):
+        """Returns a dictionary of all errors within a document
+
+        Keys are field names or list indices and values are the
+        validation error messages, or a nested dictionary of
+        errors for an embedded document or list.
+        """
+
         def build_dict(source):
             errors_dict = {}
             if not source:

@@ -73,6 +91,24 @@ class ValidationError(AssertionError):
                 return {}
         return build_dict(self.errors)
 
+    def _format_errors(self):
+        """Returns a string listing all errors within a document"""
+
+        def generate_key(value, prefix=''):
+            if isinstance(value, list):
+                value = ' '.join([generate_key(k) for k in value])
+            if isinstance(value, dict):
+                value = ' '.join(
+                        [generate_key(v, k) for k, v in value.iteritems()])
+
+            results = "%s.%s" % (prefix, value) if prefix else value
+            return results
+
+        error_dict = defaultdict(list)
+        for k, v in self.to_dict().iteritems():
+            error_dict[generate_key(v)].append(k)
+        return ' '.join(["%s: %s" % (k, v) for k, v in error_dict.iteritems()])
+
+
 _document_registry = {}
 
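
The practical effect of the new to_dict()/_format_errors() pair, sketched (Person is illustrative; the exact dictionary shape follows build_dict above):

    from mongoengine import Document, StringField, IntField, ValidationError

    class Person(Document):
        name = StringField(required=True)
        age = IntField(required=True)

    try:
        Person().save()
    except ValidationError, e:
        # e.message now embeds the _format_errors() summary, and
        # to_dict() exposes the per-field errors, e.g.
        # {'name': 'Field is required', 'age': 'Field is required'}
        print e.to_dict()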

@@ -191,16 +227,18 @@ class BaseField(object):
         pass
 
     def _validate(self, value):
+        from mongoengine import Document, EmbeddedDocument
         # check choices
         if self.choices:
+            is_cls = isinstance(value, (Document, EmbeddedDocument))
+            value_to_check = value.__class__ if is_cls else value
+            err_msg = 'an instance' if is_cls else 'one'
             if isinstance(self.choices[0], (list, tuple)):
                 option_keys = [option_key for option_key, option_value in self.choices]
-                if value not in option_keys:
-                    self.error('Value must be one of %s' % unicode(option_keys))
-            else:
-                if value not in self.choices:
-                    self.error('Value must be one of %s' % unicode(self.choices))
+                if value_to_check not in option_keys:
+                    self.error('Value must be %s of %s' % (err_msg, unicode(option_keys)))
+            elif value_to_check not in self.choices:
+                self.error('Value must be %s of %s' % (err_msg, unicode(self.choices)))
 
         # check validation argument
         if self.validation is not None:
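
The choices change validates a Document or EmbeddedDocument value against its class, which is what makes class-based choices work. A sketch (Mail/Task are illustrative):

    from mongoengine import (Document, EmbeddedDocument, StringField,
                             GenericEmbeddedDocumentField)

    class Mail(EmbeddedDocument):
        subject = StringField()

    class Task(EmbeddedDocument):
        name = StringField()

    class InboxItem(Document):
        # An instance of anything other than Mail or Task now fails with
        # "Value must be an instance of [...]"
        item = GenericEmbeddedDocumentField(choices=(Mail, Task))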

@@ -233,8 +271,10 @@ class ComplexBaseField(BaseField):
         if instance is None:
             # Document class being used rather than a document object
             return self
 
-        if not self._dereference and instance._initialised:
+        from fields import GenericReferenceField, ReferenceField
+        dereference = self.field is None or isinstance(self.field,
+            (GenericReferenceField, ReferenceField))
+        if not self._dereference and instance._initialised and dereference:
             from dereference import DeReference
             self._dereference = DeReference()  # Cached
             instance._data[self.name] = self._dereference(

@@ -368,12 +408,12 @@ class ComplexBaseField(BaseField):
             sequence = enumerate(value)
         for k, v in sequence:
             try:
-                self.field.validate(v)
-            except (ValidationError, AssertionError), error:
-                if hasattr(error, 'errors'):
-                    errors[k] = error.errors
-                else:
-                    errors[k] = error
+                self.field._validate(v)
+            except ValidationError, error:
+                errors[k] = error.errors or error
+            except (ValueError, AssertionError), error:
+                errors[k] = error
 
         if errors:
             field_class = self.field.__class__.__name__
             self.error('Invalid %s item (%s)' % (field_class, value),

@@ -401,47 +441,6 @@ class ComplexBaseField(BaseField):
     owner_document = property(_get_owner_document, _set_owner_document)
 
 
-class BaseDynamicField(BaseField):
-    """Used by :class:`~mongoengine.DynamicDocument` to handle dynamic data"""
-
-    def to_mongo(self, value):
-        """Convert a Python type to a MongoDBcompatible type.
-        """
-
-        if isinstance(value, basestring):
-            return value
-
-        if hasattr(value, 'to_mongo'):
-            return value.to_mongo()
-
-        if not isinstance(value, (dict, list, tuple)):
-            return value
-
-        is_list = False
-        if not hasattr(value, 'items'):
-            is_list = True
-            value = dict([(k, v) for k, v in enumerate(value)])
-
-        data = {}
-        for k, v in value.items():
-            data[k] = self.to_mongo(v)
-
-        if is_list:  # Convert back to a list
-            value = [v for k, v in sorted(data.items(), key=operator.itemgetter(0))]
-        else:
-            value = data
-        return value
-
-    def lookup_member(self, member_name):
-        return member_name
-
-    def prepare_query_value(self, op, value):
-        if isinstance(value, basestring):
-            from mongoengine.fields import StringField
-            return StringField().prepare_query_value(op, value)
-        return self.to_mongo(value)
-
-
 class ObjectIdField(BaseField):
     """An field wrapper around MongoDB's ObjectIds.
     """

@@ -478,13 +477,18 @@ class DocumentMetaclass(type):
             attrs.update(dict([(k, v) for k, v in base.__dict__.items()
                                if issubclass(v.__class__, BaseField)]))
 
+            # Handle simple mixin's with meta
+            if hasattr(base, 'meta') and not isinstance(base, DocumentMetaclass):
+                meta = attrs.get('meta', {})
+                meta.update(base.meta)
+                attrs['meta'] = meta
+
             for p_base in base.__bases__:
                 #optimize :-)
                 if p_base in (object, BaseDocument):
                     continue
 
                 attrs.update(_get_mixin_fields(p_base))
 
         return attrs
 
         metaclass = attrs.get('__metaclass__')

@@ -498,6 +502,7 @@ class DocumentMetaclass(type):
         simple_class = True
 
         for base in bases:
+
             # Include all fields present in superclasses
             if hasattr(base, '_fields'):
                 doc_fields.update(base._fields)

@@ -526,7 +531,8 @@ class DocumentMetaclass(type):
             simple_class = False
 
         doc_class_name = '.'.join(reversed(class_name))
-        meta = attrs.get('_meta', attrs.get('meta', {}))
+        meta = attrs.get('_meta', {})
+        meta.update(attrs.get('meta', {}))
 
         if 'allow_inheritance' not in meta:
             meta['allow_inheritance'] = True

@@ -704,7 +710,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
             meta['queryset_class'] = manager.queryset_class
         new_class.objects = manager
 
-        indicies = meta['indexes'] + abstract_base_indexes
+        indicies = list(meta['indexes']) + abstract_base_indexes
         user_indexes = [QuerySet._build_index_spec(new_class, spec)
                         for spec in indicies] + base_indexes
         new_class._meta['indexes'] = user_indexes

@@ -798,6 +804,7 @@ class BaseDocument(object):
                 dynamic_data[key] = value
         else:
             for key, value in values.items():
+                key = self._reverse_db_field_map.get(key, key)
                 setattr(self, key, value)
 
         # Set any get_fieldname_display methods

@@ -818,7 +825,8 @@ class BaseDocument(object):
 
         field = None
         if not hasattr(self, name) and not name.startswith('_'):
-            field = BaseDynamicField(db_field=name)
+            from fields import DynamicField
+            field = DynamicField(db_field=name)
             field.name = name
             self._dynamic_fields[name] = field

@@ -831,13 +839,6 @@ class BaseDocument(object):
         if hasattr(self, '_changed_fields'):
             self._mark_as_changed(name)
 
-        # Handle None values for required fields
-        if value is None and name in getattr(self, '_fields', {}):
-            self._data[name] = value
-            if hasattr(self, '_changed_fields'):
-                self._mark_as_changed(name)
-            return
-
         if not self._created and name in self._meta.get('shard_key', tuple()):
             from queryset import OperationError
             raise OperationError("Shard Keys are immutable. Tried to update %s" % name)

@@ -902,8 +903,7 @@ class BaseDocument(object):
                 errors[field.name] = ValidationError('Field is required',
                                                      field_name=field.name)
         if errors:
-            raise ValidationError('Errors encountered validating document',
-                                  errors=errors)
+            raise ValidationError('ValidationError', errors=errors)
 
     def to_mongo(self):
         """Return data dictionary ready for use with MongoDB.

@@ -940,8 +940,8 @@ class BaseDocument(object):
         """
         # get the class name from the document, falling back to the given
        # class if unavailable
-        class_name = son.get(u'_cls', cls._class_name)
-        data = dict((str(key), value) for key, value in son.items())
+        class_name = son.get('_cls', cls._class_name)
+        data = dict(("%s" % key, value) for key, value in son.items())
 
         if '_types' in data:
             del data['_types']

@@ -954,11 +954,18 @@ class BaseDocument(object):
             cls = get_document(class_name)
 
         changed_fields = []
+        errors_dict = {}
+
         for field_name, field in cls._fields.items():
             if field.db_field in data:
                 value = data[field.db_field]
-                data[field_name] = (value if value is None
-                                    else field.to_python(value))
+                try:
+                    data[field_name] = (value if value is None
+                                        else field.to_python(value))
+                    if field_name != field.db_field:
+                        del data[field.db_field]
+                except (AttributeError, ValueError), e:
+                    errors_dict[field_name] = e
             elif field.default:
                 default = field.default
                 if callable(default):

@@ -966,7 +973,13 @@ class BaseDocument(object):
             if isinstance(default, BaseDocument):
                 changed_fields.append(field_name)
 
+        if errors_dict:
+            errors = "\n".join(["%s - %s" % (k, v) for k, v in errors_dict.items()])
+            raise InvalidDocumentError("""
+Invalid data to create a `%s` instance.\n%s""".strip() % (cls._class_name, errors))
+
         obj = cls(**data)
+
         obj._changed_fields = changed_fields
         obj._created = False
         return obj

@@ -1037,13 +1050,16 @@ class BaseDocument(object):
         for path in set_fields:
             parts = path.split('.')
             d = doc
+            new_path = []
             for p in parts:
-                if hasattr(d, '__getattr__'):
-                    d = getattr(p, d)
+                if isinstance(d, DBRef):
+                    break
                 elif p.isdigit():
                     d = d[int(p)]
-                else:
+                elif hasattr(d, 'get'):
                     d = d.get(p)
+                new_path.append(p)
+            path = '.'.join(new_path)
             set_data[path] = d
         else:
             set_data = doc

@@ -1100,7 +1116,11 @@ class BaseDocument(object):
         inspected = inspected or []
         geo_indices = []
         inspected.append(cls)
+
+        from fields import EmbeddedDocumentField, GeoPointField
         for field in cls._fields.values():
+            if not isinstance(field, (EmbeddedDocumentField, GeoPointField)):
+                continue
             if hasattr(field, 'document_type'):
                 field_cls = field.document_type
                 if field_cls in inspected:

@@ -1205,15 +1225,15 @@ class BaseList(list):
     def __init__(self, list_items, instance, name):
         self._instance = instance
         self._name = name
-        super(BaseList, self).__init__(list_items)
+        return super(BaseList, self).__init__(list_items)
 
     def __setitem__(self, *args, **kwargs):
         self._mark_as_changed()
-        super(BaseList, self).__setitem__(*args, **kwargs)
+        return super(BaseList, self).__setitem__(*args, **kwargs)
 
     def __delitem__(self, *args, **kwargs):
         self._mark_as_changed()
-        super(BaseList, self).__delitem__(*args, **kwargs)
+        return super(BaseList, self).__delitem__(*args, **kwargs)
 
     def __getstate__(self):
         self.observer = None

@@ -1267,23 +1287,23 @@ class BaseDict(dict):
     def __init__(self, dict_items, instance, name):
         self._instance = instance
         self._name = name
-        super(BaseDict, self).__init__(dict_items)
+        return super(BaseDict, self).__init__(dict_items)
 
     def __setitem__(self, *args, **kwargs):
         self._mark_as_changed()
-        super(BaseDict, self).__setitem__(*args, **kwargs)
+        return super(BaseDict, self).__setitem__(*args, **kwargs)
 
     def __delete__(self, *args, **kwargs):
         self._mark_as_changed()
-        super(BaseDict, self).__delete__(*args, **kwargs)
+        return super(BaseDict, self).__delete__(*args, **kwargs)
 
     def __delitem__(self, *args, **kwargs):
         self._mark_as_changed()
-        super(BaseDict, self).__delitem__(*args, **kwargs)
+        return super(BaseDict, self).__delitem__(*args, **kwargs)
 
     def __delattr__(self, *args, **kwargs):
         self._mark_as_changed()
-        super(BaseDict, self).__delattr__(*args, **kwargs)
+        return super(BaseDict, self).__delattr__(*args, **kwargs)
 
     def __getstate__(self):
         self.instance = None

@@ -1296,19 +1316,19 @@ class BaseDict(dict):
 
     def clear(self, *args, **kwargs):
         self._mark_as_changed()
-        super(BaseDict, self).clear(*args, **kwargs)
+        return super(BaseDict, self).clear(*args, **kwargs)
 
     def pop(self, *args, **kwargs):
         self._mark_as_changed()
-        super(BaseDict, self).pop(*args, **kwargs)
+        return super(BaseDict, self).pop(*args, **kwargs)
 
     def popitem(self, *args, **kwargs):
         self._mark_as_changed()
-        super(BaseDict, self).popitem(*args, **kwargs)
+        return super(BaseDict, self).popitem(*args, **kwargs)
 
     def update(self, *args, **kwargs):
         self._mark_as_changed()
-        super(BaseDict, self).update(*args, **kwargs)
+        return super(BaseDict, self).update(*args, **kwargs)
 
     def _mark_as_changed(self):
         if hasattr(self._instance, '_mark_as_changed'):
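
Those `return super(...)` changes matter for mutators that produce a value; the old overrides swallowed it. A sketch, assuming a document with a DictField named info (all names illustrative):

    from mongoengine import Document, DictField, connect

    connect('example_db')  # illustrative

    class Person(Document):
        info = DictField()

    person = Person(info={'city': 'Berlin'}).save()
    removed = person.info.pop('city')  # now returns 'Berlin', not None
    person.save()                      # the mutation is still change-tracked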

mongoengine/connection.py

@@ -39,22 +39,7 @@ def register_connection(alias, name, host='localhost', port=27017,
     """
     global _connection_settings
 
-    # Handle uri style connections
-    if "://" in host:
-        uri_dict = uri_parser.parse_uri(host)
-        if uri_dict.get('database') is None:
-            raise ConnectionError("If using URI style connection include "\
-                                  "database name in string")
-        _connection_settings[alias] = {
-            'host': host,
-            'name': uri_dict.get('database'),
-            'username': uri_dict.get('username'),
-            'password': uri_dict.get('password')
-        }
-        _connection_settings[alias].update(kwargs)
-        return
-
-    _connection_settings[alias] = {
+    conn_settings = {
         'name': name,
         'host': host,
         'port': port,

@@ -64,7 +49,25 @@ def register_connection(alias, name, host='localhost', port=27017,
         'password': password,
         'read_preference': read_preference
     }
-    _connection_settings[alias].update(kwargs)
+
+    # Handle uri style connections
+    if "://" in host:
+        uri_dict = uri_parser.parse_uri(host)
+        if uri_dict.get('database') is None:
+            raise ConnectionError("If using URI style connection include "\
+                                  "database name in string")
+        conn_settings.update({
+            'host': host,
+            'name': uri_dict.get('database'),
+            'username': uri_dict.get('username'),
+            'password': uri_dict.get('password'),
+            'read_preference': read_preference,
+        })
+        if "replicaSet" in host:
+            conn_settings['replicaSet'] = True
+
+    conn_settings.update(kwargs)
+    _connection_settings[alias] = conn_settings
 
 
 def disconnect(alias=DEFAULT_CONNECTION_NAME):

@@ -86,7 +89,7 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
 
     if alias not in _connections:
         if alias not in _connection_settings:
-            msg = 'Connection with alias "%s" has not been defined'
+            msg = 'Connection with alias "%s" has not been defined' % alias
             if alias == DEFAULT_CONNECTION_NAME:
                 msg = 'You have not defined a default connection'
             raise ConnectionError(msg)

@@ -105,12 +108,18 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
             for slave_alias in conn_settings['slaves']:
                 slaves.append(get_connection(slave_alias))
             conn_settings['slaves'] = slaves
-            conn_settings.pop('read_preference')
+            conn_settings.pop('read_preference', None)
 
         connection_class = Connection
         if 'replicaSet' in conn_settings:
             conn_settings['hosts_or_uri'] = conn_settings.pop('host', None)
+            # Discard port since it can't be used on ReplicaSetConnection
+            conn_settings.pop('port', None)
+            # Discard replicaSet if not base string
+            if not isinstance(conn_settings['replicaSet'], basestring):
+                conn_settings.pop('replicaSet', None)
             connection_class = ReplicaSetConnection
 
         try:
             _connections[alias] = connection_class(**conn_settings)
         except Exception, e:
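
After the reordering in register_connection(), URI-style settings are merged into the defaults rather than replacing them, and a replicaSet in the URI flags the alias for ReplicaSetConnection in get_connection(). A sketch (alias and hosts illustrative):

    from mongoengine.connection import register_connection, get_connection

    # The URI's database name ('reports') overrides the positional name,
    # and 'replicaSet' in the host string selects ReplicaSetConnection
    register_connection('reporting', 'unused',
                        host='mongodb://db1,db2/reports?replicaSet=rs0')
    conn = get_connection('reporting')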

mongoengine/dereference.py

@@ -112,6 +112,10 @@ class DeReference(object):
                 for ref in references:
                     if '_cls' in ref:
                         doc = get_document(ref["_cls"])._from_son(ref)
+                    elif doc_type is None:
+                        doc = get_document(
+                            ''.join(x.capitalize()
+                                for x in col.split('_')))._from_son(ref)
                     else:
                         doc = doc_type._from_son(ref)
                     object_map[doc.id] = doc

@@ -162,7 +166,7 @@ class DeReference(object):
             else:
                 data[k] = v
 
-            if k in self.object_map:
+            if k in self.object_map and not is_list:
                 data[k] = self.object_map[k]
             elif hasattr(v, '_fields'):
                 for field_name, field in v._fields.iteritems():

mongoengine/django/auth.py

@@ -1,23 +1,39 @@
+import datetime
+
 from mongoengine import *
 
-from django.utils.hashcompat import md5_constructor, sha_constructor
 from django.utils.encoding import smart_str
 from django.contrib.auth.models import AnonymousUser
 from django.utils.translation import ugettext_lazy as _
 
-import datetime
+try:
+    from django.contrib.auth.hashers import check_password, make_password
+except ImportError:
+    """Handle older versions of Django"""
+    from django.utils.hashcompat import md5_constructor, sha_constructor
+
+    def get_hexdigest(algorithm, salt, raw_password):
+        raw_password, salt = smart_str(raw_password), smart_str(salt)
+        if algorithm == 'md5':
+            return md5_constructor(salt + raw_password).hexdigest()
+        elif algorithm == 'sha1':
+            return sha_constructor(salt + raw_password).hexdigest()
+        raise ValueError('Got unknown password algorithm type in password')
+
+    def check_password(raw_password, password):
+        algo, salt, hash = password.split('$')
+        return hash == get_hexdigest(algo, salt, raw_password)
+
+    def make_password(raw_password):
+        from random import random
+        algo = 'sha1'
+        salt = get_hexdigest(algo, str(random()), str(random()))[:5]
+        hash = get_hexdigest(algo, salt, raw_password)
+        return '%s$%s$%s' % (algo, salt, hash)
+
 
 REDIRECT_FIELD_NAME = 'next'
 
-def get_hexdigest(algorithm, salt, raw_password):
-    raw_password, salt = smart_str(raw_password), smart_str(salt)
-    if algorithm == 'md5':
-        return md5_constructor(salt + raw_password).hexdigest()
-    elif algorithm == 'sha1':
-        return sha_constructor(salt + raw_password).hexdigest()
-    raise ValueError('Got unknown password algorithm type in password')
-
-
 class User(Document):
     """A User document that aims to mirror most of the API specified by Django
     at http://docs.djangoproject.com/en/dev/topics/auth/#users

@@ -34,7 +50,7 @@ class User(Document):
     email = EmailField(verbose_name=_('e-mail address'))
     password = StringField(max_length=128,
                            verbose_name=_('password'),
-                           help_text=_("Use '[algo]$[salt]$[hexdigest]' or use the <a href=\"password/\">change password form</a>."))
+                           help_text=_("Use '[algo]$[iterations]$[salt]$[hexdigest]' or use the <a href=\"password/\">change password form</a>."))
     is_staff = BooleanField(default=False,
                             verbose_name=_('staff status'),
                             help_text=_("Designates whether the user can log into this admin site."))

@@ -50,6 +66,7 @@ class User(Document):
                                   verbose_name=_('date joined'))
 
     meta = {
+        'allow_inheritance': True,
         'indexes': [
             {'fields': ['username'], 'unique': True}
         ]

@@ -75,11 +92,7 @@ class User(Document):
         assigning to :attr:`~mongoengine.django.auth.User.password` as the
         password is hashed before storage.
         """
-        from random import random
-        algo = 'sha1'
-        salt = get_hexdigest(algo, str(random()), str(random()))[:5]
-        hash = get_hexdigest(algo, salt, raw_password)
-        self.password = '%s$%s$%s' % (algo, salt, hash)
+        self.password = make_password(raw_password)
         self.save()
         return self

@@ -89,8 +102,7 @@ class User(Document):
         :attr:`~mongoengine.django.auth.User.password` as the password is
         hashed before storage.
         """
-        algo, salt, hash = self.password.split('$')
-        return hash == get_hexdigest(algo, salt, raw_password)
+        return check_password(raw_password, self.password)
 
     @classmethod
     def create_user(cls, username, password, email=None):
@@ -1,3 +1,6 @@
|
|||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
from django.conf import settings
|
||||||
from django.contrib.sessions.backends.base import SessionBase, CreateError
|
from django.contrib.sessions.backends.base import SessionBase, CreateError
|
||||||
from django.core.exceptions import SuspiciousOperation
|
from django.core.exceptions import SuspiciousOperation
|
||||||
from django.utils.encoding import force_unicode
|
from django.utils.encoding import force_unicode
|
||||||
@@ -6,18 +9,18 @@ from mongoengine.document import Document
|
|||||||
from mongoengine import fields
|
from mongoengine import fields
|
||||||
from mongoengine.queryset import OperationError
|
from mongoengine.queryset import OperationError
|
||||||
from mongoengine.connection import DEFAULT_CONNECTION_NAME
|
from mongoengine.connection import DEFAULT_CONNECTION_NAME
|
||||||
from django.conf import settings
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
MONGOENGINE_SESSION_DB_ALIAS = getattr(
|
MONGOENGINE_SESSION_DB_ALIAS = getattr(
|
||||||
settings, 'MONGOENGINE_SESSION_DB_ALIAS',
|
settings, 'MONGOENGINE_SESSION_DB_ALIAS',
|
||||||
DEFAULT_CONNECTION_NAME)
|
DEFAULT_CONNECTION_NAME)
|
||||||
|
|
||||||
|
|
||||||
class MongoSession(Document):
|
class MongoSession(Document):
|
||||||
session_key = fields.StringField(primary_key=True, max_length=40)
|
session_key = fields.StringField(primary_key=True, max_length=40)
|
||||||
session_data = fields.StringField()
|
session_data = fields.StringField()
|
||||||
expire_date = fields.DateTimeField()
|
expire_date = fields.DateTimeField()
|
||||||
|
|
||||||
meta = {'collection': 'django_session',
|
meta = {'collection': 'django_session',
|
||||||
'db_alias': MONGOENGINE_SESSION_DB_ALIAS,
|
'db_alias': MONGOENGINE_SESSION_DB_ALIAS,
|
||||||
'allow_inheritance': False}
|
'allow_inheritance': False}
|
||||||
@@ -41,7 +44,7 @@ class SessionStore(SessionBase):
|
|||||||
|
|
||||||
def create(self):
|
def create(self):
|
||||||
while True:
|
while True:
|
||||||
self.session_key = self._get_new_session_key()
|
self._session_key = self._get_new_session_key()
|
||||||
try:
|
try:
|
||||||
self.save(must_create=True)
|
self.save(must_create=True)
|
||||||
except CreateError:
|
except CreateError:
|
||||||
@@ -51,6 +54,8 @@ class SessionStore(SessionBase):
|
|||||||
return
|
return
|
||||||
|
|
||||||
def save(self, must_create=False):
|
def save(self, must_create=False):
|
||||||
|
if self.session_key is None:
|
||||||
|
self._session_key = self._get_new_session_key()
|
||||||
s = MongoSession(session_key=self.session_key)
|
s = MongoSession(session_key=self.session_key)
|
||||||
s.session_data = self.encode(self._get_session(no_load=must_create))
|
s.session_data = self.encode(self._get_session(no_load=must_create))
|
||||||
s.expire_date = self.get_expiry_date()
|
s.expire_date = self.get_expiry_date()
|
||||||
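A sketch of wiring this backend into a Django project (illustrative, not from the changeset). The `SESSION_ENGINE` dotted path is an assumption based on the imports above; the alias setting falls back to the default connection when unset, as the `getattr` above shows:

    # settings.py (sketch; the module path is assumed)
    SESSION_ENGINE = 'mongoengine.django.sessions'
    MONGOENGINE_SESSION_DB_ALIAS = 'default'   # optional override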
@@ -10,7 +10,7 @@ class MongoTestCase(TestCase):
     """
     db_name = 'test_%s' % settings.MONGO_DATABASE_NAME
     def __init__(self, methodName='runtest'):
-        self.db = connect(self.db_name)
+        self.db = connect(self.db_name).get_db()
         super(MongoTestCase, self).__init__(methodName)

     def _post_teardown(self):

@@ -1,4 +1,5 @@
 import pymongo
+
 from bson.dbref import DBRef

 from mongoengine import signals

@@ -39,6 +40,11 @@ class EmbeddedDocument(BaseDocument):
         else:
             super(EmbeddedDocument, self).__delattr__(*args, **kwargs)

+    def __eq__(self, other):
+        if isinstance(other, self.__class__):
+            return self._data == other._data
+        return False
+

 class Document(BaseDocument):
     """The base class used for defining the structure and properties of

@@ -74,8 +80,14 @@ class Document(BaseDocument):
     names. Index direction may be specified by prefixing the field names with
     a **+** or **-** sign.

+    Automatic index creation can be disabled by specifying
+    :attr:`auto_create_index` in the :attr:`meta` dictionary. If this is set to
+    False then indexes will not be created by MongoEngine. This is useful in
+    production systems where index creation is performed as part of a deployment
+    system.
+
     By default, _types will be added to the start of every index (that
-    doesn't contain a list) if allow_inheritence is True. This can be
+    doesn't contain a list) if allow_inheritance is True. This can be
     disabled by either setting types to False on the specific index or
     by setting index_types to False on the meta dictionary for the document.
     """
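A short sketch of the new meta option described above (the `Person` document is illustrative):

    class Person(Document):
        name = StringField()

        meta = {
            # Indexes will not be created automatically; a deployment
            # system is expected to create them instead.
            'auto_create_index': False,
        }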
@@ -147,8 +159,9 @@ class Document(BaseDocument):
            :meth:`~pymongo.collection.Collection.save` OR
            :meth:`~pymongo.collection.Collection.insert`
            which will be used as options for the resultant ``getLastError`` command.
-           For example, ``save(..., w=2, fsync=True)`` will wait until at least two servers
-           have recorded the write and will force an fsync on each server being written to.
+           For example, ``save(..., write_options={w: 2, fsync: True}, ...)`` will
+           wait until at least two servers have recorded the write and will force an
+           fsync on each server being written to.
        :param cascade: Sets the flag for cascading saves. You can set a default by setting
            "cascade" in the document __meta__
        :param cascade_kwargs: optional kwargs dictionary to be passed throw to cascading saves
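A sketch of the documented call shape (the `Post` document is illustrative); per the docstring, the options feed pymongo's ``getLastError`` handling:

    post = Post(title='hello')
    # Wait for two servers to record the write and force an fsync on each.
    post.save(safe=True, write_options={'w': 2, 'fsync': True})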
@@ -213,6 +226,7 @@ class Document(BaseDocument):
                if cascade_kwargs:  # Allow granular control over cascades
                    kwargs.update(cascade_kwargs)
                kwargs['_refs'] = _refs
+                #self._changed_fields = []
                self.cascade_save(**kwargs)

        except pymongo.errors.OperationFailure, err:

@@ -226,17 +240,24 @@ class Document(BaseDocument):
        self._changed_fields = []
        self._created = False
        signals.post_save.send(self.__class__, document=self, created=created)
+        return self

    def cascade_save(self, *args, **kwargs):
        """Recursively saves any references / generic references on an object"""
        from fields import ReferenceField, GenericReferenceField
        _refs = kwargs.get('_refs', []) or []

        for name, cls in self._fields.items():

            if not isinstance(cls, (ReferenceField, GenericReferenceField)):
                continue

            ref = getattr(self, name)
            if not ref:
                continue
+            if isinstance(ref, DBRef):
+                continue
+
            ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data))
            if ref and ref_id not in _refs:
                _refs.append(ref_id)

@@ -351,7 +372,7 @@ class DynamicDocument(Document):
    way as an ordinary document but has expando style properties. Any data
    passed or set against the :class:`~mongoengine.DynamicDocument` that is
    not a field is automatically converted into a
-    :class:`~mongoengine.BaseDynamicField` and data can be attributed to that
+    :class:`~mongoengine.DynamicField` and data can be attributed to that
    field.

    ..note::

@@ -8,7 +8,7 @@ import uuid
 from bson import Binary, DBRef, SON, ObjectId

 from base import (BaseField, ComplexBaseField, ObjectIdField,
-                  ValidationError, get_document)
+                  ValidationError, get_document, BaseDocument)
 from queryset import DO_NOTHING, QuerySet
 from document import Document, EmbeddedDocument
 from connection import get_db, DEFAULT_CONNECTION_NAME

@@ -30,7 +30,7 @@ except ImportError:
 __all__ = ['StringField', 'IntField', 'FloatField', 'BooleanField',
            'DateTimeField', 'EmbeddedDocumentField', 'ListField', 'DictField',
            'ObjectIdField', 'ReferenceField', 'ValidationError', 'MapField',
-           'DecimalField', 'ComplexDateTimeField', 'URLField',
+           'DecimalField', 'ComplexDateTimeField', 'URLField', 'DynamicField',
            'GenericReferenceField', 'FileField', 'BinaryField',
            'SortedListField', 'EmailField', 'GeoPointField', 'ImageField',
            'SequenceField', 'UUIDField', 'GenericEmbeddedDocumentField']

@@ -49,10 +49,13 @@ class StringField(BaseField):
        super(StringField, self).__init__(**kwargs)

    def to_python(self, value):
-        return unicode(value)
+        if isinstance(value, unicode):
+            return value
+        else:
+            return value.decode('utf-8')

    def validate(self, value):
-        if not isinstance(value, (str, unicode)):
+        if not isinstance(value, basestring):
            self.error('StringField only accepts string values')

        if self.max_length is not None and len(value) > self.max_length:

@@ -164,6 +167,9 @@ class IntField(BaseField):
            self.error('Integer value is too large')

    def prepare_query_value(self, op, value):
+        if value is None:
+            return value
+
        return int(value)


@@ -182,7 +188,7 @@ class FloatField(BaseField):
        if isinstance(value, int):
            value = float(value)
        if not isinstance(value, float):
-            self.error('FoatField only accepts float values')
+            self.error('FloatField only accepts float values')

        if self.min_value is not None and value < self.min_value:
            self.error('Float value is too small')

@@ -191,6 +197,9 @@ class FloatField(BaseField):
            self.error('Float value is too large')

    def prepare_query_value(self, op, value):
+        if value is None:
+            return value
+
        return float(value)


@@ -369,7 +378,7 @@ class ComplexDateTimeField(StringField):
        return self._convert_from_string(data)

    def __set__(self, instance, value):
-        value = self._convert_from_datetime(value)
+        value = self._convert_from_datetime(value) if value else value
        return super(ComplexDateTimeField, self).__set__(instance, value)

    def validate(self, value):

@@ -441,6 +450,9 @@ class GenericEmbeddedDocumentField(BaseField):
    :class:`~mongoengine.EmbeddedDocument` to be stored.

    Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`.

+    ..note :: You can use the choices param to limit the acceptable
+    EmbeddedDocument types
    """

    def prepare_query_value(self, op, value):

@@ -470,6 +482,50 @@ class GenericEmbeddedDocumentField(BaseField):
        return data


+class DynamicField(BaseField):
+    """A truly dynamic field type capable of handling different and varying
+    types of data.
+
+    Used by :class:`~mongoengine.DynamicDocument` to handle dynamic data"""
+
+    def to_mongo(self, value):
+        """Convert a Python type to a MongoDB-compatible type.
+        """
+
+        if isinstance(value, basestring):
+            return value
+
+        if hasattr(value, 'to_mongo'):
+            return value.to_mongo()
+
+        if not isinstance(value, (dict, list, tuple)):
+            return value
+
+        is_list = False
+        if not hasattr(value, 'items'):
+            is_list = True
+            value = dict([(k, v) for k, v in enumerate(value)])
+
+        data = {}
+        for k, v in value.items():
+            data[k] = self.to_mongo(v)
+
+        if is_list:  # Convert back to a list
+            value = [v for k, v in sorted(data.items(), key=itemgetter(0))]
+        else:
+            value = data
+        return value
+
+    def lookup_member(self, member_name):
+        return member_name
+
+    def prepare_query_value(self, op, value):
+        if isinstance(value, basestring):
+            from mongoengine.fields import StringField
+            return StringField().prepare_query_value(op, value)
+        return self.to_mongo(value)

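A sketch of the behaviour the new `DynamicField` enables (the `Page` document is illustrative):

    class Page(DynamicDocument):
        title = StringField()

    page = Page(title='api')
    page.tags = ['mongodb', 'odm']   # undeclared attribute, backed by DynamicField
    page.save()                      # to_mongo() above serialises the list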
 class ListField(ComplexBaseField):
     """A list field that wraps a standard field, allowing multiple instances
     of the field to be used as a list in the database.

@@ -497,6 +553,7 @@ class ListField(ComplexBaseField):
     def prepare_query_value(self, op, value):
         if self.field:
             if op in ('set', 'unset') and (not isinstance(value, basestring)
+                                           and not isinstance(value, BaseDocument)
                                            and hasattr(value, '__iter__')):
                 return [self.field.prepare_query_value(op, v) for v in value]
             return self.field.prepare_query_value(op, value)

@@ -611,6 +668,18 @@ class ReferenceField(BaseField):
      * NULLIFY - Updates the reference to null.
      * CASCADE - Deletes the documents associated with the reference.
      * DENY - Prevent the deletion of the reference object.
+      * PULL - Pull the reference from a :class:`~mongoengine.ListField` of references
+
+    Alternative syntax for registering delete rules (useful when implementing
+    bi-directional delete rules)
+
+    .. code-block:: python
+
+        class Bar(Document):
+            content = StringField()
+            foo = ReferenceField('Foo')
+
+        Bar.register_delete_rule(Foo, 'bar', NULLIFY)

    .. versionchanged:: 0.5 added `reverse_delete_rule`
    """
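A sketch of the bi-directional case the alternative syntax is for (illustrative; `Foo` and `Bar` follow the docstring above). Each rule can only be registered once both classes exist, which the field keyword cannot express:

    class Foo(Document):
        bar = ReferenceField('Bar')   # forward reference by name

    class Bar(Document):
        foo = ReferenceField(Foo)

    # Register each side after both classes are defined.
    Bar.register_delete_rule(Foo, 'bar', NULLIFY)
    Foo.register_delete_rule(Bar, 'foo', NULLIFY)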
@@ -656,6 +725,9 @@ class ReferenceField(BaseField):
        return super(ReferenceField, self).__get__(instance, owner)

    def to_mongo(self, document):
+        if isinstance(document, DBRef):
+            return document
+
        id_field_name = self.document_type._meta['id_field']
        id_field = self.document_type._fields[id_field_name]

@@ -697,6 +769,8 @@ class GenericReferenceField(BaseField):
    ..note :: Any documents used as a generic reference must be registered in the
    document registry. Importing the model will automatically register it.

+    ..note :: You can use the choices param to limit the acceptable Document types
+
    .. versionadded:: 0.3
    """

@@ -731,6 +805,9 @@ class GenericReferenceField(BaseField):
        if document is None:
            return None

+        if isinstance(document, (dict, SON)):
+            return document
+
        id_field_name = document.__class__._meta['id_field']
        id_field = document.__class__._fields[id_field_name]

@@ -767,11 +844,10 @@ class BinaryField(BaseField):
        return Binary(value)

    def to_python(self, value):
-        # Returns str not unicode as this is binary data
-        return str(value)
+        return "%s" % value

    def validate(self, value):
-        if not isinstance(value, str):
+        if not isinstance(value, basestring):
            self.error('BinaryField only accepts string values')

        if self.max_bytes is not None and len(value) > self.max_bytes:

@@ -825,6 +901,13 @@ class GridFSProxy(object):
        self_dict['_fs'] = None
        return self_dict

+    def __repr__(self):
+        return '<%s: %s>' % (self.__class__.__name__, self.grid_id)
+
+    def __cmp__(self, other):
+        return cmp((self.grid_id, self.collection_name, self.db_alias),
+                   (other.grid_id, other.collection_name, other.db_alias))
+
    @property
    def fs(self):
        if not self._fs:

@@ -871,10 +954,14 @@ class GridFSProxy(object):
        self.newfile.writelines(lines)

    def read(self, size=-1):
-        try:
-            return self.get().read(size)
-        except:
+        gridout = self.get()
+        if gridout is None:
            return None
+        else:
+            try:
+                return gridout.read(size)
+            except:
+                return ""

    def delete(self):
        # Delete file from GridFS, FileField still remains

@@ -919,19 +1006,20 @@ class FileField(BaseField):

        # Check if a file already exists for this model
        grid_file = instance._data.get(self.name)
-        self.grid_file = grid_file
-        if isinstance(self.grid_file, self.proxy_class):
-            if not self.grid_file.key:
-                self.grid_file.key = self.name
-                self.grid_file.instance = instance
-            return self.grid_file
-        return self.proxy_class(key=self.name, instance=instance,
-                                db_alias=self.db_alias,
-                                collection_name=self.collection_name)
+        if not isinstance(grid_file, self.proxy_class):
+            grid_file = self.proxy_class(key=self.name, instance=instance,
+                                         db_alias=self.db_alias,
+                                         collection_name=self.collection_name)
+            instance._data[self.name] = grid_file
+
+        if not grid_file.key:
+            grid_file.key = self.name
+            grid_file.instance = instance
+        return grid_file

    def __set__(self, instance, value):
        key = self.name
-        if isinstance(value, file) or isinstance(value, str):
+        if (hasattr(value, 'read') and not isinstance(value, GridFSProxy)) or isinstance(value, basestring):
            # using "FileField() = file/string" notation
            grid_file = instance._data.get(self.name)
            # If a file already exists, delete it
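A sketch of what the relaxed `__set__` check above accepts: any object with a `read()` method, not only the built-in `file` type (the `Animal` document is illustrative):

    from StringIO import StringIO

    class Animal(Document):
        photo = FileField()

    marmot = Animal()
    marmot.photo = StringIO('raw image bytes')  # file-like, passes hasattr(value, 'read')
    marmot.save()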
@@ -4,13 +4,15 @@ import copy
 import itertools
 import operator

+from functools import partial
+
 import pymongo
 from bson.code import Code

 from mongoengine import signals

 __all__ = ['queryset_manager', 'Q', 'InvalidQueryError',
-           'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY']
+           'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL']


 # The maximum number of items to display in a QuerySet.__repr__

@@ -21,6 +23,7 @@ DO_NOTHING = 0
 NULLIFY = 1
 CASCADE = 2
 DENY = 3
+PULL = 4


 class DoesNotExist(Exception):

@@ -340,6 +343,7 @@ class QuerySet(object):
        self._timeout = True
        self._class_check = True
        self._slave_okay = False
+        self._iter = False
        self._scalar = []

        # If inheritance is allowed, only return instances and instances of

@@ -394,61 +398,6 @@ class QuerySet(object):
                          unique=index_spec.get('unique', False))
        return self

-    @classmethod
-    def _build_index_spec(cls, doc_cls, spec):
-        """Build a PyMongo index spec from a MongoEngine index spec.
-        """
-        if isinstance(spec, basestring):
-            spec = {'fields': [spec]}
-        if isinstance(spec, (list, tuple)):
-            spec = {'fields': spec}
-
-        index_list = []
-        use_types = doc_cls._meta.get('allow_inheritance', True)
-        for key in spec['fields']:
-            # Get ASCENDING direction from +, DESCENDING from -, and GEO2D from *
-            direction = pymongo.ASCENDING
-            if key.startswith("-"):
-                direction = pymongo.DESCENDING
-            elif key.startswith("*"):
-                direction = pymongo.GEO2D
-            if key.startswith(("+", "-", "*")):
-                key = key[1:]
-
-            # Use real field name, do it manually because we need field
-            # objects for the next part (list field checking)
-            parts = key.split('.')
-            fields = QuerySet._lookup_field(doc_cls, parts)
-            parts = [field.db_field for field in fields]
-            key = '.'.join(parts)
-            index_list.append((key, direction))
-
-            # Check if a list field is being used, don't use _types if it is
-            if use_types and not all(f._index_with_types for f in fields):
-                use_types = False
-
-        # If _types is being used, prepend it to every specified index
-        index_types = doc_cls._meta.get('index_types', True)
-        allow_inheritance = doc_cls._meta.get('allow_inheritance')
-        if spec.get('types', index_types) and allow_inheritance and use_types and direction is not pymongo.GEO2D:
-            index_list.insert(0, ('_types', 1))
-
-        spec['fields'] = index_list
-
-        if spec.get('sparse', False) and len(spec['fields']) > 1:
-            raise ValueError(
-                'Sparse indexes can only have one field in them. '
-                'See https://jira.mongodb.org/browse/SERVER-2193')
-
-        return spec
-
-    @classmethod
-    def _reset_already_indexed(cls, document=None):
-        """Helper to reset already indexed, can be useful for testing purposes"""
-        if document:
-            cls.__already_indexed.discard(document)
-        cls.__already_indexed.clear()
-
    def __call__(self, q_obj=None, class_check=True, slave_okay=False, **query):
        """Filter the selected documents by calling the
        :class:`~mongoengine.queryset.QuerySet` with a query.

@@ -481,13 +430,128 @@ class QuerySet(object):
        """Returns all documents."""
        return self.__call__()

+    def _ensure_indexes(self):
+        """Checks the document meta data and ensures all the indexes exist.
+
+        .. note:: You can disable automatic index creation by setting
+                  `auto_create_index` to False in the documents meta data
+        """
+        background = self._document._meta.get('index_background', False)
+        drop_dups = self._document._meta.get('index_drop_dups', False)
+        index_opts = self._document._meta.get('index_opts', {})
+        index_types = self._document._meta.get('index_types', True)
+
+        # determine if an index which we are creating includes
+        # _type as its first field; if so, we can avoid creating
+        # an extra index on _type, as mongodb will use the existing
+        # index to service queries against _type
+        types_indexed = False
+        def includes_types(fields):
+            first_field = None
+            if len(fields):
+                if isinstance(fields[0], basestring):
+                    first_field = fields[0]
+                elif isinstance(fields[0], (list, tuple)) and len(fields[0]):
+                    first_field = fields[0][0]
+            return first_field == '_types'
+
+        # Ensure indexes created by uniqueness constraints
+        for index in self._document._meta['unique_indexes']:
+            types_indexed = types_indexed or includes_types(index)
+            self._collection.ensure_index(index, unique=True,
+                background=background, drop_dups=drop_dups, **index_opts)
+
+        # Ensure document-defined indexes are created
+        if self._document._meta['indexes']:
+            for spec in self._document._meta['indexes']:
+                types_indexed = types_indexed or includes_types(spec['fields'])
+                opts = index_opts.copy()
+                opts['unique'] = spec.get('unique', False)
+                opts['sparse'] = spec.get('sparse', False)
+                self._collection.ensure_index(spec['fields'],
+                    background=background, **opts)
+
+        # If _types is being used (for polymorphism), it needs an index,
+        # only if another index doesn't begin with _types
+        if index_types and '_types' in self._query and not types_indexed:
+            self._collection.ensure_index('_types',
+                background=background, **index_opts)
+
+        # Add geo indices
+        for field in self._document._geo_indices():
+            index_spec = [(field.db_field, pymongo.GEO2D)]
+            self._collection.ensure_index(index_spec,
+                background=background, **index_opts)
+
+    @classmethod
+    def _build_index_spec(cls, doc_cls, spec):
+        """Build a PyMongo index spec from a MongoEngine index spec.
+        """
+        if isinstance(spec, basestring):
+            spec = {'fields': [spec]}
+        if isinstance(spec, (list, tuple)):
+            spec = {'fields': spec}
+
+        index_list = []
+        direction = None
+        use_types = doc_cls._meta.get('allow_inheritance', True)
+        for key in spec['fields']:
+            # Get ASCENDING direction from +, DESCENDING from -, and GEO2D from *
+            direction = pymongo.ASCENDING
+            if key.startswith("-"):
+                direction = pymongo.DESCENDING
+            elif key.startswith("*"):
+                direction = pymongo.GEO2D
+            if key.startswith(("+", "-", "*")):
+                key = key[1:]
+
+            # Use real field name, do it manually because we need field
+            # objects for the next part (list field checking)
+            parts = key.split('.')
+            if parts in (['pk'], ['id'], ['_id']):
+                key = '_id'
+            else:
+                fields = QuerySet._lookup_field(doc_cls, parts)
+                parts = [field if field == '_id' else field.db_field for field in fields]
+                key = '.'.join(parts)
+            index_list.append((key, direction))
+
+        # If sparse - don't include types
+        if spec.get('sparse', False):
+            use_types = False
+
+        # Check if a list field is being used, don't use _types if it is
+        if use_types and not all(f._index_with_types for f in fields):
+            use_types = False
+
+        # If _types is being used, prepend it to every specified index
+        index_types = doc_cls._meta.get('index_types', True)
+        allow_inheritance = doc_cls._meta.get('allow_inheritance')
+        if spec.get('types', index_types) and allow_inheritance and use_types and direction is not pymongo.GEO2D:
+            index_list.insert(0, ('_types', 1))
+
+        spec['fields'] = index_list
+        if spec.get('sparse', False) and len(spec['fields']) > 1:
+            raise ValueError(
+                'Sparse indexes can only have one field in them. '
+                'See https://jira.mongodb.org/browse/SERVER-2193')
+
+        return spec
+
+    @classmethod
+    def _reset_already_indexed(cls, document=None):
+        """Helper to reset already indexed, can be useful for testing purposes"""
+        if document:
+            cls.__already_indexed.discard(document)
+        cls.__already_indexed.clear()

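A sketch of meta index specs the rewritten builder accepts, including the new `pk`/`id` normalisation (the `Event` document is illustrative):

    class Event(Document):
        date = DateTimeField()
        loc = GeoPointField()

        meta = {
            'indexes': [
                '-date',           # '-' prefix -> pymongo.DESCENDING
                ('pk', '+date'),   # 'pk'/'id'/'_id' now resolve to '_id'
                '*loc',            # '*' prefix -> pymongo.GEO2D
            ],
        }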
    @property
    def _collection(self):
        """Property that returns the collection object. This allows us to
        perform operations only if the collection is accessed.
        """
        if self._document not in QuerySet.__already_indexed:

            # Ensure collection exists
            db = self._document._get_db()
            if self._collection_obj.name not in db.collection_names():

@@ -496,52 +560,8 @@ class QuerySet(object):

            QuerySet.__already_indexed.add(self._document)

-            background = self._document._meta.get('index_background', False)
-            drop_dups = self._document._meta.get('index_drop_dups', False)
-            index_opts = self._document._meta.get('index_options', {})
-            index_types = self._document._meta.get('index_types', True)
-
-            # determine if an index which we are creating includes
-            # _type as its first field; if so, we can avoid creating
-            # an extra index on _type, as mongodb will use the existing
-            # index to service queries against _type
-            types_indexed = False
-            def includes_types(fields):
-                first_field = None
-                if len(fields):
-                    if isinstance(fields[0], basestring):
-                        first_field = fields[0]
-                    elif isinstance(fields[0], (list, tuple)) and len(fields[0]):
-                        first_field = fields[0][0]
-                return first_field == '_types'
-
-            # Ensure indexes created by uniqueness constraints
-            for index in self._document._meta['unique_indexes']:
-                types_indexed = types_indexed or includes_types(index)
-                self._collection.ensure_index(index, unique=True,
-                    background=background, drop_dups=drop_dups, **index_opts)
-
-            # Ensure document-defined indexes are created
-            if self._document._meta['indexes']:
-                for spec in self._document._meta['indexes']:
-                    types_indexed = types_indexed or includes_types(spec['fields'])
-                    opts = index_opts.copy()
-                    opts['unique'] = spec.get('unique', False)
-                    opts['sparse'] = spec.get('sparse', False)
-                    self._collection.ensure_index(spec['fields'],
-                        background=background, **opts)
-
-            # If _types is being used (for polymorphism), it needs an index,
-            # only if another index doesn't begin with _types
-            if index_types and '_types' in self._query and not types_indexed:
-                self._collection.ensure_index('_types',
-                    background=background, **index_opts)
-
-            # Add geo indicies
-            for field in self._document._geo_indices():
-                index_spec = [(field.db_field, pymongo.GEO2D)]
-                self._collection.ensure_index(index_spec,
-                    background=background, **index_opts)
-
+            if self._document._meta.get('auto_create_index', True):
+                self._ensure_indexes()

        return self._collection_obj

@@ -603,6 +623,7 @@ class QuerySet(object):
                        "Can't use index on unsubscriptable field (%s)" % err)
                fields.append(field_name)
                continue
+
            if field is None:
                # Look up first field from the document
                if field_name == 'pk':

@@ -611,8 +632,8 @@ class QuerySet(object):
                if field_name in document._fields:
                    field = document._fields[field_name]
                elif document._dynamic:
-                    from base import BaseDynamicField
-                    field = BaseDynamicField(db_field=field_name)
+                    from fields import DynamicField
+                    field = DynamicField(db_field=field_name)
                else:
                    raise InvalidQueryError('Cannot resolve field "%s"'
                                            % field_name)

@@ -620,8 +641,11 @@ class QuerySet(object):
                from mongoengine.fields import ReferenceField, GenericReferenceField
                if isinstance(field, (ReferenceField, GenericReferenceField)):
                    raise InvalidQueryError('Cannot perform join in mongoDB: %s' % '__'.join(parts))
-                # Look up subfield on the previous field
-                new_field = field.lookup_member(field_name)
+                if getattr(field, 'field', None):
+                    new_field = field.field.lookup_member(field_name)
+                else:
+                    # Look up subfield on the previous field
+                    new_field = field.lookup_member(field_name)
                from base import ComplexBaseField
                if not new_field and isinstance(field, ComplexBaseField):
                    fields.append(field_name)

@@ -680,7 +704,7 @@ class QuerySet(object):
        cleaned_fields = []
        for field in fields:
            append_field = True
-            if isinstance(field, str):
+            if isinstance(field, basestring):
                parts.append(field)
                append_field = False
            else:

@@ -781,15 +805,19 @@ class QuerySet(object):
        dictionary of default values for the new document may be provided as a
        keyword argument called :attr:`defaults`.

+        .. note:: This requires two separate operations and therefore a
+            race condition exists. Because there are no transactions in mongoDB
+            other approaches should be investigated, to ensure you don't
+            accidentally duplicate data when using this method.

        :param write_options: optional extra keyword arguments used if we
            have to create a new document.
            Passes any write_options onto :meth:`~mongoengine.Document.save`

-        .. versionadded:: 0.3

        :param auto_save: if the object is to be saved automatically if not found.

-        .. versionadded:: 0.6
+        .. versionadded:: 0.3
+        .. versionupdated:: 0.6 - added `auto_save`
        """
        defaults = query.get('defaults', {})
        if 'defaults' in query:
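A usage sketch of `get_or_create`, with the race-condition caveat above in mind (the `Person` document is illustrative):

    person, created = Person.objects.get_or_create(
        name='Ross',
        defaults={'age': 30},   # only applied when a new document is saved
    )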
@@ -824,11 +852,21 @@ class QuerySet(object):
        result = None
        return result

-    def insert(self, doc_or_docs, load_bulk=True):
+    def insert(self, doc_or_docs, load_bulk=True, safe=False, write_options=None):
        """bulk insert documents

+        If ``safe=True`` and the operation is unsuccessful, an
+        :class:`~mongoengine.OperationError` will be raised.
+
        :param docs_or_doc: a document or list of documents to be inserted
        :param load_bulk (optional): If True returns the list of document instances
+        :param safe: check if the operation succeeded before returning
+        :param write_options: Extra keyword arguments are passed down to
+            :meth:`~pymongo.collection.Collection.insert`
+            which will be used as options for the resultant ``getLastError`` command.
+            For example, ``insert(..., {w: 2, fsync: True})`` will wait until at least two
+            servers have recorded the write and will force an fsync on each server being
+            written to.

        By default returns document instances, set ``load_bulk`` to False to
        return just ``ObjectIds``
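A sketch of a bulk insert using the new parameters (the `Post` document is illustrative):

    posts = [Post(title='one'), Post(title='two')]
    # load_bulk=False returns ObjectIds instead of reloaded documents;
    # safe=True raises OperationError on failure (e.g. duplicate keys).
    ids = Post.objects.insert(posts, load_bulk=False, safe=True,
                              write_options={'w': 2})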
@@ -837,6 +875,10 @@ class QuerySet(object):
        """
        from document import Document

+        if not write_options:
+            write_options = {}
+        write_options.update({'safe': safe})
+
        docs = doc_or_docs
        return_one = False
        if isinstance(docs, Document) or issubclass(docs.__class__, Document):

@@ -854,7 +896,13 @@ class QuerySet(object):
            raw.append(doc.to_mongo())

        signals.pre_bulk_insert.send(self._document, documents=docs)
-        ids = self._collection.insert(raw)
+        try:
+            ids = self._collection.insert(raw, **write_options)
+        except pymongo.errors.OperationFailure, err:
+            message = 'Could not save document (%s)'
+            if u'duplicate key' in unicode(err):
+                message = u'Tried to save duplicate unique keys (%s)'
+            raise OperationError(message % unicode(err))

        if not load_bulk:
            signals.post_bulk_insert.send(

@@ -907,6 +955,7 @@ class QuerySet(object):
    def next(self):
        """Wrap the result in a :class:`~mongoengine.Document` object.
        """
+        self._iter = True
        try:
            if self._limit == 0:
                raise StopIteration

@@ -923,6 +972,7 @@ class QuerySet(object):

        .. versionadded:: 0.3
        """
+        self._iter = False
        self._cursor.rewind()

    def count(self):

@@ -1273,11 +1323,17 @@ class QuerySet(object):
            document_cls, field_name = rule_entry
            rule = doc._meta['delete_rules'][rule_entry]
            if rule == CASCADE:
-                document_cls.objects(**{field_name + '__in': self}).delete(safe=safe)
+                ref_q = document_cls.objects(**{field_name + '__in': self})
+                if doc != document_cls or (doc == document_cls and ref_q.count() > 0):
+                    ref_q.delete(safe=safe)
            elif rule == NULLIFY:
                document_cls.objects(**{field_name + '__in': self}).update(
                    safe_update=safe,
                    **{'unset__%s' % field_name: 1})
+            elif rule == PULL:
+                document_cls.objects(**{field_name + '__in': self}).update(
+                    safe_update=safe,
+                    **{'pull_all__%s' % field_name: self})

        self._collection.remove(self._query, safe=safe)

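A sketch of declaring the new PULL rule (illustrative; assumes a `User` document exists). Deleting a referenced `User` then removes it from `Post.authors` via the `pull_all` update above:

    class Post(Document):
        authors = ListField(ReferenceField(User, reverse_delete_rule=PULL))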
@@ -1318,7 +1374,7 @@ class QuerySet(object):
        cleaned_fields = []
        for field in fields:
            append_field = True
-            if isinstance(field, str):
+            if isinstance(field, basestring):
                # Convert the S operator to $
                if field == 'S':
                    field = '$'

@@ -1332,20 +1388,36 @@ class QuerySet(object):
            # Convert value to proper value
            field = cleaned_fields[-1]

-            if op in (None, 'set', 'push', 'pull', 'addToSet'):
+            if op in (None, 'set', 'push', 'pull'):
                if field.required or value is not None:
                    value = field.prepare_query_value(op, value)
            elif op in ('pushAll', 'pullAll'):
                value = [field.prepare_query_value(op, v) for v in value]
+            elif op == 'addToSet':
+                if isinstance(value, (list, tuple, set)):
+                    value = [field.prepare_query_value(op, v) for v in value]
+                elif field.required or value is not None:
+                    value = field.prepare_query_value(op, value)

        key = '.'.join(parts)

        if not op:
            raise InvalidQueryError("Updates must supply an operation eg: set__FIELD=value")

-        if op:
+        if 'pull' in op and '.' in key:
+            # Dot operators don't work on pull operations
+            # it uses nested dict syntax
+            if op == 'pullAll':
+                raise InvalidQueryError("pullAll operations only support a single field depth")
+
+            parts.reverse()
+            for key in parts:
+                value = {key: value}
+        elif op == 'addToSet' and isinstance(value, list):
+            value = {key: {"$each": value}}
+        else:
            value = {key: value}
        key = '$' + op

        if key not in mongo_update:
            mongo_update[key] = value
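A sketch of the `addToSet` behaviour added above: handing a list to an `add_to_set__` update now becomes `$addToSet` with `$each` rather than adding the whole list as a single element (the `Post` document is illustrative):

    Post.objects(id=post.id).update(add_to_set__tags=['mongodb', 'odm'])
    # -> {'$addToSet': {'tags': {'$each': ['mongodb', 'odm']}}}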
@@ -1371,8 +1443,15 @@ class QuerySet(object):
            write_options = {}

        update = QuerySet._transform_update(self._document, **update)
+        query = self._query
+
+        # SERVER-5247 hack
+        remove_types = "_types" in query and ".$." in unicode(update)
+        if remove_types:
+            del query["_types"]
+
        try:
-            ret = self._collection.update(self._query, update, multi=multi,
+            ret = self._collection.update(query, update, multi=multi,
                                          upsert=upsert, safe=safe_update,
                                          **write_options)
            if ret is not None and 'n' in ret:

@@ -1400,10 +1479,17 @@ class QuerySet(object):
        if not write_options:
            write_options = {}
        update = QuerySet._transform_update(self._document, **update)
+        query = self._query
+
+        # SERVER-5247 hack
+        remove_types = "_types" in query and ".$." in unicode(update)
+        if remove_types:
+            del query["_types"]
+
        try:
            # Explicitly provide 'multi=False' to newer versions of PyMongo
            # as the default may change to 'True'
-            ret = self._collection.update(self._query, update, multi=False,
+            ret = self._collection.update(query, update, multi=False,
                                          upsert=upsert, safe=safe_update,
                                          **write_options)

@@ -1421,8 +1507,6 @@ class QuerySet(object):
        def lookup(obj, name):
            chunks = name.split('__')
            for chunk in chunks:
-                if hasattr(obj, '_db_field_map'):
-                    chunk = obj._db_field_map.get(chunk, chunk)
                obj = getattr(obj, chunk)
            return obj

@@ -1634,10 +1718,11 @@ class QuerySet(object):
    def _item_frequencies_map_reduce(self, field, normalize=False):
        map_func = """
            function() {
-                path = '{{~%(field)s}}'.split('.');
-                field = this;
+                var path = '{{~%(field)s}}'.split('.');
+                var field = this;

                for (p in path) {
-                    if (field)
+                    if (typeof field != 'undefined')
                        field = field[path[p]];
                    else
                        break;

@@ -1646,7 +1731,7 @@ class QuerySet(object):
                    field.forEach(function(item) {
                        emit(item, 1);
                    });
-                } else if (field) {
+                } else if (typeof field != 'undefined') {
                    emit(field, 1);
                } else {
                    emit(null, 1);

@@ -1670,12 +1755,12 @@ class QuerySet(object):
            if isinstance(key, float):
                if int(key) == key:
                    key = int(key)
-            key = str(key)
-            frequencies[key] = f.value
+            frequencies[key] = int(f.value)

        if normalize:
            count = sum(frequencies.values())
-            frequencies = dict([(k, v / count) for k, v in frequencies.items()])
+            frequencies = dict([(k, float(v) / count)
+                                for k, v in frequencies.items()])

        return frequencies

@@ -1683,31 +1768,28 @@ class QuerySet(object):
        """Uses exec_js to execute"""
        freq_func = """
            function(path) {
-                path = path.split('.');
+                var path = path.split('.');

-                if (options.normalize) {
-                    var total = 0.0;
-                    db[collection].find(query).forEach(function(doc) {
-                        field = doc;
-                        for (p in path) {
-                            if (field)
-                                field = field[path[p]];
-                            else
-                                break;
-                        }
-                        if (field && field.constructor == Array) {
-                            total += field.length;
-                        } else {
-                            total++;
-                        }
-                    });
-                }
+                var total = 0.0;
+                db[collection].find(query).forEach(function(doc) {
+                    var field = doc;
+                    for (p in path) {
+                        if (field)
+                            field = field[path[p]];
+                        else
+                            break;
+                    }
+                    if (field && field.constructor == Array) {
+                        total += field.length;
+                    } else {
+                        total++;
+                    }
+                });

                var frequencies = {};
+                var types = {};
                var inc = 1.0;
-                if (options.normalize) {
-                    inc /= total;
-                }
                db[collection].find(query).forEach(function(doc) {
                    field = doc;
                    for (p in path) {

@@ -1722,34 +1804,48 @@ class QuerySet(object):
                    });
                    } else {
                        var item = field;
+                        types[item] = item;
                        frequencies[item] = inc + (isNaN(frequencies[item]) ? 0: frequencies[item]);
                    }
                });
-                return frequencies;
+                return [total, frequencies, types];
            }
        """
-        data = self.exec_js(freq_func, field, normalize=normalize)
-        if 'undefined' in data:
-            data[None] = data['undefined']
-            del(data['undefined'])
-        return data
+        total, data, types = self.exec_js(freq_func, field)
+        values = dict([(types.get(k), int(v)) for k, v in data.iteritems()])
+
+        if normalize:
+            values = dict([(k, float(v) / total) for k, v in values.items()])
+
+        frequencies = {}
+        for k, v in values.iteritems():
+            if isinstance(k, float):
+                if int(k) == k:
+                    k = int(k)
+
+            frequencies[k] = v
+
+        return frequencies

|
def __repr__(self):
|
||||||
limit = REPR_OUTPUT_SIZE + 1
|
"""Provides the string representation of the QuerySet
|
||||||
start = (0 if self._skip is None else self._skip)
|
|
||||||
if self._limit is None:
|
.. versionchanged:: 0.6.13 Now doesnt modify the cursor
|
||||||
stop = start + limit
|
"""
|
||||||
if self._limit is not None:
|
|
||||||
if self._limit - start > limit:
|
if self._iter:
|
||||||
stop = start + limit
|
return '.. queryset mid-iteration ..'
|
||||||
else:
|
|
||||||
stop = self._limit
|
data = []
|
||||||
try:
|
for i in xrange(REPR_OUTPUT_SIZE + 1):
|
||||||
data = list(self[start:stop])
|
try:
|
||||||
except pymongo.errors.InvalidOperation:
|
data.append(self.next())
|
||||||
return ".. queryset mid-iteration .."
|
except StopIteration:
|
||||||
|
break
|
||||||
if len(data) > REPR_OUTPUT_SIZE:
|
if len(data) > REPR_OUTPUT_SIZE:
|
||||||
data[-1] = "...(remaining elements truncated)..."
|
data[-1] = "...(remaining elements truncated)..."
|
||||||
|
|
||||||
|
self.rewind()
|
||||||
return repr(data)
|
return repr(data)
|
||||||
|
|
||||||
     def select_related(self, max_depth=1):

@@ -1785,10 +1881,13 @@ class QuerySetManager(object):
         queryset_class = owner._meta['queryset_class'] or QuerySet
         queryset = queryset_class(owner, owner._get_collection())
         if self.get_queryset:
-            if self.get_queryset.func_code.co_argcount == 1:
+            var_names = self.get_queryset.func_code.co_varnames
+            if var_names == ('queryset',):
                 queryset = self.get_queryset(queryset)
-            else:
+            elif var_names == ('doc_cls', 'queryset',):
                 queryset = self.get_queryset(owner, queryset)
+            else:
+                queryset = partial(self.get_queryset, owner, queryset)
         return queryset

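`QuerySetManager` now dispatches on the argument names of the supplied `get_queryset` function rather than its arity, with a `functools.partial` fallback for anything else. Under those rules a custom manager can be spelled either way; a sketch using the `queryset_manager` decorator:

    from mongoengine import Document, StringField
    from mongoengine.queryset import queryset_manager

    class Post(Document):
        title = StringField()

        @queryset_manager
        def titled(doc_cls, queryset):
            # matches the ('doc_cls', 'queryset') branch above
            return queryset.filter(title__ne=None)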
@@ -5,7 +5,7 @@
 %define srcname mongoengine

 Name:           python-%{srcname}
-Version:        0.6.1
+Version:        0.6.15
 Release:        1%{?dist}
 Summary:        A Python Document-Object Mapper for working with MongoDB

@@ -51,12 +51,4 @@ rm -rf $RPM_BUILD_ROOT
 # %{python_sitearch}/*

 %changelog
-* Mon Mar 05 2012 Ross Lawley <ross.lawley@gmail.com> 0.6
-- 0.6 released
-* Thu Oct 27 2011 Pau Aliagas <linuxnow@gmail.com> 0.5.3-1
-- Update to latest dev version
-- Add PIL dependency for ImageField
-* Wed Oct 12 2011 Pau Aliagas <linuxnow@gmail.com> 0.5.2-1
-- Update version
-* Fri Sep 23 2011 Pau Aliagas <linuxnow@gmail.com> 0.5.0-1
-- Initial version
+* See: http://readthedocs.org/docs/mongoengine-odm/en/latest/changelog.html
requirements.txt (new file, 1 line)
@@ -0,0 +1 @@
+pymongo
setup.cfg (new file, 13 lines)
@@ -0,0 +1,13 @@
+[aliases]
+test = nosetests
+
+[nosetests]
+verbosity = 2
+detailed-errors = 1
+#with-coverage = 1
+#cover-erase = 1
+#cover-html = 1
+#cover-html-dir = ../htmlcov
+#cover-package = mongoengine
+where = tests
+#tests = test_bugfix.py
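With the `[aliases]` section mapping `test` to nosetests and `where = tests` pointing nose at the test directory, the suite should be runnable straight through setuptools (assuming nose is installed); the commented `cover-*` keys are left as opt-in coverage switches:

    $ python setup.py test    # runs nosetests with verbosity=2 over tests/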
setup.py
@@ -35,7 +35,7 @@ CLASSIFIERS = [

 setup(name='mongoengine',
       version=VERSION,
-      packages=find_packages(),
+      packages=find_packages(exclude=('tests',)),
       author='Harry Marr',
       author_email='harry.marr@{nospam}gmail.com',
       maintainer="Ross Lawley",
@@ -48,6 +48,5 @@ setup(name='mongoengine',
       platforms=['any'],
       classifiers=CLASSIFIERS,
       install_requires=['pymongo'],
-      test_suite='tests',
-      tests_require=['blinker', 'django>=1.3', 'PIL']
+      tests_require=['nose', 'coverage', 'blinker', 'django>=1.3', 'PIL']
 )
@@ -1,8 +1,11 @@
-import unittest
+import datetime
 import pymongo
+import unittest
+
 import mongoengine.connection

+from bson.tz_util import utc
+
 from mongoengine import *
 from mongoengine.connection import get_db, get_connection, ConnectionError

@@ -65,6 +68,31 @@ class ConnectionTest(unittest.TestCase):
         self.assertTrue(isinstance(db, pymongo.database.Database))
         self.assertEqual(db.name, 'mongoenginetest2')

+    def test_connection_kwargs(self):
+        """Ensure that connection kwargs get passed to pymongo.
+        """
+        connect('mongoenginetest', alias='t1', tz_aware=True)
+        conn = get_connection('t1')
+
+        self.assertTrue(conn.tz_aware)
+
+        connect('mongoenginetest2', alias='t2')
+        conn = get_connection('t2')
+        self.assertFalse(conn.tz_aware)
+
+    def test_datetime(self):
+        connect('mongoenginetest', tz_aware=True)
+        d = datetime.datetime(2010, 5, 5, tzinfo=utc)
+
+        class DateDoc(Document):
+            the_date = DateTimeField(required=True)
+
+        DateDoc.drop_collection()
+        DateDoc(the_date=d).save()
+
+        date_doc = DateDoc.objects.first()
+        self.assertEqual(d, date_doc.the_date)
+
+
 if __name__ == '__main__':
     unittest.main()
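The new tests document that extra `connect()` keyword arguments are forwarded to the underlying pymongo connection, so timezone-aware behaviour is a single flag. A sketch of the round trip that `test_datetime` asserts (the model name is illustrative):

    import datetime
    from bson.tz_util import utc
    from mongoengine import connect, Document, DateTimeField

    connect('tzdemo', tz_aware=True)

    class Event(Document):
        when = DateTimeField(required=True)

    Event.drop_collection()
    Event(when=datetime.datetime(2010, 5, 5, tzinfo=utc)).save()

    # pymongo returns timezone-aware datetimes on a tz_aware connection,
    # so the stored and loaded values compare equal
    assert Event.objects.first().when == datetime.datetime(2010, 5, 5, tzinfo=utc)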
@@ -810,3 +810,56 @@ class FieldTest(unittest.TestCase):
         room = Room.objects.first().select_related()
         self.assertEquals(room.staffs_with_position[0]['staff'], sarah)
         self.assertEquals(room.staffs_with_position[1]['staff'], bob)
+
+    def test_document_reload_no_inheritance(self):
+        class Foo(Document):
+            meta = {'allow_inheritance': False}
+            bar = ReferenceField('Bar')
+            baz = ReferenceField('Baz')
+
+        class Bar(Document):
+            meta = {'allow_inheritance': False}
+            msg = StringField(required=True, default='Blammo!')
+
+        class Baz(Document):
+            meta = {'allow_inheritance': False}
+            msg = StringField(required=True, default='Kaboom!')
+
+        Foo.drop_collection()
+        Bar.drop_collection()
+        Baz.drop_collection()
+
+        bar = Bar()
+        bar.save()
+        baz = Baz()
+        baz.save()
+        foo = Foo()
+        foo.bar = bar
+        foo.baz = baz
+        foo.save()
+        foo.reload()
+
+        self.assertEquals(type(foo.bar), Bar)
+        self.assertEquals(type(foo.baz), Baz)
+
+    def test_list_lookup_not_checked_in_map(self):
+        """Ensure we dereference list data correctly
+        """
+        class Comment(Document):
+            id = IntField(primary_key=True)
+            text = StringField()
+
+        class Message(Document):
+            id = IntField(primary_key=True)
+            comments = ListField(ReferenceField(Comment))
+
+        Comment.drop_collection()
+        Message.drop_collection()
+
+        c1 = Comment(id=0, text='zero').save()
+        c2 = Comment(id=1, text='one').save()
+        Message(id=1, comments=[c1, c2]).save()
+
+        msg = Message.objects.get(id=1)
+        self.assertEqual(0, msg.comments[0].id)
+        self.assertEqual(1, msg.comments[1].id)
@@ -12,6 +12,10 @@ from django.core.paginator import Paginator

 settings.configure()

+from django.contrib.sessions.tests import SessionTestsMixin
+from mongoengine.django.sessions import SessionStore, MongoSession
+
+
 class QuerySetTest(unittest.TestCase):

     def setUp(self):
@@ -88,3 +92,19 @@ class QuerySetTest(unittest.TestCase):
             end = p * 2
             start = end - 1
             self.assertEqual(t.render(Context(d)), u'%d:%d:' % (start, end))
+
+
+class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase):
+    backend = SessionStore
+
+    def setUp(self):
+        connect(db='mongoenginetest')
+        MongoSession.drop_collection()
+        super(MongoDBSessionTest, self).setUp()
+
+    def test_first_save(self):
+        session = SessionStore()
+        session['test'] = True
+        session.save()
+        self.assertTrue('test' in session)
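`MongoDBSessionTest` reuses Django's own `SessionTestsMixin` against the MongoEngine backend, which is a strong compatibility check. Hooking the backend into a project is presumably the usual settings switch (the database name here is illustrative):

    # settings.py (sketch)
    SESSION_ENGINE = 'mongoengine.django.sessions'

    # at startup, before any session access
    from mongoengine import connect
    connect('myapp')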
@@ -1,3 +1,4 @@
+import os
 import pickle
 import pymongo
 import bson
@@ -6,13 +7,15 @@ import warnings

 from datetime import datetime

-from fixtures import Base, Mixin, PickleEmbedded, PickleTest
+from tests.fixtures import Base, Mixin, PickleEmbedded, PickleTest

 from mongoengine import *
 from mongoengine.base import NotRegistered, InvalidDocumentError
 from mongoengine.queryset import InvalidQueryError
 from mongoengine.connection import get_db

+TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png')
+

 class DocumentTest(unittest.TestCase):

@@ -96,7 +99,7 @@ class DocumentTest(unittest.TestCase):
         # Ensure Document isn't treated like an actual document
         self.assertFalse(hasattr(Document, '_fields'))

-    def test_collection_name(self):
+    def test_collection_naming(self):
         """Ensure that a collection with a specified name may be used.
         """

@@ -157,11 +160,12 @@ class DocumentTest(unittest.TestCase):
         }

         class BaseDocument(Document, BaseMixin):
-            pass
+            meta = {'allow_inheritance': True}

         class MyDocument(BaseDocument):
             pass
-        self.assertEquals('mydocument', MyDocument._get_collection_name())
+
+        self.assertEquals('basedocument', MyDocument._get_collection_name())

     def test_get_superclasses(self):
         """Ensure that the correct list of superclasses is assembled.
@@ -660,6 +664,49 @@ class DocumentTest(unittest.TestCase):

         BlogPost.drop_collection()

+    def test_db_field_load(self):
+        """Ensure we load data correctly
+        """
+        class Person(Document):
+            name = StringField(required=True)
+            _rank = StringField(required=False, db_field="rank")
+
+            @property
+            def rank(self):
+                return self._rank or "Private"
+
+        Person.drop_collection()
+
+        Person(name="Jack", _rank="Corporal").save()
+
+        Person(name="Fred").save()
+
+        self.assertEquals(Person.objects.get(name="Jack").rank, "Corporal")
+        self.assertEquals(Person.objects.get(name="Fred").rank, "Private")
+
+    def test_db_embedded_doc_field_load(self):
+        """Ensure we load embedded document data correctly
+        """
+        class Rank(EmbeddedDocument):
+            title = StringField(required=True)
+
+        class Person(Document):
+            name = StringField(required=True)
+            rank_ = EmbeddedDocumentField(Rank, required=False, db_field='rank')
+
+            @property
+            def rank(self):
+                return self.rank_.title if self.rank_ is not None else "Private"
+
+        Person.drop_collection()
+
+        Person(name="Jack", rank_=Rank(title="Corporal")).save()
+
+        Person(name="Fred").save()
+
+        self.assertEquals(Person.objects.get(name="Jack").rank, "Corporal")
+        self.assertEquals(Person.objects.get(name="Fred").rank, "Private")
+
     def test_explicit_geo2d_index(self):
         """Ensure that geo2d indexes work when created via meta[indexes]
         """
@@ -740,6 +787,28 @@ class DocumentTest(unittest.TestCase):
         self.assertEqual(info.keys(), ['_types_1_user_guid_1', '_id_', '_types_1_name_1'])
         Person.drop_collection()

+    def test_disable_index_creation(self):
+        """Tests setting auto_create_index to False on the connection will
+        disable any index generation.
+        """
+        class User(Document):
+            meta = {
+                'indexes': ['user_guid'],
+                'auto_create_index': False
+            }
+            user_guid = StringField(required=True)
+
+        User.drop_collection()
+
+        u = User(user_guid='123')
+        u.save()
+
+        self.assertEquals(1, User.objects.count())
+        info = User.objects._collection.index_information()
+        self.assertEqual(info.keys(), ['_id_'])
+        User.drop_collection()
+
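`auto_create_index: False` stops MongoEngine from ensuring the declared indexes on first use, which suits deployments where indexes are managed out of band; the test above verifies that only the implicit `_id_` index remains. Declaring it is one extra meta key (the model is hypothetical):

    class AuditLog(Document):
        meta = {
            'indexes': ['user_guid'],    # still declared for documentation,
            'auto_create_index': False,  # but never created automatically
        }
        user_guid = StringField(required=True)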
     def test_embedded_document_index(self):
         """Tests settings an index on an embedded document
         """
@@ -803,15 +872,26 @@ class DocumentTest(unittest.TestCase):

     def test_geo_indexes_recursion(self):

-        class User(Document):
-            channel = ReferenceField('Channel')
-            location = GeoPointField()
+        class Location(Document):
+            name = StringField()
+            location = GeoPointField()

-        class Channel(Document):
-            user = ReferenceField('User')
-            location = GeoPointField()
+        class Parent(Document):
+            name = StringField()
+            location = ReferenceField(Location)

-        self.assertEquals(len(User._geo_indices()), 2)
+        Location.drop_collection()
+        Parent.drop_collection()
+
+        list(Parent.objects)
+
+        collection = Parent._get_collection()
+        info = collection.index_information()
+
+        self.assertFalse('location_2d' in info)
+
+        self.assertEquals(len(Parent._geo_indices()), 0)
+        self.assertEquals(len(Location._geo_indices()), 1)

     def test_covered_index(self):
         """Ensure that covered indexes can be used
@@ -841,6 +921,26 @@ class DocumentTest(unittest.TestCase):
         query_plan = Test.objects(a=1).only('a').exclude('id').explain()
         self.assertTrue(query_plan['indexOnly'])

+    def test_index_on_id(self):
+
+        class BlogPost(Document):
+            meta = {
+                'indexes': [
+                    ['categories', 'id']
+                ],
+                'allow_inheritance': False
+            }
+
+            title = StringField(required=True)
+            description = StringField(required=True)
+            categories = ListField()
+
+        BlogPost.drop_collection()
+
+        indexes = BlogPost.objects._collection.index_information()
+        self.assertEquals(indexes['categories_1__id_1']['key'],
+                          [('categories', 1), ('_id', 1)])
+
     def test_hint(self):

         class BlogPost(Document):
@@ -1216,6 +1316,22 @@ class DocumentTest(unittest.TestCase):
         comment.date = datetime.now()
         comment.validate()

+    def test_embedded_db_field_validate(self):
+
+        class SubDoc(EmbeddedDocument):
+            val = IntField()
+
+        class Doc(Document):
+            e = EmbeddedDocumentField(SubDoc, db_field='eb')
+
+        Doc.drop_collection()
+
+        Doc(e=SubDoc(val=15)).save()
+
+        doc = Doc.objects.first()
+        doc.validate()
+        self.assertEquals([None, 'e'], doc._data.keys())
+
     def test_save(self):
         """Ensure that a document may be saved in the database.
         """
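`test_index_on_id` pins down that the `id` shorthand may be used inside compound index specs and is rewritten to the stored `_id` key, so the declaration and resulting index look like this (condensed from the test):

    meta = {'indexes': [['categories', 'id']]}
    # -> index key [('categories', 1), ('_id', 1)], named 'categories_1__id_1'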
@@ -1285,6 +1401,30 @@ class DocumentTest(unittest.TestCase):
         p0.name = 'wpjunior'
         p0.save()

+    def test_save_max_recursion_not_hit_with_file_field(self):
+
+        class Foo(Document):
+            name = StringField()
+            picture = FileField()
+            bar = ReferenceField('self')
+
+        Foo.drop_collection()
+
+        a = Foo(name='hello')
+        a.save()
+
+        a.bar = a
+        a.picture = open(TEST_IMAGE_PATH, 'rb')
+        a.save()
+
+        # Confirm can save and it resets the changed fields without hitting
+        # max recursion error
+        b = Foo.objects.with_id(a.id)
+        b.name = 'world'
+        b.save()
+
+        self.assertEquals(b.picture, b.bar.picture, b.bar.bar.picture)
+
     def test_save_cascades(self):

         class Person(Document):
@@ -1548,6 +1688,77 @@ class DocumentTest(unittest.TestCase):
         site = Site.objects.first()
         self.assertEqual(site.page.log_message, "Error: Dummy message")

+    def test_circular_reference_deltas(self):
+
+        class Person(Document):
+            name = StringField()
+            owns = ListField(ReferenceField('Organization'))
+
+        class Organization(Document):
+            name = StringField()
+            owner = ReferenceField('Person')
+
+        Person.drop_collection()
+        Organization.drop_collection()
+
+        person = Person(name="owner")
+        person.save()
+        organization = Organization(name="company")
+        organization.save()
+
+        person.owns.append(organization)
+        organization.owner = person
+
+        person.save()
+        organization.save()
+
+        p = Person.objects[0].select_related()
+        o = Organization.objects.first()
+        self.assertEquals(p.owns[0], o)
+        self.assertEquals(o.owner, p)
+
+    def test_circular_reference_deltas_2(self):
+
+        class Person(Document):
+            name = StringField()
+            owns = ListField(ReferenceField('Organization'))
+            employer = ReferenceField('Organization')
+
+        class Organization(Document):
+            name = StringField()
+            owner = ReferenceField('Person')
+            employees = ListField(ReferenceField('Person'))
+
+        Person.drop_collection()
+        Organization.drop_collection()
+
+        person = Person(name="owner")
+        person.save()
+
+        employee = Person(name="employee")
+        employee.save()
+
+        organization = Organization(name="company")
+        organization.save()
+
+        person.owns.append(organization)
+        organization.owner = person
+
+        organization.employees.append(employee)
+        employee.employer = organization
+
+        person.save()
+        organization.save()
+        employee.save()
+
+        p = Person.objects.get(name="owner")
+        e = Person.objects.get(name="employee")
+        o = Organization.objects.first()
+
+        self.assertEquals(p.owns[0], o)
+        self.assertEquals(o.owner, p)
+        self.assertEquals(e.employer, o)
+
     def test_delta(self):

         class Doc(Document):
@@ -2375,6 +2586,22 @@ class DocumentTest(unittest.TestCase):

         self.assertRaises(InvalidDocumentError, throw_invalid_document_error)

+    def test_invalid_son(self):
+        """Raise an error if loading invalid data"""
+        class Occurrence(EmbeddedDocument):
+            number = IntField()
+
+        class Word(Document):
+            stem = StringField()
+            count = IntField(default=1)
+            forms = ListField(StringField(), default=list)
+            occurs = ListField(EmbeddedDocumentField(Occurrence), default=list)
+
+        def raise_invalid_document():
+            Word._from_son({'stem': [1, 2, 3], 'forms': 1,
+                            'count': 'one', 'occurs': {"hello": None}})
+
+        self.assertRaises(InvalidDocumentError, raise_invalid_document)
+
     def test_reverse_delete_rule_cascade_and_nullify(self):
         """Ensure that a referenced document is also deleted upon deletion.
         """
@@ -2437,6 +2664,40 @@ class DocumentTest(unittest.TestCase):
         author.delete()
         self.assertEqual(len(BlogPost.objects), 0)

+    def test_two_way_reverse_delete_rule(self):
+        """Ensure that Bi-Directional relationships work with
+        reverse_delete_rule
+        """
+
+        class Bar(Document):
+            content = StringField()
+            foo = ReferenceField('Foo')
+
+        class Foo(Document):
+            content = StringField()
+            bar = ReferenceField(Bar)
+
+        Bar.register_delete_rule(Foo, 'bar', NULLIFY)
+        Foo.register_delete_rule(Bar, 'foo', NULLIFY)
+
+
+        Bar.drop_collection()
+        Foo.drop_collection()
+
+        b = Bar(content="Hello")
+        b.save()
+
+        f = Foo(content="world", bar=b)
+        f.save()
+
+        b.foo = f
+        b.save()
+
+        f.delete()
+
+        self.assertEqual(len(Bar.objects), 1)  # No effect on the BlogPost
+        self.assertEqual(Bar.objects.get().foo, None)
+
     def test_invalid_reverse_delete_rules_raise_errors(self):

         def throw_invalid_document_error():
|
|||||||
}
|
}
|
||||||
) ]), "1,2")
|
) ]), "1,2")
|
||||||
|
|
||||||
|
|
||||||
|
class ValidatorErrorTest(unittest.TestCase):
|
||||||
|
|
||||||
|
def test_to_dict(self):
|
||||||
|
"""Ensure a ValidationError handles error to_dict correctly.
|
||||||
|
"""
|
||||||
|
error = ValidationError('root')
|
||||||
|
self.assertEquals(error.to_dict(), {})
|
||||||
|
|
||||||
|
# 1st level error schema
|
||||||
|
error.errors = {'1st': ValidationError('bad 1st'), }
|
||||||
|
self.assertTrue('1st' in error.to_dict())
|
||||||
|
self.assertEquals(error.to_dict()['1st'], 'bad 1st')
|
||||||
|
|
||||||
|
# 2nd level error schema
|
||||||
|
error.errors = {'1st': ValidationError('bad 1st', errors={
|
||||||
|
'2nd': ValidationError('bad 2nd'),
|
||||||
|
})}
|
||||||
|
self.assertTrue('1st' in error.to_dict())
|
||||||
|
self.assertTrue(isinstance(error.to_dict()['1st'], dict))
|
||||||
|
self.assertTrue('2nd' in error.to_dict()['1st'])
|
||||||
|
self.assertEquals(error.to_dict()['1st']['2nd'], 'bad 2nd')
|
||||||
|
|
||||||
|
# moar levels
|
||||||
|
error.errors = {'1st': ValidationError('bad 1st', errors={
|
||||||
|
'2nd': ValidationError('bad 2nd', errors={
|
||||||
|
'3rd': ValidationError('bad 3rd', errors={
|
||||||
|
'4th': ValidationError('Inception'),
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
})}
|
||||||
|
self.assertTrue('1st' in error.to_dict())
|
||||||
|
self.assertTrue('2nd' in error.to_dict()['1st'])
|
||||||
|
self.assertTrue('3rd' in error.to_dict()['1st']['2nd'])
|
||||||
|
self.assertTrue('4th' in error.to_dict()['1st']['2nd']['3rd'])
|
||||||
|
self.assertEquals(error.to_dict()['1st']['2nd']['3rd']['4th'],
|
||||||
|
'Inception')
|
||||||
|
|
||||||
|
self.assertEquals(error.message, "root(2nd.3rd.4th.Inception: ['1st'])")
|
||||||
|
|
||||||
|
def test_model_validation(self):
|
||||||
|
|
||||||
|
class User(Document):
|
||||||
|
username = StringField(primary_key=True)
|
||||||
|
name = StringField(required=True)
|
||||||
|
|
||||||
|
try:
|
||||||
|
User().validate()
|
||||||
|
except ValidationError, e:
|
||||||
|
expected_error_message = """ValidationError(Field is required: ['username', 'name'])"""
|
||||||
|
self.assertEquals(e.message, expected_error_message)
|
||||||
|
self.assertEquals(e.to_dict(), {
|
||||||
|
'username': 'Field is required',
|
||||||
|
'name': 'Field is required'})
|
||||||
|
|
||||||
|
def test_spaces_in_keys(self):
|
||||||
|
|
||||||
|
class Embedded(DynamicEmbeddedDocument):
|
||||||
|
pass
|
||||||
|
|
||||||
|
class Doc(DynamicDocument):
|
||||||
|
pass
|
||||||
|
|
||||||
|
Doc.drop_collection()
|
||||||
|
doc = Doc()
|
||||||
|
setattr(doc, 'hello world', 1)
|
||||||
|
doc.save()
|
||||||
|
|
||||||
|
one = Doc.objects.filter(**{'hello world': 1}).count()
|
||||||
|
self.assertEqual(1, one)
|
||||||
|
|
||||||
|
|
||||||
|
def test_fields_rewrite(self):
|
||||||
|
class BasePerson(Document):
|
||||||
|
name = StringField()
|
||||||
|
age = IntField()
|
||||||
|
meta = {'abstract': True}
|
||||||
|
|
||||||
|
class Person(BasePerson):
|
||||||
|
name = StringField(required=True)
|
||||||
|
|
||||||
|
|
||||||
|
p = Person(age=15)
|
||||||
|
self.assertRaises(ValidationError, p.validate)
|
||||||
|
|
||||||
|
def test_cascaded_save_wrong_reference(self):
|
||||||
|
|
||||||
|
class ADocument(Document):
|
||||||
|
val = IntField()
|
||||||
|
|
||||||
|
class BDocument(Document):
|
||||||
|
a = ReferenceField(ADocument)
|
||||||
|
|
||||||
|
ADocument.drop_collection()
|
||||||
|
BDocument.drop_collection()
|
||||||
|
|
||||||
|
a = ADocument()
|
||||||
|
a.val = 15
|
||||||
|
a.save()
|
||||||
|
|
||||||
|
b = BDocument()
|
||||||
|
b.a = a
|
||||||
|
b.save()
|
||||||
|
|
||||||
|
a.delete()
|
||||||
|
|
||||||
|
b = BDocument.objects.first()
|
||||||
|
b.save(cascade=True)
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
unittest.main()
|
unittest.main()
|
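The new `ValidatorErrorTest` fixes the public shape of aggregated validation failures: `to_dict()` nests one message per failing field (recursively for embedded documents) and `message` summarises the deepest error. Typical consumer code, as a sketch:

    from mongoengine import Document, StringField, ValidationError

    class User(Document):
        username = StringField(primary_key=True)
        name = StringField(required=True)

    try:
        User().validate()
    except ValidationError, e:
        print e.to_dict()
        # {'username': 'Field is required', 'name': 'Field is required'}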
@@ -2,6 +2,9 @@ import datetime
 import os
 import unittest
 import uuid
+import StringIO
+import tempfile
+import gridfs

 from decimal import Decimal

@@ -18,6 +21,10 @@ class FieldTest(unittest.TestCase):
         connect(db='mongoenginetest')
         self.db = get_db()

+    def tearDown(self):
+        self.db.drop_collection('fs.files')
+        self.db.drop_collection('fs.chunks')
+
     def test_default_values(self):
         """Ensure that default field values are used when creating a document.
         """
@@ -75,7 +82,6 @@ class FieldTest(unittest.TestCase):

         # Retrive data from db and verify it.
         ret = HandleNoneFields.objects.all()[0]
-
         self.assertEqual(ret.str_fld, None)
         self.assertEqual(ret.int_fld, None)
         self.assertEqual(ret.flt_fld, None)
@@ -121,6 +127,19 @@ class FieldTest(unittest.TestCase):

         self.assertRaises(ValidationError, ret.validate)

+    def test_int_and_float_ne_operator(self):
+        class TestDocument(Document):
+            int_fld = IntField()
+            float_fld = FloatField()
+
+        TestDocument.drop_collection()
+
+        TestDocument(int_fld=None, float_fld=None).save()
+        TestDocument(int_fld=1, float_fld=1).save()
+
+        self.assertEqual(1, TestDocument.objects(int_fld__ne=None).count())
+        self.assertEqual(1, TestDocument.objects(float_fld__ne=None).count())
+
     def test_object_id_validation(self):
         """Ensure that invalid values cannot be assigned to string fields.
         """
@@ -339,24 +358,6 @@ class FieldTest(unittest.TestCase):
         self.assertNotEquals(log.date, d1)
         self.assertEquals(log.date, d2)

-        # Pre UTC microseconds above 1000 is wonky.
-        # log.date has an invalid microsecond value so I can't construct
-        # a date to compare.
-        #
-        # However, the timedelta is predicable with pre UTC timestamps
-        # It always adds 16 seconds and [777216-776217] microseconds
-        for i in xrange(1001, 3113, 33):
-            d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, i)
-            log.date = d1
-            log.save()
-            log.reload()
-            self.assertNotEquals(log.date, d1)
-
-            delta = log.date - d1
-            self.assertEquals(delta.seconds, 16)
-            microseconds = 777216 - (i % 1000)
-            self.assertEquals(delta.microseconds, microseconds)
-
         LogEntry.drop_collection()

     def test_complexdatetime_storage(self):
@@ -906,6 +907,48 @@ class FieldTest(unittest.TestCase):

         Extensible.drop_collection()

+    def test_embedded_mapfield_db_field(self):
+
+        class Embedded(EmbeddedDocument):
+            number = IntField(default=0, db_field='i')
+
+        class Test(Document):
+            my_map = MapField(field=EmbeddedDocumentField(Embedded), db_field='x')
+
+        Test.drop_collection()
+
+        test = Test()
+        test.my_map['DICTIONARY_KEY'] = Embedded(number=1)
+        test.save()
+
+        Test.objects.update_one(inc__my_map__DICTIONARY_KEY__number=1)
+
+        test = Test.objects.get()
+        self.assertEqual(test.my_map['DICTIONARY_KEY'].number, 2)
+        doc = self.db.test.find_one()
+        self.assertEqual(doc['x']['DICTIONARY_KEY']['i'], 2)
+
+    def test_embedded_db_field(self):
+
+        class Embedded(EmbeddedDocument):
+            number = IntField(default=0, db_field='i')
+
+        class Test(Document):
+            embedded = EmbeddedDocumentField(Embedded, db_field='x')
+
+        Test.drop_collection()
+
+        test = Test()
+        test.embedded = Embedded(number=1)
+        test.save()
+
+        Test.objects.update_one(inc__embedded__number=1)
+
+        test = Test.objects.get()
+        self.assertEqual(test.embedded.number, 2)
+        doc = self.db.test.find_one()
+        self.assertEqual(doc['x']['i'], 2)
+
     def test_embedded_document_validation(self):
         """Ensure that invalid embedded documents cannot be assigned to
         embedded document fields.
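Both tests above pin the key property of `db_field`: queries and atomic updates are written against the Python attribute names and MongoEngine rewrites them to the stored keys, so renaming a stored key never leaks into calling code. Condensed from the tests:

    class Embedded(EmbeddedDocument):
        number = IntField(default=0, db_field='i')    # stored as 'i'

    class Test(Document):
        embedded = EmbeddedDocumentField(Embedded, db_field='x')  # stored as 'x'

    Test(embedded=Embedded(number=1)).save()
    Test.objects.update_one(inc__embedded__number=1)  # Python-side names
    # the raw document becomes {'x': {'i': 2}, ...}: mapping applied transparently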
@@ -1300,6 +1343,74 @@ class FieldTest(unittest.TestCase):
         self.assertEquals(repr(Person.objects(city=None)),
                           "[<Person: Person object>]")

+
+    def test_generic_reference_choices(self):
+        """Ensure that a GenericReferenceField can handle choices
+        """
+        class Link(Document):
+            title = StringField()
+
+        class Post(Document):
+            title = StringField()
+
+        class Bookmark(Document):
+            bookmark_object = GenericReferenceField(choices=(Post,))
+
+        Link.drop_collection()
+        Post.drop_collection()
+        Bookmark.drop_collection()
+
+        link_1 = Link(title="Pitchfork")
+        link_1.save()
+
+        post_1 = Post(title="Behind the Scenes of the Pavement Reunion")
+        post_1.save()
+
+        bm = Bookmark(bookmark_object=link_1)
+        self.assertRaises(ValidationError, bm.validate)
+
+        bm = Bookmark(bookmark_object=post_1)
+        bm.save()
+
+        bm = Bookmark.objects.first()
+        self.assertEqual(bm.bookmark_object, post_1)
+
+    def test_generic_reference_list_choices(self):
+        """Ensure that a ListField properly dereferences generic references and
+        respects choices.
+        """
+        class Link(Document):
+            title = StringField()
+
+        class Post(Document):
+            title = StringField()
+
+        class User(Document):
+            bookmarks = ListField(GenericReferenceField(choices=(Post,)))
+
+        Link.drop_collection()
+        Post.drop_collection()
+        User.drop_collection()
+
+        link_1 = Link(title="Pitchfork")
+        link_1.save()
+
+        post_1 = Post(title="Behind the Scenes of the Pavement Reunion")
+        post_1.save()
+
+        user = User(bookmarks=[link_1])
+        self.assertRaises(ValidationError, user.validate)
+
+        user = User(bookmarks=[post_1])
+        user.save()
+
+        user = User.objects.first()
+        self.assertEqual(user.bookmarks, [post_1])
+
+        Link.drop_collection()
+        Post.drop_collection()
+        User.drop_collection()
+
     def test_binary_fields(self):
         """Ensure that binary fields can be stored and retrieved.
         """
@@ -1481,6 +1592,21 @@ class FieldTest(unittest.TestCase):
         self.assertEquals(result.file.read(), text)
         self.assertEquals(result.file.content_type, content_type)
         result.file.delete()  # Remove file from GridFS
+        PutFile.objects.delete()
+
+        # Ensure file-like objects are stored
+        putfile = PutFile()
+        putstring = StringIO.StringIO()
+        putstring.write(text)
+        putstring.seek(0)
+        putfile.file.put(putstring, content_type=content_type)
+        putfile.save()
+        putfile.validate()
+        result = PutFile.objects.first()
+        self.assertTrue(putfile == result)
+        self.assertEquals(result.file.read(), text)
+        self.assertEquals(result.file.content_type, content_type)
+        result.file.delete()

         streamfile = StreamFile()
         streamfile.file.new_file(content_type=content_type)
@@ -1530,6 +1656,49 @@ class FieldTest(unittest.TestCase):
             file = FileField()
         DemoFile.objects.create()

+
+    def test_file_field_no_default(self):
+
+        class GridDocument(Document):
+            the_file = FileField()
+
+        GridDocument.drop_collection()
+
+        with tempfile.TemporaryFile() as f:
+            f.write("Hello World!")
+            f.flush()
+
+            # Test without default
+            doc_a = GridDocument()
+            doc_a.save()
+
+
+            doc_b = GridDocument.objects.with_id(doc_a.id)
+            doc_b.the_file.replace(f, filename='doc_b')
+            doc_b.save()
+            self.assertNotEquals(doc_b.the_file.grid_id, None)
+
+            # Test it matches
+            doc_c = GridDocument.objects.with_id(doc_b.id)
+            self.assertEquals(doc_b.the_file.grid_id, doc_c.the_file.grid_id)
+
+            # Test with default
+            doc_d = GridDocument(the_file='')
+            doc_d.save()
+
+            doc_e = GridDocument.objects.with_id(doc_d.id)
+            self.assertEquals(doc_d.the_file.grid_id, doc_e.the_file.grid_id)
+
+            doc_e.the_file.replace(f, filename='doc_e')
+            doc_e.save()
+
+            doc_f = GridDocument.objects.with_id(doc_e.id)
+            self.assertEquals(doc_e.the_file.grid_id, doc_f.the_file.grid_id)
+
+        db = GridDocument._get_db()
+        grid_fs = gridfs.GridFS(db)
+        self.assertEquals(['doc_b', 'doc_e'], grid_fs.list())
+
     def test_file_uniqueness(self):
         """Ensure that each instance of a FileField is unique
         """
@@ -1828,6 +1997,8 @@ class FieldTest(unittest.TestCase):
             name = StringField()
             like = GenericEmbeddedDocumentField()

+        Person.drop_collection()
+
         person = Person(name='Test User')
         person.like = Car(name='Fiat')
         person.save()
@@ -1841,6 +2012,59 @@ class FieldTest(unittest.TestCase):
         person = Person.objects.first()
         self.assertTrue(isinstance(person.like, Dish))

+    def test_generic_embedded_document_choices(self):
+        """Ensure you can limit GenericEmbeddedDocument choices
+        """
+        class Car(EmbeddedDocument):
+            name = StringField()
+
+        class Dish(EmbeddedDocument):
+            food = StringField(required=True)
+            number = IntField()
+
+        class Person(Document):
+            name = StringField()
+            like = GenericEmbeddedDocumentField(choices=(Dish,))
+
+        Person.drop_collection()
+
+        person = Person(name='Test User')
+        person.like = Car(name='Fiat')
+        self.assertRaises(ValidationError, person.validate)
+
+        person.like = Dish(food="arroz", number=15)
+        person.save()
+
+        person = Person.objects.first()
+        self.assertTrue(isinstance(person.like, Dish))
+
+    def test_generic_list_embedded_document_choices(self):
+        """Ensure you can limit GenericEmbeddedDocument choices inside a list
+        field
+        """
+        class Car(EmbeddedDocument):
+            name = StringField()
+
+        class Dish(EmbeddedDocument):
+            food = StringField(required=True)
+            number = IntField()
+
+        class Person(Document):
+            name = StringField()
+            likes = ListField(GenericEmbeddedDocumentField(choices=(Dish,)))
+
+        Person.drop_collection()
+
+        person = Person(name='Test User')
+        person.likes = [Car(name='Fiat')]
+        self.assertRaises(ValidationError, person.validate)
+
+        person.likes = [Dish(food="arroz", number=15)]
+        person.save()
+
+        person = Person.objects.first()
+        self.assertTrue(isinstance(person.likes[0], Dish))
+
     def test_recursive_validation(self):
         """Ensure that a validation result to_dict is available.
         """
@@ -1880,49 +2104,11 @@ class FieldTest(unittest.TestCase):
         self.assertTrue(1 in error_dict['comments'])
         self.assertTrue('content' in error_dict['comments'][1])
         self.assertEquals(error_dict['comments'][1]['content'],
-                          u'Field is required ("content")')
+                          'Field is required')

         post.comments[1].content = 'here we go'
         post.validate()

-
-class ValidatorErrorTest(unittest.TestCase):
-
-    def test_to_dict(self):
-        """Ensure a ValidationError handles error to_dict correctly.
-        """
-        error = ValidationError('root')
-        self.assertEquals(error.to_dict(), {})
-
-        # 1st level error schema
-        error.errors = {'1st': ValidationError('bad 1st'), }
-        self.assertTrue('1st' in error.to_dict())
-        self.assertEquals(error.to_dict()['1st'], 'bad 1st')
-
-        # 2nd level error schema
-        error.errors = {'1st': ValidationError('bad 1st', errors={
-            '2nd': ValidationError('bad 2nd'),
-        })}
-        self.assertTrue('1st' in error.to_dict())
-        self.assertTrue(isinstance(error.to_dict()['1st'], dict))
-        self.assertTrue('2nd' in error.to_dict()['1st'])
-        self.assertEquals(error.to_dict()['1st']['2nd'], 'bad 2nd')
-
-        # moar levels
-        error.errors = {'1st': ValidationError('bad 1st', errors={
-            '2nd': ValidationError('bad 2nd', errors={
-                '3rd': ValidationError('bad 3rd', errors={
-                    '4th': ValidationError('Inception'),
-                }),
-            }),
-        })}
-        self.assertTrue('1st' in error.to_dict())
-        self.assertTrue('2nd' in error.to_dict()['1st'])
-        self.assertTrue('3rd' in error.to_dict()['1st']['2nd'])
-        self.assertTrue('4th' in error.to_dict()['1st']['2nd']['3rd'])
-        self.assertEquals(error.to_dict()['1st']['2nd']['3rd']['4th'],
-                          'Inception')
-
-
 if __name__ == '__main__':
     unittest.main()
@@ -329,11 +329,11 @@ class QuerySetTest(unittest.TestCase):

         BlogPost(title="ABC", comments=[c1, c2]).save()

-        BlogPost.objects(comments__by="joe").update(inc__comments__S__votes=1)
+        BlogPost.objects(comments__by="jane").update(inc__comments__S__votes=1)

         post = BlogPost.objects.first()
-        self.assertEquals(post.comments[0].by, 'joe')
-        self.assertEquals(post.comments[0].votes, 4)
+        self.assertEquals(post.comments[1].by, 'jane')
+        self.assertEquals(post.comments[1].votes, 8)

         # Currently the $ operator only applies to the first matched item in
         # the query
@@ -480,7 +480,7 @@ class QuerySetTest(unittest.TestCase):
         self.assertEqual(person.name, "User C")

     def test_bulk_insert(self):
-        """Ensure that query by array position works.
+        """Ensure that bulk insert works
         """

         class Comment(EmbeddedDocument):
@@ -490,7 +490,7 @@ class QuerySetTest(unittest.TestCase):
             comments = ListField(EmbeddedDocumentField(Comment))

         class Blog(Document):
-            title = StringField()
+            title = StringField(unique=True)
             tags = ListField(StringField())
             posts = ListField(EmbeddedDocumentField(Post))

@@ -563,6 +563,23 @@ class QuerySetTest(unittest.TestCase):
         obj_id = Blog.objects.insert(blog1, load_bulk=False)
         self.assertEquals(obj_id.__class__.__name__, 'ObjectId')

+        Blog.drop_collection()
+        post3 = Post(comments=[comment1, comment1])
+        blog1 = Blog(title="foo", posts=[post1, post2])
+        blog2 = Blog(title="bar", posts=[post2, post3])
+        blog3 = Blog(title="baz", posts=[post1, post2])
+        Blog.objects.insert([blog1, blog2])
+
+        def throw_operation_error_not_unique():
+            Blog.objects.insert([blog2, blog3], safe=True)
+
+        self.assertRaises(OperationError, throw_operation_error_not_unique)
+        self.assertEqual(Blog.objects.count(), 2)
+
+        Blog.objects.insert([blog2, blog3], write_options={'continue_on_error': True})
+        self.assertEqual(Blog.objects.count(), 3)
+
+
     def test_slave_okay(self):
         """Ensures that a query can take slave_okay syntax
         """
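The extended bulk-insert test documents two distinct failure modes against the now-unique `title` index: with `safe=True` a duplicate key aborts the batch and raises `OperationError`, while pymongo's `continue_on_error` write option inserts past the duplicate. Condensed from the test (the names refer to the objects defined there):

    Blog.objects.insert([blog2, blog3], safe=True)
    # OperationError: blog2 duplicates an existing title, blog3 is not inserted

    Blog.objects.insert([blog2, blog3],
                        write_options={'continue_on_error': True})
    # the duplicate blog2 is skipped but blog3 still lands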
@@ -619,17 +636,38 @@ class QuerySetTest(unittest.TestCase):
         self.assertEqual(people1, people2)
         self.assertEqual(people1, people3)

-    def test_repr_iteration(self):
-        """Ensure that QuerySet __repr__ can handle loops
-        """
-        self.Person(name='Person 1').save()
-        self.Person(name='Person 2').save()
-
-        queryset = self.Person.objects
-        self.assertEquals('[<Person: Person object>, <Person: Person object>]', repr(queryset))
-        for person in queryset:
-            self.assertEquals('.. queryset mid-iteration ..', repr(queryset))
+    def test_repr(self):
+        """Test repr behavior isnt destructive"""
+
+        class Doc(Document):
+            number = IntField()
+
+            def __repr__(self):
+                return "<Doc: %s>" % self.number
+
+        Doc.drop_collection()
+
+        for i in xrange(1000):
+            Doc(number=i).save()
+
+        docs = Doc.objects.order_by('number')
+
+        self.assertEquals(docs.count(), 1000)
+        self.assertEquals(len(docs), 1000)
+
+        docs_string = "%s" % docs
+        self.assertTrue("Doc: 0" in docs_string)
+
+        self.assertEquals(docs.count(), 1000)
+        self.assertEquals(len(docs), 1000)
+
+        # Limit and skip
+        self.assertEquals('[<Doc: 1>, <Doc: 2>, <Doc: 3>]', "%s" % docs[1:4])
+
+        self.assertEquals(docs.count(), 3)
+        self.assertEquals(len(docs), 3)
+        for doc in docs:
+            self.assertEqual('.. queryset mid-iteration ..', repr(docs))

     def test_regex_query_shortcuts(self):
         """Ensure that contains, startswith, endswith, etc work.
@@ -1327,6 +1365,37 @@ class QuerySetTest(unittest.TestCase):
         self.Person.objects(name='Test User').delete()
         self.assertEqual(1, BlogPost.objects.count())

+    def test_reverse_delete_rule_cascade_self_referencing(self):
+        """Ensure self-referencing CASCADE deletes do not result in infinite loop
+        """
+        class Category(Document):
+            name = StringField()
+            parent = ReferenceField('self', reverse_delete_rule=CASCADE)
+
+        num_children = 3
+        base = Category(name='Root')
+        base.save()
+
+        # Create a simple parent-child tree
+        for i in range(num_children):
+            child_name = 'Child-%i' % i
+            child = Category(name=child_name, parent=base)
+            child.save()
+
+            for i in range(num_children):
+                child_child_name = 'Child-Child-%i' % i
+                child_child = Category(name=child_child_name, parent=child)
+                child_child.save()
+
+        tree_size = 1 + num_children + (num_children * num_children)
+        self.assertEquals(tree_size, Category.objects.count())
+        self.assertEquals(num_children, Category.objects(parent=base).count())
+
+        # The delete should effectively wipe out the Category collection
+        # without resulting in infinite parent-child cascade recursion
+        base.delete()
+        self.assertEquals(0, Category.objects.count())
+
     def test_reverse_delete_rule_nullify(self):
         """Ensure nullification of references to deleted documents.
         """
@@ -1371,6 +1440,36 @@ class QuerySetTest(unittest.TestCase):

         self.assertRaises(OperationError, self.Person.objects.delete)

+    def test_reverse_delete_rule_pull(self):
+        """Ensure pulling of references to deleted documents.
+        """
+        class BlogPost(Document):
+            content = StringField()
+            authors = ListField(ReferenceField(self.Person,
+                reverse_delete_rule=PULL))
+
+        BlogPost.drop_collection()
+        self.Person.drop_collection()
+
+        me = self.Person(name='Test User')
+        me.save()
+
+        someoneelse = self.Person(name='Some-one Else')
+        someoneelse.save()
+
+        post = BlogPost(content='Watching TV', authors=[me, someoneelse])
+        post.save()
+
+        another = BlogPost(content='Chilling Out', authors=[someoneelse])
+        another.save()
+
+        someoneelse.delete()
+        post.reload()
+        another.reload()
+
+        self.assertEqual(post.authors, [me])
+        self.assertEqual(another.authors, [])
+
     def test_update(self):
         """Ensure that atomic updates work properly.
         """
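`PULL` is the reverse-delete rule exercised above: deleting a referenced document removes it from any list of references, rather than nullifying the whole field or cascading the delete. It is declared inline on the field (condensed from the test, `Person` stands for the referenced class):

    class BlogPost(Document):
        content = StringField()
        authors = ListField(ReferenceField(Person, reverse_delete_rule=PULL))

    # person.delete() now issues a $pull of that person
    # from the authors list of every BlogPost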
@@ -1421,7 +1520,7 @@ class QuerySetTest(unittest.TestCase):

         BlogPost.drop_collection()

-    def test_update_push_and_pull(self):
+    def test_update_push_and_pull_add_to_set(self):
         """Ensure that the 'pull' update operation works correctly.
         """
         class BlogPost(Document):
@@ -1454,6 +1553,52 @@ class QuerySetTest(unittest.TestCase):
         post.reload()
         self.assertEqual(post.tags, ["code", "mongodb"])

+    def test_add_to_set_each(self):
+        class Item(Document):
+            name = StringField(required=True)
+            description = StringField(max_length=50)
+            parents = ListField(ReferenceField('self'))
+
+        Item.drop_collection()
+
+        item = Item(name='test item').save()
+        parent_1 = Item(name='parent 1').save()
+        parent_2 = Item(name='parent 2').save()
+
+        item.update(add_to_set__parents=[parent_1, parent_2, parent_1])
+        item.reload()
+
+        self.assertEqual([parent_1, parent_2], item.parents)
+
+    def test_pull_nested(self):
+
+        class User(Document):
+            name = StringField()
+
+        class Collaborator(EmbeddedDocument):
+            user = StringField()
+
+            def __unicode__(self):
+                return '%s' % self.user
+
+        class Site(Document):
+            name = StringField(max_length=75, unique=True, required=True)
+            collaborators = ListField(EmbeddedDocumentField(Collaborator))
+
+
+        Site.drop_collection()
+
+        c = Collaborator(user='Esteban')
+        s = Site(name="test", collaborators=[c])
+        s.save()
+
+        Site.objects(id=s.id).update_one(pull__collaborators__user='Esteban')
+        self.assertEqual(Site.objects.first().collaborators, [])
+
+        def pull_all():
+            Site.objects(id=s.id).update_one(pull_all__collaborators__user=['Ross'])
+
+        self.assertRaises(InvalidQueryError, pull_all)
+
     def test_update_one_pop_generic_reference(self):

@@ -1518,6 +1663,37 @@ class QuerySetTest(unittest.TestCase):

         BlogPost.drop_collection()

+
+    def test_set_list_embedded_documents(self):
+
+        class Author(EmbeddedDocument):
+            name = StringField()
+
+        class Message(Document):
+            title = StringField()
+            authors = ListField(EmbeddedDocumentField('Author'))
+
+        Message.drop_collection()
+
+        message = Message(title="hello", authors=[Author(name="Harry")])
+        message.save()
+
+        Message.objects(authors__name="Harry").update_one(
+            set__authors__S=Author(name="Ross"))
+
+        message = message.reload()
+        self.assertEquals(message.authors[0].name, "Ross")
+
+        Message.objects(authors__name="Ross").update_one(
+            set__authors=[Author(name="Harry"),
+                          Author(name="Ross"),
+                          Author(name="Adam")])
+
+        message = message.reload()
+        self.assertEquals(message.authors[0].name, "Harry")
+        self.assertEquals(message.authors[1].name, "Ross")
+        self.assertEquals(message.authors[2].name, "Adam")
+
     def test_order_by(self):
         """Ensure that QuerySets may be ordered.
         """
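`set__authors__S` is MongoEngine's spelling of MongoDB's positional `$` operator: `S` stands in for the index of the first array element matched by the query, as the comment about the `$` operator in an earlier hunk already notes. Condensed from the test:

    Message.objects(authors__name="Harry").update_one(
        set__authors__S=Author(name="Ross"))
    # roughly {'$set': {'authors.$': <Ross>}}: only the first match is rewritten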
@@ -1818,9 +1994,9 @@ class QuerySetTest(unittest.TestCase):

         # Check item_frequencies works for non-list fields
         def test_assertions(f):
-            self.assertEqual(set(['1', '2']), set(f.keys()))
-            self.assertEqual(f['1'], 1)
-            self.assertEqual(f['2'], 2)
+            self.assertEqual(set([1, 2]), set(f.keys()))
+            self.assertEqual(f[1], 1)
+            self.assertEqual(f[2], 2)

         exec_js = BlogPost.objects.item_frequencies('hits')
         map_reduce = BlogPost.objects.item_frequencies('hits', map_reduce=True)
@@ -1920,7 +2096,6 @@ class QuerySetTest(unittest.TestCase):
             data = EmbeddedDocumentField(Data, required=True)
             extra = EmbeddedDocumentField(Extra)

-
         Person.drop_collection()

         p = Person()
@@ -1938,6 +2113,52 @@ class QuerySetTest(unittest.TestCase):
         ot = Person.objects.item_frequencies('extra.tag', map_reduce=True)
         self.assertEquals(ot, {None: 1.0, u'friend': 1.0})

+    def test_item_frequencies_with_0_values(self):
+        class Test(Document):
+            val = IntField()
+
+        Test.drop_collection()
+        t = Test()
+        t.val = 0
+        t.save()
+
+        ot = Test.objects.item_frequencies('val', map_reduce=True)
+        self.assertEquals(ot, {0: 1})
+        ot = Test.objects.item_frequencies('val', map_reduce=False)
+        self.assertEquals(ot, {0: 1})
+
+    def test_item_frequencies_with_False_values(self):
+        class Test(Document):
+            val = BooleanField()
+
+        Test.drop_collection()
+        t = Test()
+        t.val = False
+        t.save()
+
+        ot = Test.objects.item_frequencies('val', map_reduce=True)
+        self.assertEquals(ot, {False: 1})
+        ot = Test.objects.item_frequencies('val', map_reduce=False)
+        self.assertEquals(ot, {False: 1})
+
+    def test_item_frequencies_normalize(self):
+        class Test(Document):
+            val = IntField()
+
+        Test.drop_collection()
+
+        for i in xrange(50):
+            Test(val=1).save()
+
+        for i in xrange(20):
+            Test(val=2).save()
+
+        freqs = Test.objects.item_frequencies('val', map_reduce=False, normalize=True)
+        self.assertEquals(freqs, {1: 50.0/70, 2: 20.0/70})
+
+        freqs = Test.objects.item_frequencies('val', map_reduce=True, normalize=True)
+        self.assertEquals(freqs, {1: 50.0/70, 2: 20.0/70})
+
     def test_average(self):
         """Ensure that field can be averaged correctly.
         """
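Note: with `normalize=True`, `item_frequencies` divides each count by the total number of values, so the 50/20 split above comes out as 50/70 and 20/70. A quick sanity check of that arithmetic (plain Python, independent of mongoengine):

    counts = {1: 50, 2: 20}
    total = float(sum(counts.values()))                       # 70.0
    freqs = dict((k, v / total) for k, v in counts.items())   # {1: 0.714..., 2: 0.285...}
    assert abs(sum(freqs.values()) - 1.0) < 1e-9              # frequencies sum to 1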
@@ -2868,6 +3089,19 @@ class QuerySetTest(unittest.TestCase):
         self.assertEqual(plist[1], (20, False))
         self.assertEqual(plist[2], (30, True))

+    def test_scalar_primary_key(self):
+
+        class SettingValue(Document):
+            key = StringField(primary_key=True)
+            value = StringField()
+
+        SettingValue.drop_collection()
+        s = SettingValue(key="test", value="test value")
+        s.save()
+
+        val = SettingValue.objects.scalar('key', 'value')
+        self.assertEqual(list(val), [('test', 'test value')])
+
     def test_scalar_cursor_behaviour(self):
         """Ensure that a query returns a valid set of results.
         """
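Note: `scalar()` projects the named fields instead of materialising full documents; the new test pins down that a `primary_key=True` field round-trips through it as the stored key. A usage sketch under the same models (the single-field form returning bare values is an assumption based on scalar's documented behaviour):

    keys = list(SettingValue.objects.scalar('key'))            # [u'test'] -- bare values
    pairs = list(SettingValue.objects.scalar('key', 'value'))  # [(u'test', u'test value')] -- tuples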
32 tests/test_replicaset_connection.py (new file)
@@ -0,0 +1,32 @@
+import unittest
+import pymongo
+from pymongo import ReadPreference, ReplicaSetConnection
+
+import mongoengine
+from mongoengine import *
+from mongoengine.connection import get_db, get_connection, ConnectionError
+
+
+class ConnectionTest(unittest.TestCase):
+
+    def tearDown(self):
+        mongoengine.connection._connection_settings = {}
+        mongoengine.connection._connections = {}
+        mongoengine.connection._dbs = {}
+
+    def test_replicaset_uri_passes_read_preference(self):
+        """Requires a replica set called "rs" on port 27017
+        """
+
+        try:
+            conn = connect(db='mongoenginetest', host="mongodb://localhost/mongoenginetest?replicaSet=rs", read_preference=ReadPreference.SECONDARY_ONLY)
+        except ConnectionError, e:
+            return
+
+        if not isinstance(conn, ReplicaSetConnection):
+            return
+
+        self.assertEquals(conn.read_preference, ReadPreference.SECONDARY_ONLY)
+
+if __name__ == '__main__':
+    unittest.main()
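Note: the new test bails out early both when no replica set named "rs" is reachable (the ConnectionError branch) and when pymongo handed back a plain Connection instead of a ReplicaSetConnection. A sketch of verifying the same read preference straight through pymongo 2.x, outside mongoengine (host and set name mirror the test's own assumptions):

    from pymongo import ReplicaSetConnection, ReadPreference

    # Connect directly to the replica set and confirm the preference sticks.
    conn = ReplicaSetConnection('localhost:27017', replicaSet='rs',
                                read_preference=ReadPreference.SECONDARY_ONLY)
    assert conn.read_preference == ReadPreference.SECONDARY_ONLY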