Compare commits


3 Commits

Author          SHA1        Message                                            Date
Wilson Júnior   1f7272d139  fix simple typo                                    2014-09-03 15:37:29 -03:00
Wilson Júnior   f6ba1ad788  Merge remote-tracking branch 'origin' into async   2014-09-03 00:50:01 -03:00
Wilson Júnior   294d59c9bb  register a possible async database                 2014-09-03 00:45:02 -03:00
81 changed files with 2962 additions and 6049 deletions

.travis.yml

@@ -1,63 +1,62 @@
 language: python
 python:
-- '2.6'
-- '2.7'
-- '3.3'
-- '3.4'
-- '3.5'
-- pypy
-- pypy3
+- "2.6"
+- "2.7"
+- "3.2"
+- "3.3"
+- "3.4"
+- "pypy"
+- "pypy3"
 env:
-- PYMONGO=2.7
-- PYMONGO=2.8
-- PYMONGO=3.0
-- PYMONGO=dev
+- PYMONGO=dev DJANGO=dev
+- PYMONGO=dev DJANGO=1.6.5
+- PYMONGO=dev DJANGO=1.5.8
+- PYMONGO=2.7.1 DJANGO=dev
+- PYMONGO=2.7.1 DJANGO=1.6.5
+- PYMONGO=2.7.1 DJANGO=1.5.8
+- PYMONGO=2.7.2 DJANGO=dev
+- PYMONGO=2.7.2 DJANGO=1.6.5
+- PYMONGO=2.7.2 DJANGO=1.5.8
 matrix:
-  fast_finish: true
+  exclude:
+    - python: "2.6"
+      env: PYMONGO=dev DJANGO=dev
+    - python: "2.6"
+      env: PYMONGO=2.7.1 DJANGO=dev
+    - python: "2.6"
+      env: PYMONGO=2.7.2 DJANGO=dev
+  allow_failures:
+    - python: "pypy3"
+  fast_finish: true
 before_install:
-- travis_retry sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10
-- echo 'deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen' |
-  sudo tee /etc/apt/sources.list.d/mongodb.list
-- travis_retry sudo apt-get update
-- travis_retry sudo apt-get install mongodb-org-server
+- "travis_retry sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10"
+- "echo 'deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen' | sudo tee /etc/apt/sources.list.d/mongodb.list"
+- "travis_retry sudo apt-get update"
+- "travis_retry sudo apt-get install mongodb-org-server"
 install:
-- sudo apt-get install python-dev python3-dev libopenjpeg-dev zlib1g-dev libjpeg-turbo8-dev
-  libtiff4-dev libjpeg8-dev libfreetype6-dev liblcms2-dev libwebp-dev tcl8.5-dev tk8.5-dev
-  python-tk
-- travis_retry pip install --upgrade pip
-- travis_retry pip install coveralls
-- travis_retry pip install flake8
-- travis_retry pip install tox>=1.9
-- travis_retry pip install "virtualenv<14.0.0" # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32)
-- travis_retry tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -e test
-# Run flake8 for py27
-before_script:
-- if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then tox -e flake8; fi
+- sudo apt-get install python-dev python3-dev libopenjpeg-dev zlib1g-dev libjpeg-turbo8-dev libtiff4-dev libjpeg8-dev libfreetype6-dev liblcms2-dev libwebp-dev tcl8.5-dev tk8.5-dev python-tk
+- if [[ $PYMONGO == 'dev' ]]; then travis_retry pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi
+- if [[ $PYMONGO != 'dev' ]]; then travis_retry pip install pymongo==$PYMONGO; true; fi
+- if [[ $DJANGO == 'dev' ]]; then travis_retry pip install https://www.djangoproject.com/download/1.7c2/tarball/; fi
+- if [[ $DJANGO != 'dev' ]]; then travis_retry pip install Django==$DJANGO; fi
+- travis_retry pip install https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-2.1.tar.gz#md5=1534bb15cf311f07afaa3aacba1c028b
+- travis_retry pip install coveralls
+- travis_retry python setup.py install
 script:
-- tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- --with-coverage
-after_script: coveralls --verbose
+- travis_retry python setup.py test
+- if [[ $TRAVIS_PYTHON_VERSION == '3.'* ]]; then 2to3 . -w; fi;
+- coverage run --source=mongoengine setup.py test
+- coverage report -m
+- python benchmark.py
+after_script:
+  coveralls --verbose
 notifications:
-  irc: irc.freenode.org#mongoengine
+  irc: "irc.freenode.org#mongoengine"
 branches:
   only:
   - master
-  - /^v.*$/
-deploy:
-  provider: pypi
-  user: the_drow
-  password:
-    secure: QMyatmWBnC6ZN3XLW2+fTBDU4LQcp1m/LjR2/0uamyeUzWKdlOoh/Wx5elOgLwt/8N9ppdPeG83ose1jOz69l5G0MUMjv8n/RIcMFSpCT59tGYqn3kh55b0cIZXFT9ar+5cxlif6a5rS72IHm5li7QQyxexJIII6Uxp0kpvUmek=
-  on:
-    tags: true
-    repo: MongoEngine/mongoengine

AUTHORS

@@ -12,7 +12,7 @@ Laine Herron https://github.com/LaineHerron
 CONTRIBUTORS
-Derived from the git logs, inevitably incomplete but all of whom and others
+Dervived from the git logs, inevitably incomplete but all of whom and others
 have submitted patches, reported bugs and generally helped make MongoEngine
 that much better:
@@ -119,7 +119,7 @@ that much better:
 * Anton Kolechkin
 * Sergey Nikitin
 * psychogenic
-* Stefan Wójcik (https://github.com/wojcikstefan)
+* Stefan Wójcik
 * dimonb
 * Garry Polley
 * James Slagle
@@ -138,6 +138,7 @@ that much better:
 * hellysmile
 * Jaepil Jeong
 * Daniil Sharou
+* Stefan Wójcik
 * Pete Campton
 * Martyn Smith
 * Marcelo Anton
@@ -207,38 +208,5 @@ that much better:
 * Norberto Leite (https://github.com/nleite)
 * Bob Cribbs (https://github.com/bocribbz)
 * Jay Shirley (https://github.com/jshirley)
-* David Bordeynik (https://github.com/DavidBord)
+* DavidBord (https://github.com/DavidBord)
 * Axel Haustant (https://github.com/noirbizarre)
-* David Czarnecki (https://github.com/czarneckid)
-* Vyacheslav Murashkin (https://github.com/a4tunado)
-* André Ericson (https://github.com/aericson)
-* Mikhail Moshnogorsky (https://github.com/mikhailmoshnogorsky)
-* Diego Berrocal (https://github.com/cestdiego)
-* Matthew Ellison (https://github.com/seglberg)
-* Jimmy Shen (https://github.com/jimmyshen)
-* J. Fernando Sánchez (https://github.com/balkian)
-* Michael Chase (https://github.com/rxsegrxup)
-* Eremeev Danil (https://github.com/elephanter)
-* Catstyle Lee (https://github.com/Catstyle)
-* Kiryl Yermakou (https://github.com/rma4ok)
-* Matthieu Rigal (https://github.com/MRigal)
-* Charanpal Dhanjal (https://github.com/charanpald)
-* Emmanuel Leblond (https://github.com/touilleMan)
-* Breeze.Kay (https://github.com/9nix00)
-* Vicki Donchenko (https://github.com/kivistein)
-* Emile Caron (https://github.com/emilecaron)
-* Amit Lichtenberg (https://github.com/amitlicht)
-* Gang Li (https://github.com/iici-gli)
-* Lars Butler (https://github.com/larsbutler)
-* George Macon (https://github.com/gmacon)
-* Ashley Whetter (https://github.com/AWhetter)
-* Paul-Armand Verhaegen (https://github.com/paularmand)
-* Steven Rossiter (https://github.com/BeardedSteve)
-* Luo Peng (https://github.com/RussellLuo)
-* Bryan Bennett (https://github.com/bbenne10)
-* Gilb's Gilb's (https://github.com/gilbsgilbs)
-* Joshua Nedrud (https://github.com/Neurostack)
-* Shu Shen (https://github.com/shushen)
-* xiaost7 (https://github.com/xiaost7)
-* Victor Varvaryuk
-* Stanislav Kaledin (https://github.com/sallyruthstruik)

CONTRIBUTING.rst

@@ -29,10 +29,7 @@ Style Guide
 -----------
 MongoEngine aims to follow `PEP8 <http://www.python.org/dev/peps/pep-0008/>`_
-including 4 space indents. When possible we try to stick to 79 character line limits.
-However, screens got bigger and an ORM has a strong focus on readability and
-if it can help, we accept 119 as maximum line length, in a similar way as
-`django does <https://docs.djangoproject.com/en/dev/internals/contributing/writing-code/coding-style/#python-style>`_
+including 4 space indents and 79 character line limits.

 Testing
 -------
@@ -41,21 +38,14 @@ All tests are run on `Travis <http://travis-ci.org/MongoEngine/mongoengine>`_
 and any pull requests are automatically tested by Travis. Any pull requests
 without tests will take longer to be integrated and might be refused.
-You may also submit a simple failing test as a PullRequest if you don't know
-how to fix it, it will be easier for other people to work on it and it may get
-fixed faster.

 General Guidelines
 ------------------
 - Avoid backward breaking changes if at all possible.
 - Write inline documentation for new classes and methods.
 - Write tests and make sure they pass (make sure you have a mongod
-  running on the default port, then execute ``python setup.py nosetests``
+  running on the default port, then execute ``python setup.py test``
   from the cmd line to run the test suite).
-- Ensure tests pass on every Python and PyMongo versions.
-  You can test on these versions locally by executing ``tox``
-- Add enhancements or problematic bug fixes to docs/changelog.rst
 - Add yourself to AUTHORS :)

 Documentation

README.rst

@@ -6,29 +6,27 @@ MongoEngine
 :Author: Harry Marr (http://github.com/hmarr)
 :Maintainer: Ross Lawley (http://github.com/rozza)

-.. image:: https://travis-ci.org/MongoEngine/mongoengine.svg?branch=master
-  :target: https://travis-ci.org/MongoEngine/mongoengine
+.. image:: https://secure.travis-ci.org/MongoEngine/mongoengine.png?branch=master
+  :target: http://travis-ci.org/MongoEngine/mongoengine

-.. image:: https://coveralls.io/repos/github/MongoEngine/mongoengine/badge.svg?branch=master
-  :target: https://coveralls.io/github/MongoEngine/mongoengine?branch=master
+.. image:: https://coveralls.io/repos/MongoEngine/mongoengine/badge.png?branch=master
+  :target: https://coveralls.io/r/MongoEngine/mongoengine?branch=master

-.. image:: https://landscape.io/github/MongoEngine/mongoengine/master/landscape.svg?style=flat
+.. image:: https://landscape.io/github/MongoEngine/mongoengine/master/landscape.png
   :target: https://landscape.io/github/MongoEngine/mongoengine/master
   :alt: Code Health

 About
 =====
 MongoEngine is a Python Object-Document Mapper for working with MongoDB.
-Documentation available at https://mongoengine-odm.readthedocs.io - there is currently
-a `tutorial <https://mongoengine-odm.readthedocs.io/tutorial.html>`_, a `user guide
-<https://mongoengine-odm.readthedocs.io/guide/index.html>`_ and an `API reference
-<https://mongoengine-odm.readthedocs.io/apireference.html>`_.
+Documentation available at http://mongoengine-odm.rtfd.org - there is currently
+a `tutorial <http://readthedocs.org/docs/mongoengine-odm/en/latest/tutorial.html>`_, a `user guide
+<https://mongoengine-odm.readthedocs.org/en/latest/guide/index.html>`_ and an `API reference
+<http://readthedocs.org/docs/mongoengine-odm/en/latest/apireference.html>`_.

 Installation
 ============
-We recommend the use of `virtualenv <https://virtualenv.pypa.io/>`_ and of
-`pip <https://pip.pypa.io/>`_. You can then use ``pip install -U mongoengine``.
-You may also have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_ and thus
+If you have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_
 you can use ``easy_install -U mongoengine``. Otherwise, you can download the
 source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and run ``python
 setup.py install``.
@@ -40,26 +38,21 @@ Dependencies
 Optional Dependencies
 ---------------------
-- **Image Fields**: Pillow>=2.0.0
+- **Django Integration:** Django>=1.4.0 for Python 2.x or PyPy and Django>=1.5.0 for Python 3.x
+- **Image Fields**: Pillow>=2.0.0 or PIL (not recommended since MongoEngine is tested with Pillow)
 - dateutil>=2.1.0

 .. note
-   MongoEngine always runs it's test suite against the latest patch version of each dependecy. e.g.: PyMongo 3.0.1
+   MongoEngine always runs it's test suite against the latest patch version of each dependecy. e.g.: Django 1.6.5

 Examples
 ========
-Some simple examples of what MongoEngine code looks like:
-
-.. code :: python
-
-    from mongoengine import *
-    connect('mydb')
+Some simple examples of what MongoEngine code looks like::

     class BlogPost(Document):
         title = StringField(required=True, max_length=200)
         posted = DateTimeField(default=datetime.datetime.now)
         tags = ListField(StringField(max_length=50))
-        meta = {'allow_inheritance': True}

     class TextPost(BlogPost):
         content = StringField(required=True)
@@ -89,7 +82,7 @@ Some simple examples of what MongoEngine code looks like:
     >>> len(BlogPost.objects)
     2
-    >>> len(TextPost.objects)
+    >>> len(HtmlPost.objects)
     1
     >>> len(LinkPost.objects)
     1
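Both versions of this example are only partially visible in the compare view. For orientation, a minimal runnable assembly along the README's lines, assuming a local mongod on the default port (the ``LinkPost`` class and sample documents are illustrative)::

    import datetime

    from mongoengine import (Document, StringField, DateTimeField,
                             ListField, connect)

    connect('mydb')  # assumes mongod on localhost:27017

    class BlogPost(Document):
        title = StringField(required=True, max_length=200)
        posted = DateTimeField(default=datetime.datetime.now)
        tags = ListField(StringField(max_length=50))
        meta = {'allow_inheritance': True}

    class TextPost(BlogPost):
        content = StringField(required=True)

    class LinkPost(BlogPost):
        url = StringField(required=True)

    TextPost(title='Hello', content='First post!').save()
    LinkPost(title='MongoEngine', url='http://mongoengine.org').save()

    print(len(BlogPost.objects))  # 2: queries the shared collection
    print(len(TextPost.objects))  # 1: only TextPost documents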
@@ -103,26 +96,7 @@ Some simple examples of what MongoEngine code looks like:
 Tests
 =====
 To run the test suite, ensure you are running a local instance of MongoDB on
-the standard port and have ``nose`` installed. Then, run: ``python setup.py nosetests``.
-
-To run the test suite on every supported Python version and every supported PyMongo version,
-you can use ``tox``.
-tox and each supported Python version should be installed in your environment:
-
-.. code-block:: shell
-
-    # Install tox
-    $ pip install tox
-    # Run the test suites
-    $ tox
-
-If you wish to run one single or selected tests, use the nosetest convention. It will find the folder,
-eventually the file, go to the TestClass specified after the colon and eventually right to the single test.
-Also use the -s argument if you want to print out whatever or access pdb while testing.
-
-.. code-block:: shell
-
-    $ python setup.py nosetests --tests tests/fields/fields.py:FieldTest.test_cls_field -s
+the standard port, and run: ``python setup.py test``.

 Community
 =========

(Sphinx theme footer template)

@@ -2,7 +2,7 @@
 {% if next or prev %}
   <div class="rst-footer-buttons">
     {% if next %}
-      <a href="{{ next.link|e }}" class="btn btn-neutral float-right" title="{{ next.title|striptags|e }}">Next <span class="icon icon-circle-arrow-right"></span></a>
+      <a href="{{ next.link|e }}" class="btn btn-neutral float-right" title="{{ next.title|striptags|e }}"/>Next <span class="icon icon-circle-arrow-right"></span></a>
     {% endif %}
     {% if prev %}
       <a href="{{ prev.link|e }}" class="btn btn-neutral" title="{{ prev.title|striptags|e }}"><span class="icon icon-circle-arrow-left"></span> Previous</a>

docs/apireference.rst

@@ -34,9 +34,6 @@ Documents
 .. autoclass:: mongoengine.ValidationError
   :members:

-.. autoclass:: mongoengine.FieldDoesNotExist
-
 Context Managers
 ================
@@ -82,7 +79,6 @@ Fields
 .. autoclass:: mongoengine.fields.GenericEmbeddedDocumentField
 .. autoclass:: mongoengine.fields.DynamicField
 .. autoclass:: mongoengine.fields.ListField
-.. autoclass:: mongoengine.fields.EmbeddedDocumentListField
 .. autoclass:: mongoengine.fields.SortedListField
 .. autoclass:: mongoengine.fields.DictField
 .. autoclass:: mongoengine.fields.MapField
@@ -107,21 +103,6 @@ Fields
 .. autoclass:: mongoengine.fields.ImageGridFsProxy
 .. autoclass:: mongoengine.fields.ImproperlyConfigured

-Embedded Document Querying
-==========================
-
-.. versionadded:: 0.9
-
-Additional queries for Embedded Documents are available when using the
-:class:`~mongoengine.EmbeddedDocumentListField` to store a list of embedded
-documents.
-
-A list of embedded documents is returned as a special list with the
-following methods:
-
-.. autoclass:: mongoengine.base.datastructures.EmbeddedDocumentList
-  :members:
-
 Misc
 ====

docs/changelog.rst

@@ -2,128 +2,9 @@
 Changelog
 =========

-Changes in 0.10.8
-=================
-- Added ability to specify an authentication mechanism (e.g. X.509) #1333
-- Added support for falsey primary keys (e.g. doc.pk = 0) #1354
-- Fixed BaseQuerySet#sum/average for fields w/ explicit db_field #1417
-
-Changes in 0.10.7
-=================
-- Dropped Python 3.2 support #1390
-- Fixed the bug where dynamic doc has index inside a dict field #1278
-- Fixed: ListField minus index assignment does not work #1128
-- Fixed cascade delete mixing among collections #1224
-- Add `signal_kwargs` argument to `Document.save`, `Document.delete` and `BaseQuerySet.insert` to be passed to signals calls #1206
-- Raise `OperationError` when trying to do a `drop_collection` on document with no collection set.
-- count on ListField of EmbeddedDocumentField fails. #1187
-- Fixed long fields stored as int32 in Python 3. #1253
-- MapField now handles unicodes keys correctly. #1267
-- ListField now handles negative indicies correctly. #1270
-- Fixed AttributeError when initializing EmbeddedDocument with positional args. #681
-- Fixed no_cursor_timeout error with pymongo 3.0+ #1304
-- Replaced map-reduce based QuerySet.sum/average with aggregation-based implementations #1336
-- Fixed support for `__` to escape field names that match operators names in `update` #1351
-- Fixed BaseDocument#_mark_as_changed #1369
-- Added support for pickling QuerySet instances. #1397
-- Fixed connecting to a list of hosts #1389
-- Fixed a bug where accessing broken references wouldn't raise a DoesNotExist error #1334
-- Fixed not being able to specify use_db_field=False on ListField(EmbeddedDocumentField) instances #1218
-- Improvements to the dictionary fields docs #1383
-
-Changes in 0.10.6
-=================
-- Add support for mocking MongoEngine based on mongomock. #1151
-- Fixed not being able to run tests on Windows. #1153
-- Allow creation of sparse compound indexes. #1114
-- count on ListField of EmbeddedDocumentField fails. #1187
-
-Changes in 0.10.5
-=================
-- Fix for reloading of strict with special fields. #1156
-
-Changes in 0.10.4
-=================
-- SaveConditionError is now importable from the top level package. #1165
-- upsert_one method added. #1157
-
-Changes in 0.10.3
-=================
-- Fix `read_preference` (it had chaining issues with PyMongo 2.x and it didn't work at all with PyMongo 3.x) #1042
-
-Changes in 0.10.2
-=================
-- Allow shard key to point to a field in an embedded document. #551
-- Allow arbirary metadata in fields. #1129
-- ReferenceFields now support abstract document types. #837
-
-Changes in 0.10.1
-=================
-- Fix infinite recursion with CASCADE delete rules under specific conditions. #1046
-- Fix CachedReferenceField bug when loading cached docs as DBRef but failing to save them. #1047
-- Fix ignored chained options #842
-- Document save's save_condition error raises `SaveConditionError` exception #1070
-- Fix Document.reload for DynamicDocument. #1050
-- StrictDict & SemiStrictDict are shadowed at init time. #1105
-- Fix ListField minus index assignment does not work. #1119
-- Remove code that marks field as changed when the field has default but not existed in database #1126
-- Remove test dependencies (nose and rednose) from install dependencies list. #1079
-- Recursively build query when using elemMatch operator. #1130
-- Fix instance back references for lists of embedded documents. #1131
-
-Changes in 0.10.0
-=================
-- Django support was removed and will be available as a separate extension. #958
-- Allow to load undeclared field with meta attribute 'strict': False #957
-- Support for PyMongo 3+ #946
-- Removed get_or_create() deprecated since 0.8.0. #300
-- Improve Document._created status when switch collection and db #1020
-- Queryset update doesn't go through field validation #453
-- Added support for specifying authentication source as option `authSource` in URI. #967
-- Fixed mark_as_changed to handle higher/lower level fields changed. #927
-- ListField of embedded docs doesn't set the _instance attribute when iterating over it #914
-- Support += and *= for ListField #595
-- Use sets for populating dbrefs to dereference
-- Fixed unpickled documents replacing the global field's list. #888
-- Fixed storage of microseconds in ComplexDateTimeField and unused separator option. #910
-- Don't send a "cls" option to ensureIndex (related to https://jira.mongodb.org/browse/SERVER-769)
-- Fix for updating sorting in SortedListField. #978
-- Added __ support to escape field name in fields lookup keywords that match operators names #949
-- Fix for issue where FileField deletion did not free space in GridFS.
-- No_dereference() not respected on embedded docs containing reference. #517
-- Document save raise an exception if save_condition fails #1005
-- Fixes some internal _id handling issue. #961
-- Updated URL and Email Field regex validators, added schemes argument to URLField validation. #652
-- Capped collection multiple of 256. #1011
-- Added `BaseQuerySet.aggregate_sum` and `BaseQuerySet.aggregate_average` methods.
-- Fix for delete with write_concern {'w': 0}. #1008
-- Allow dynamic lookup for more than two parts. #882
-- Added support for min_distance on geo queries. #831
-- Allow to add custom metadata to fields #705
-
-Changes in 0.9.0
-================
-- Update FileField when creating a new file #714
-- Added `EmbeddedDocumentListField` for Lists of Embedded Documents. #826
-- ComplexDateTimeField should fall back to None when null=True #864
-- Request Support for $min, $max Field update operators #863
-- `BaseDict` does not follow `setdefault` #866
-- Add support for $type operator # 766
-- Fix tests for pymongo 2.8+ #877
-- No module named 'django.utils.importlib' (Django dev) #872
-- Field Choices Now Accept Subclasses of Documents
-- Ensure Indexes before Each Save #812
-- Generate Unique Indices for Lists of EmbeddedDocuments #358
-- Sparse fields #515
-- write_concern not in params of Collection#remove #801
-- Better BaseDocument equality check when not saved #798
-- OperationError: Shard Keys are immutable. Tried to update id even though the document is not yet saved #771
-- with_limit_and_skip for count should default like in pymongo #759
-- Fix storing value of precision attribute in DecimalField #787
-- Set attribute to None does not work (at least for fields with default values) #734
-- Querying by a field defined in a subclass raises InvalidQueryError #744
-- Add Support For MongoDB 2.6.X's maxTimeMS #778
-- abstract shouldn't be inherited in EmbeddedDocument # 789
+Changes in 0.9.X - DEV
+======================
 - Allow specifying the '_cls' as a field for indexes #397
 - Stop ensure_indexes running on a secondaries unless connection is through mongos #746
 - Not overriding default values when loading a subset of fields #399
@@ -154,7 +35,7 @@ Changes in 0.9.0
 - Removing support for Django 1.4.x, pymongo 2.5.x, pymongo 2.6.x.
 - Removing support for Python < 2.6.6
 - Fixed $maxDistance location for geoJSON $near queries with MongoDB 2.6+ #664
-- QuerySet.modify() and Document.modify() methods to provide find_and_modify() like behaviour #677 #773
+- QuerySet.modify() method to provide find_and_modify() like behaviour #677
 - Added support for the using() method on a queryset #676
 - PYPY support #673
 - Connection pooling #674
@@ -173,14 +54,10 @@ Changes in 0.9.0
 - Allow atomic update for the entire `DictField` #742
 - Added MultiPointField, MultiLineField, MultiPolygonField
 - Fix multiple connections aliases being rewritten #748
-- Fixed a few instances where reverse_delete_rule was written as reverse_delete_rules. #791
-- Make `in_bulk()` respect `no_dereference()` #775
-- Handle None from model __str__; Fixes #753 #754
-- _get_changed_fields fix for embedded documents with id field. #925

 Changes in 0.8.7
 ================
-- Calling reload on deleted / nonexistent documents raises DoesNotExist (#538)
+- Calling reload on deleted / nonexistant documents raises DoesNotExist (#538)
 - Stop ensure_indexes running on a secondaries (#555)
 - Fix circular import issue with django auth (#531) (#545)
@@ -193,7 +70,7 @@ Changes in 0.8.5
 - Fix multi level nested fields getting marked as changed (#523)
 - Django 1.6 login fix (#522) (#527)
 - Django 1.6 session fix (#509)
-- EmbeddedDocument._instance is now set when setting the attribute (#506)
+- EmbeddedDocument._instance is now set when settng the attribute (#506)
 - Fixed EmbeddedDocument with ReferenceField equality issue (#502)
 - Fixed GenericReferenceField serialization order (#499)
 - Fixed count and none bug (#498)
@@ -283,7 +160,7 @@ Changes in 0.8.0
 - Added `get_next_value` preview for SequenceFields (#319)
 - Added no_sub_classes context manager and queryset helper (#312)
 - Querysets now utilises a local cache
-- Changed __len__ behaviour in the queryset (#247, #311)
+- Changed __len__ behavour in the queryset (#247, #311)
 - Fixed querying string versions of ObjectIds issue with ReferenceField (#307)
 - Added $setOnInsert support for upserts (#308)
 - Upserts now possible with just query parameters (#309)
@@ -334,7 +211,7 @@ Changes in 0.8.0
 - Uses getlasterror to test created on updated saves (#163)
 - Fixed inheritance and unique index creation (#140)
 - Fixed reverse delete rule with inheritance (#197)
-- Fixed validation for GenericReferences which haven't been dereferenced
+- Fixed validation for GenericReferences which havent been dereferenced
 - Added switch_db context manager (#106)
 - Added switch_db method to document instances (#106)
 - Added no_dereference context manager (#82) (#61)
@@ -416,11 +293,11 @@ Changes in 0.7.2
 - Update index spec generation so its not destructive (#113)

 Changes in 0.7.1
-================
+=================
 - Fixed index spec inheritance (#111)

 Changes in 0.7.0
-================
+=================
 - Updated queryset.delete so you can use with skip / limit (#107)
 - Updated index creation allows kwargs to be passed through refs (#104)
 - Fixed Q object merge edge case (#109)
@@ -501,7 +378,7 @@ Changes in 0.6.12
 - Fixes error with _delta handling DBRefs

 Changes in 0.6.11
-=================
+==================
 - Fixed inconsistency handling None values field attrs
 - Fixed map_field embedded db_field issue
 - Fixed .save() _delta issue with DbRefs
@@ -581,7 +458,7 @@ Changes in 0.6.1
 - Fix for replicaSet connections

 Changes in 0.6
-==============
+================
 - Added FutureWarning to inherited classes not declaring 'allow_inheritance' as the default will change in 0.7
 - Added support for covered indexes when inheritance is off
@@ -669,8 +546,8 @@ Changes in v0.5
 - Updated default collection naming convention
 - Added Document Mixin support
 - Fixed queryet __repr__ mid iteration
-- Added hint() support, so can tell Mongo the proper index to use for the query
-- Fixed issue with inconsistent setting of _cls breaking inherited referencing
+- Added hint() support, so cantell Mongo the proper index to use for the query
+- Fixed issue with inconsitent setting of _cls breaking inherited referencing
 - Added help_text and verbose_name to fields to help with some form libs
 - Updated item_frequencies to handle embedded document lookups
 - Added delta tracking now only sets / unsets explicitly changed fields

(tumblelog example script)

@@ -17,10 +17,6 @@ class Post(Document):
     tags = ListField(StringField(max_length=30))
     comments = ListField(EmbeddedDocumentField(Comment))

-    # bugfix
-    meta = {'allow_inheritance': True}
-
 class TextPost(Post):
     content = StringField()
@@ -49,8 +45,7 @@ print 'ALL POSTS'
 print
 for post in Post.objects:
     print post.title
-    #print '=' * post.title.count()
-    print "=" * 20
+    print '=' * post.title.count()

     if isinstance(post, TextPost):
         print post.content

docs/django.rst

@@ -2,18 +2,176 @@
 Django Support
 ==============

-.. note:: Django support has been split from the main MongoEngine
-   repository. The *legacy* Django extension may be found bundled with the
-   0.9 release of MongoEngine.
-
-Help Wanted!
-------------
-
-The MongoEngine team is looking for help contributing and maintaining a new
-Django extension for MongoEngine! If you have Django experience and would like
-to help contribute to the project, please get in touch on the
-`mailing list <http://groups.google.com/group/mongoengine-users>`_ or by
-simply contributing on
-`GitHub <https://github.com/MongoEngine/django-mongoengine>`_.
+.. note:: Updated to support Django 1.5
+
+Connecting
+==========
+In your **settings.py** file, ignore the standard database settings (unless you
+also plan to use the ORM in your project), and instead call
+:func:`~mongoengine.connect` somewhere in the settings module.
+
+.. note::
+   If you are not using another Database backend you may need to add a dummy
+   database backend to ``settings.py`` eg::
+
+       DATABASES = {
+           'default': {
+               'ENGINE': 'django.db.backends.dummy'
+           }
+       }
+
+Authentication
+==============
+MongoEngine includes a Django authentication backend, which uses MongoDB. The
+:class:`~mongoengine.django.auth.User` model is a MongoEngine
+:class:`~mongoengine.Document`, but implements most of the methods and
+attributes that the standard Django :class:`User` model does - so the two are
+moderately compatible. Using this backend will allow you to store users in
+MongoDB but still use many of the Django authentication infrastructure (such as
+the :func:`login_required` decorator and the :func:`authenticate` function). To
+enable the MongoEngine auth backend, add the following to your **settings.py**
+file::
+
+    AUTHENTICATION_BACKENDS = (
+        'mongoengine.django.auth.MongoEngineBackend',
+    )
+
+The :mod:`~mongoengine.django.auth` module also contains a
+:func:`~mongoengine.django.auth.get_user` helper function, that takes a user's
+:attr:`id` and returns a :class:`~mongoengine.django.auth.User` object.
+
+.. versionadded:: 0.1.3
+
+Custom User model
+=================
+Django 1.5 introduced `Custom user Models
+<https://docs.djangoproject.com/en/dev/topics/auth/customizing/#auth-custom-user>`_
+which can be used as an alternative to the MongoEngine authentication backend.
+
+The main advantage of this option is that other components relying on
+:mod:`django.contrib.auth` and supporting the new swappable user model are more
+likely to work. For example, you can use the ``createsuperuser`` management
+command as usual.
+
+To enable the custom User model in Django, add ``'mongoengine.django.mongo_auth'``
+in your ``INSTALLED_APPS`` and set ``'mongo_auth.MongoUser'`` as the custom user
+user model to use. In your **settings.py** file you will have::
+
+    INSTALLED_APPS = (
+        ...
+        'django.contrib.auth',
+        'mongoengine.django.mongo_auth',
+        ...
+    )
+
+    AUTH_USER_MODEL = 'mongo_auth.MongoUser'
+
+An additional ``MONGOENGINE_USER_DOCUMENT`` setting enables you to replace the
+:class:`~mongoengine.django.auth.User` class with another class of your choice::
+
+    MONGOENGINE_USER_DOCUMENT = 'mongoengine.django.auth.User'
+
+The custom :class:`User` must be a :class:`~mongoengine.Document` class, but
+otherwise has the same requirements as a standard custom user model,
+as specified in the `Django Documentation
+<https://docs.djangoproject.com/en/dev/topics/auth/customizing/>`_.
+In particular, the custom class must define :attr:`USERNAME_FIELD` and
+:attr:`REQUIRED_FIELDS` attributes.
+
+Sessions
+========
+Django allows the use of different backend stores for its sessions. MongoEngine
+provides a MongoDB-based session backend for Django, which allows you to use
+sessions in your Django application with just MongoDB. To enable the MongoEngine
+session backend, ensure that your settings module has
+``'django.contrib.sessions.middleware.SessionMiddleware'`` in the
+``MIDDLEWARE_CLASSES`` field and ``'django.contrib.sessions'`` in your
+``INSTALLED_APPS``. From there, all you need to do is add the following line
+into your settings module::
+
+    SESSION_ENGINE = 'mongoengine.django.sessions'
+    SESSION_SERIALIZER = 'mongoengine.django.sessions.BSONSerializer'
+
+Django provides session cookie, which expires after ```SESSION_COOKIE_AGE``` seconds, but doesn't delete cookie at sessions backend, so ``'mongoengine.django.sessions'`` supports `mongodb TTL
+<http://docs.mongodb.org/manual/tutorial/expire-data/>`_.
+
+.. note:: ``SESSION_SERIALIZER`` is only necessary in Django 1.6 as the default
+   serializer is based around JSON and doesn't know how to convert
+   ``bson.objectid.ObjectId`` instances to strings.
+
+.. versionadded:: 0.2.1
+
+Storage
+=======
+With MongoEngine's support for GridFS via the :class:`~mongoengine.fields.FileField`,
+it is useful to have a Django file storage backend that wraps this. The new
+storage module is called :class:`~mongoengine.django.storage.GridFSStorage`.
+Using it is very similar to using the default FileSystemStorage.::
+
+    from mongoengine.django.storage import GridFSStorage
+    fs = GridFSStorage()
+
+    filename = fs.save('hello.txt', 'Hello, World!')
+
+All of the `Django Storage API methods
+<http://docs.djangoproject.com/en/dev/ref/files/storage/>`_ have been
+implemented except :func:`path`. If the filename provided already exists, an
+underscore and a number (before the file extension, if one exists) will be
+appended to the filename until the generated filename doesn't exist. The
+:func:`save` method will return the new filename.::
+
+    >>> fs.exists('hello.txt')
+    True
+    >>> fs.open('hello.txt').read()
+    'Hello, World!'
+    >>> fs.size('hello.txt')
+    13
+    >>> fs.url('hello.txt')
+    'http://your_media_url/hello.txt'
+    >>> fs.open('hello.txt').name
+    'hello.txt'
+    >>> fs.listdir()
+    ([], [u'hello.txt'])
+
+All files will be saved and retrieved in GridFS via the :class:`FileDocument`
+document, allowing easy access to the files without the GridFSStorage
+backend.::
+
+    >>> from mongoengine.django.storage import FileDocument
+    >>> FileDocument.objects()
+    [<FileDocument: FileDocument object>]
+
+.. versionadded:: 0.4
+
+Shortcuts
+=========
+Inspired by the `Django shortcut get_object_or_404
+<https://docs.djangoproject.com/en/dev/topics/http/shortcuts/#get-object-or-404>`_,
+the :func:`~mongoengine.django.shortcuts.get_document_or_404` method returns
+a document or raises an Http404 exception if the document does not exist::
+
+    from mongoengine.django.shortcuts import get_document_or_404
+
+    admin_user = get_document_or_404(User, username='root')
+
+The first argument may be a Document or QuerySet object. All other passed arguments
+and keyword arguments are used in the query::
+
+    foo_email = get_document_or_404(User.objects.only('email'), username='foo', is_active=True).email
+
+.. note:: Like with :func:`get`, a MultipleObjectsReturned will be raised if more than one
+   object is found.
+
+Also inspired by the `Django shortcut get_list_or_404
+<https://docs.djangoproject.com/en/dev/topics/http/shortcuts/#get-list-or-404>`_,
+the :func:`~mongoengine.django.shortcuts.get_list_or_404` method returns a list of
+documents or raises an Http404 exception if the list is empty::
+
+    from mongoengine.django.shortcuts import get_list_or_404
+
+    active_users = get_list_or_404(User, is_active=True)
+
+The first argument may be a Document or QuerySet object. All other passed
+arguments and keyword arguments are used to filter the query.
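For orientation (this is not part of the diff): once the ``MongoEngineBackend`` described above is enabled, Django's stock auth calls are used unchanged. A hypothetical sketch, where the view functions are illustrative and only the imported Django APIs are real::

    from django.contrib.auth import authenticate, login
    from django.contrib.auth.decorators import login_required
    from django.http import HttpResponse

    def do_login(request):
        # authenticate() consults MongoEngineBackend, which looks the
        # user up in MongoDB rather than in a relational database.
        user = authenticate(username=request.POST['username'],
                            password=request.POST['password'])
        if user is None:
            return HttpResponse('invalid credentials', status=401)
        login(request, user)  # stores the user id in the session
        return HttpResponse('ok')

    @login_required
    def profile(request):
        # request.user is a mongoengine.django.auth.User document here.
        return HttpResponse(request.user.username)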

docs/guide/connecting.rst

@@ -23,32 +23,21 @@ arguments should be provided::
     connect('project1', username='webapp', password='pwd123')

-URI style connections are also supported -- just supply the URI as
+Uri style connections are also supported - just supply the uri as
 the :attr:`host` to
 :func:`~mongoengine.connect`::

     connect('project1', host='mongodb://localhost/database_name')

-.. note:: Database, username and password from URI string overrides
-    corresponding parameters in :func:`~mongoengine.connect`: ::
-
-        connect(
-            name='test',
-            username='user',
-            password='12345',
-            host='mongodb://admin:qwerty@localhost/production'
-        )
-
-    will establish connection to ``production`` database using
-    ``admin`` username and ``qwerty`` password.
+Note that database name from uri has priority over name
+in ::func:`~mongoengine.connect`

 ReplicaSets
 ===========
-MongoEngine supports
-:class:`~pymongo.mongo_replica_set_client.MongoReplicaSetClient`. To use them,
-please use an URI style connection and provide the ``replicaSet`` name
-in the connection kwargs.
+MongoEngine supports :class:`~pymongo.mongo_replica_set_client.MongoReplicaSetClient`.
+To use them, please use a URI style connection and provide the `replicaSet` name in the
+connection kwargs.

 Read preferences are supported through the connection or via individual
 queries by passing the read_preference ::
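The example introduced by that trailing ``::`` falls outside the excerpt. A sketch of what such a query looks like, assuming PyMongo 2.x's ``ReadPreference`` constants and MongoEngine's ``QuerySet.read_preference()`` method::

    from pymongo import ReadPreference
    from mongoengine import Document, StringField, connect

    # Connect against a replica set; the replicaSet name rides in the URI.
    connect('project1',
            host='mongodb://host1:27017,host2:27017/project1?replicaSet=rs0')

    class User(Document):
        name = StringField()

    # Per-query read preference: allow this read to hit a secondary.
    users = User.objects.read_preference(ReadPreference.SECONDARY_PREFERRED)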
@@ -88,38 +77,36 @@ to point across databases and collections. Below is an example schema, using
     meta = {"db_alias": "users-books-db"}

-Context Managers
-================
-Sometimes you may want to switch the database or collection to query against
-for a class.
-For example, archiving older data into a separate database for performance
-reasons or writing functions that dynamically choose collections to write
-document to.
-
-Switch Database
----------------
+Switch Database Context Manager
+===============================
+
+Sometimes you may want to switch the database to query against for a class
+for example, archiving older data into a separate database for performance
+reasons.
+
 The :class:`~mongoengine.context_managers.switch_db` context manager allows
 you to change the database alias for a given class allowing quick and easy
-access the same User document across databases::
+access to the same User document across databases::

     from mongoengine.context_managers import switch_db

     class User(Document):
         name = StringField()
         meta = {"db_alias": "user-db"}

     with switch_db(User, 'archive-user-db') as User:
         User(name="Ross").save()  # Saves the 'archive-user-db'

-.. note:: Make sure any aliases have been registered with
-    :func:`~mongoengine.register_connection` before using the context manager.
-
-Switch Collection
------------------
-The :class:`~mongoengine.context_managers.switch_collection` context manager
-allows you to change the collection for a given class allowing quick and easy
-access the same Group document across collection::
+There is also a switch collection context manager as well. The
+:class:`~mongoengine.context_managers.switch_collection` context manager allows
+you to change the collection for a given class allowing quick and easy
+access to the same Group document across collection::

-    from mongoengine.context_managers import switch_collection
+    from mongoengine.context_managers import switch_db

     class Group(Document):
         name = StringField()
@@ -128,9 +115,3 @@ access the same Group document across collection::
     with switch_collection(Group, 'group2000') as Group:
         Group(name="hello Group 2000 collection!").save()  # Saves in group2000 collection

-.. note:: Make sure any aliases have been registered with
-    :func:`~mongoengine.register_connection` or :func:`~mongoengine.connect`
-    before using the context manager.
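The removed note is worth illustrating: the target alias must exist before the context manager runs. A minimal sketch, with illustrative database names::

    from mongoengine import (Document, StringField, connect,
                             register_connection)
    from mongoengine.context_managers import switch_db

    connect('user-db')  # registers the 'default' alias
    register_connection('archive-user-db', name='user-db-archive')

    class User(Document):
        name = StringField()

    with switch_db(User, 'archive-user-db') as ArchiveUser:
        # Reads and writes inside the block go to 'user-db-archive'.
        ArchiveUser(name='Ross').save()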

docs/guide/defining-documents.rst

@@ -4,7 +4,7 @@ Defining documents
 In MongoDB, a **document** is roughly equivalent to a **row** in an RDBMS. When
 working with relational databases, rows are stored in **tables**, which have a
 strict **schema** that the rows follow. MongoDB stores documents in
-**collections** rather than tables --- the principal difference is that no schema
+**collections** rather than tables - the principal difference is that no schema
 is enforced at a database level.

 Defining a document's schema
@@ -29,7 +29,7 @@ documents are serialized based on their field order.
 Dynamic document schemas
 ========================
-One of the benefits of MongoDB is dynamic schemas for a collection, whilst data
+One of the benefits of MongoDb is dynamic schemas for a collection, whilst data
 should be planned and organised (after all explicit is better than implicit!)
 there are scenarios where having dynamic / expando style documents is desirable.
@@ -75,7 +75,6 @@ are as follows:
 * :class:`~mongoengine.fields.DynamicField`
 * :class:`~mongoengine.fields.EmailField`
 * :class:`~mongoengine.fields.EmbeddedDocumentField`
-* :class:`~mongoengine.fields.EmbeddedDocumentListField`
 * :class:`~mongoengine.fields.FileField`
 * :class:`~mongoengine.fields.FloatField`
 * :class:`~mongoengine.fields.GenericEmbeddedDocumentField`
@@ -115,7 +114,7 @@ arguments can be set on all fields:
 :attr:`default` (Default: None)
     A value to use when no value is set for this field.

-    The definition of default parameters follow `the general rules on Python
+    The definion of default parameters follow `the general rules on Python
     <http://docs.python.org/reference/compound_stmts.html#function-definitions>`__,
     which means that some care should be taken when dealing with default mutable objects
     (like in :class:`~mongoengine.fields.ListField` or :class:`~mongoengine.fields.DictField`)::
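The example that follows this paragraph is outside the hunk; the usual pattern is to pass a callable so each document gets a fresh object. A sketch::

    from mongoengine import Document, ListField, StringField

    class Page(Document):
        # Safe: list() is invoked once per document, giving each its own list.
        tags = ListField(StringField(max_length=50), default=list)
        # Risky alternative: default=[] would share a single list instance
        # between all documents that never assign their own value.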
@@ -147,8 +146,6 @@ arguments can be set on all fields:
     When True, use this field as a primary key for the collection. `DictField`
     and `EmbeddedDocuments` both support being the primary key for a document.

-    .. note:: If set, this field is also accessible through the `pk` field.
-
 :attr:`choices` (Default: None)
     An iterable (e.g. a list or tuple) of choices to which the value of this
     field should be limited.
@@ -173,16 +170,16 @@ arguments can be set on all fields:
     class Shirt(Document):
         size = StringField(max_length=3, choices=SIZE)

-:attr:`**kwargs` (Optional)
-    You can supply additional metadata as arbitrary additional keyword
-    arguments. You can not override existing attributes, however. Common
-    choices include `help_text` and `verbose_name`, commonly used by form and
-    widget libraries.
+:attr:`help_text` (Default: None)
+    Optional help text to output with the field - used by form libraries
+
+:attr:`verbose_name` (Default: None)
+    Optional human-readable name for the field - used by form libraries
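Both variants describe the same usage; a sketch with purely illustrative values::

    from mongoengine import Document, StringField

    class Shirt(Document):
        size = StringField(max_length=3,
                           verbose_name='Shirt size',
                           help_text='S, M or L')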
 List fields
 -----------
-MongoDB allows storing lists of items. To add a list of items to a
+MongoDB allows the storage of lists of items. To add a list of items to a
 :class:`~mongoengine.Document`, use the :class:`~mongoengine.fields.ListField` field
 type. :class:`~mongoengine.fields.ListField` takes another field object as its first
 argument, which specifies which type elements may be stored within the list::
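The example after the ``::`` is truncated by the compare view; it is conventionally along these lines::

    from mongoengine import Document, ListField, StringField

    class Page(Document):
        # Each element is validated as a string of at most 50 characters.
        tags = ListField(StringField(max_length=50))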
@@ -214,9 +211,9 @@ document class as the first argument::
 Dictionary Fields
 -----------------
-Often, an embedded document may be used instead of a dictionary generally
-embedded documents are recommended as dictionaries dont support validation
-or custom field types. However, sometimes you will not know the structure of what you want to
+Often, an embedded document may be used instead of a dictionary -- generally
+this is recommended as dictionaries don't support validation or custom field
+types. However, sometimes you will not know the structure of what you want to
 store; in this situation a :class:`~mongoengine.fields.DictField` is appropriate::

     class SurveyResponse(Document):
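The hunk stops at the first line of the example; a sketch of how it plausibly continues (field names are illustrative)::

    import datetime

    from mongoengine import Document, DateTimeField, DictField

    class SurveyResponse(Document):
        date = DateTimeField()
        answers = DictField()  # free-form, unvalidated key/value data

    response = SurveyResponse(date=datetime.datetime.now(),
                              answers={'instrument': 'guitar', 'years': 3})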
@@ -316,12 +313,12 @@ reference with a delete rule specification. A delete rule is specified by
 supplying the :attr:`reverse_delete_rule` attributes on the
 :class:`ReferenceField` definition, like this::

-    class ProfilePage(Document):
+    class Employee(Document):
         ...
-        employee = ReferenceField('Employee', reverse_delete_rule=mongoengine.CASCADE)
+        profile_page = ReferenceField('ProfilePage', reverse_delete_rule=mongoengine.NULLIFY)

 The declaration in this example means that when an :class:`Employee` object is
-removed, the :class:`ProfilePage` that references that employee is removed as
+removed, the :class:`ProfilePage` that belongs to that employee is removed as
 well. If a whole batch of employees is removed, all profile pages that are
 linked are removed as well.
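A runnable sketch of the left-hand (newer) variant, where deleting an Employee cascades to its ProfilePage; the field values are illustrative and a local mongod is assumed::

    import mongoengine
    from mongoengine import Document, ReferenceField, StringField, connect

    connect('mydb')

    class Employee(Document):
        name = StringField()

    class ProfilePage(Document):
        content = StringField()
        # Deleting the referenced Employee deletes this page as well.
        employee = ReferenceField(Employee,
                                  reverse_delete_rule=mongoengine.CASCADE)

    employee = Employee(name='Ross').save()
    ProfilePage(content='about Ross', employee=employee).save()
    employee.delete()
    assert ProfilePage.objects.count() == 0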
@@ -337,7 +334,7 @@ Its value can take any of the following constants:
     Any object's fields still referring to the object being deleted are removed
     (using MongoDB's "unset" operation), effectively nullifying the relationship.
 :const:`mongoengine.CASCADE`
-    Any object containing fields that are referring to the object being deleted
+    Any object containing fields that are refererring to the object being deleted
     are deleted first.
 :const:`mongoengine.PULL`
     Removes the reference to the object (using MongoDB's "pull" operation)
@@ -404,7 +401,7 @@ MongoEngine allows you to specify that a field should be unique across a
 collection by providing ``unique=True`` to a :class:`~mongoengine.fields.Field`\ 's
 constructor. If you try to save a document that has the same value for a unique
 field as a document that is already in the database, a
-:class:`~mongoengine.NotUniqueError` will be raised. You may also specify
+:class:`~mongoengine.OperationError` will be raised. You may also specify
 multi-field uniqueness constraints by using :attr:`unique_with`, which may be
 either a single field name, or a list or tuple of field names::
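The example introduced by the ``::`` is cut off; the pattern it refers to, sketched::

    from mongoengine import Document, StringField

    class User(Document):
        username = StringField(unique=True)
        first_name = StringField()
        # The first_name/last_name combination must be unique.
        last_name = StringField(unique_with='first_name')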
@@ -431,7 +428,7 @@ Document collections
 ====================
 Document classes that inherit **directly** from :class:`~mongoengine.Document`
 will have their own **collection** in the database. The name of the collection
-is by default the name of the class, converted to lowercase (so in the example
+is by default the name of the class, coverted to lowercase (so in the example
 above, the collection would be called `page`). If you need to change the name
 of the collection (e.g. to use MongoEngine with an existing database), then
 create a class dictionary attribute called :attr:`meta` on your document, and
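The sentence continues past this hunk; the mechanism it describes, sketched (collection name illustrative)::

    from mongoengine import Document, StringField

    class Page(Document):
        title = StringField(max_length=200, required=True)
        meta = {'collection': 'cmsPage'}  # instead of the default 'page'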
@@ -448,10 +445,8 @@ A :class:`~mongoengine.Document` may use a **Capped Collection** by specifying
 :attr:`max_documents` and :attr:`max_size` in the :attr:`meta` dictionary.
 :attr:`max_documents` is the maximum number of documents that is allowed to be
 stored in the collection, and :attr:`max_size` is the maximum size of the
-collection in bytes. :attr:`max_size` is rounded up to the next multiple of 256
-by MongoDB internally and mongoengine before. Use also a multiple of 256 to
-avoid confusions. If :attr:`max_size` is not specified and
-:attr:`max_documents` is, :attr:`max_size` defaults to 10485760 bytes (10MB).
+collection in bytes. If :attr:`max_size` is not specified and
+:attr:`max_documents` is, :attr:`max_size` defaults to 10000000 bytes (10MB).

 The following example shows a :class:`Log` document that will be limited to
 1000 entries and 2MB of disk space::
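The Log example itself is outside the excerpt; it is conventionally along these lines::

    from mongoengine import Document, StringField

    class Log(Document):
        ip_address = StringField()
        meta = {'max_documents': 1000, 'max_size': 2000000}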
@@ -468,31 +463,16 @@ You can specify indexes on collections to make querying faster. This is done
 by creating a list of index specifications called :attr:`indexes` in the
 :attr:`~mongoengine.Document.meta` dictionary, where an index specification may
 either be a single field name, a tuple containing multiple field names, or a
-dictionary containing a full index definition.
-
-A direction may be specified on fields by prefixing the field name with a
-**+** (for ascending) or a **-** sign (for descending). Note that direction
-only matters on multi-field indexes. Text indexes may be specified by prefixing
-the field name with a **$**. Hashed indexes may be specified by prefixing
-the field name with a **#**::
+dictionary containing a full index definition. A direction may be specified on
+fields by prefixing the field name with a **+** (for ascending) or a **-** sign
+(for descending). Note that direction only matters on multi-field indexes.
+Text indexes may be specified by prefixing the field name with a **$**. ::

     class Page(Document):
-        category = IntField()
         title = StringField()
         rating = StringField()
-        created = DateTimeField()
         meta = {
-            'indexes': [
-                'title',
-                '$title',  # text index
-                '#title',  # hashed index
-                ('title', '-rating'),
-                ('category', '_cls'),
-                {
-                    'fields': ['created'],
-                    'expireAfterSeconds': 3600
-                }
-            ]
+            'indexes': ['title', ('title', '-rating')]
         }

 If a dictionary is passed then the following options are available:
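The option list itself falls outside the excerpt. A sketch of a full dictionary specification using commonly documented keys (``fields``, ``unique``, ``sparse``; treat the exact set of supported options as an assumption here)::

    from mongoengine import Document, StringField

    class Page(Document):
        title = StringField()
        meta = {
            'indexes': [
                {'fields': ['-title'], 'unique': True, 'sparse': True},
            ]
        }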
@@ -542,14 +522,11 @@ There are a few top level defaults for all indexes that can be set::
:attr:`index_background` (Optional) :attr:`index_background` (Optional)
Set the default value for if an index should be indexed in the background Set the default value for if an index should be indexed in the background
:attr:`index_cls` (Optional)
A way to turn off a specific index for _cls.
:attr:`index_drop_dups` (Optional) :attr:`index_drop_dups` (Optional)
Set the default value for whether an index should drop duplicates Set the default value for whether an index should drop duplicates
.. note:: Since MongoDB 3.0 drop_dups is not supported anymore. Raises a Warning :attr:`index_cls` (Optional)
and has no effect A way to turn off a specific index for _cls.
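For illustration, a sketch of these defaults set in a document's ``meta`` (the document and values are arbitrary):

.. code-block:: python

    class Person(Document):
        name = StringField()

        meta = {
            'index_background': True,  # build indexes in the background
            'index_cls': False,        # don't automatically index _cls
            'indexes': ['name'],
        }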
Compound Indexes and Indexing sub documents Compound Indexes and Indexing sub documents
@@ -687,11 +664,11 @@ Shard keys
========== ==========
If your collection is sharded, then you need to specify the shard key as a tuple, If your collection is sharded, then you need to specify the shard key as a tuple,
using the :attr:`shard_key` attribute of :attr:`~mongoengine.Document.meta`. using the :attr:`shard_key` attribute of :attr:`-mongoengine.Document.meta`.
This ensures that the shard key is sent with the query when calling the This ensures that the shard key is sent with the query when calling the
:meth:`~mongoengine.document.Document.save` or :meth:`~mongoengine.document.Document.save` or
:meth:`~mongoengine.document.Document.update` method on an existing :meth:`~mongoengine.document.Document.update` method on an existing
:class:`~mongoengine.Document` instance:: :class:`-mongoengine.Document` instance::
class LogEntry(Document): class LogEntry(Document):
machine = StringField() machine = StringField()
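    # a sketch of the rest of this definition (field names illustrative):
    timestamp = DateTimeField()

    meta = {
        'shard_key': ('machine', 'timestamp')
    }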
@@ -713,7 +690,7 @@ defined, you may subclass it and add any extra fields or methods you may need.
As this new class is not a direct subclass of As this new class is not a direct subclass of
:class:`~mongoengine.Document`, it will not be stored in its own collection; it :class:`~mongoengine.Document`, it will not be stored in its own collection; it
will use the same collection as its superclass uses. This allows for more will use the same collection as its superclass uses. This allows for more
convenient and efficient retrieval of related documents -- all you need do is convenient and efficient retrieval of related documents - all you need do is
set :attr:`allow_inheritance` to True in the :attr:`meta` data for a set :attr:`allow_inheritance` to True in the :attr:`meta` data for a
document.:: document.::
@@ -727,12 +704,12 @@ document.::
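# a sketch of the parent class this example assumes (fields illustrative):
class Page(Document):
    title = StringField(max_length=200)

    meta = {'allow_inheritance': True}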
class DatedPage(Page): class DatedPage(Page):
date = DateTimeField() date = DateTimeField()
.. note:: From 0.8 onwards :attr:`allow_inheritance` defaults .. note:: From 0.8 onwards you must declare :attr:`allow_inheritance` defaults
to False, meaning you must set it to True to use inheritance. to False, meaning you must set it to True to use inheritance.
Working with existing data Working with existing data
-------------------------- --------------------------
As MongoEngine no longer defaults to needing :attr:`_cls`, you can quickly and As MongoEngine no longer defaults to needing :attr:`_cls` you can quickly and
easily get working with existing data. Just define the document to match easily get working with existing data. Just define the document to match
the expected schema in your database :: the expected schema in your database ::
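    # a sketch mapped onto a pre-existing collection (names hypothetical):
    class Person(Document):
        name = StringField()

        meta = {'collection': 'people'}  # the collection that already exists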
@@ -755,7 +732,7 @@ Abstract classes
If you want to add some extra functionality to a group of Document classes but If you want to add some extra functionality to a group of Document classes but
you don't need or want the overhead of inheritance you can use the you don't need or want the overhead of inheritance you can use the
:attr:`abstract` attribute of :attr:`~mongoengine.Document.meta`. :attr:`abstract` attribute of :attr:`-mongoengine.Document.meta`.
This won't turn on :ref:`document-inheritance` but will allow you to keep your This won't turn on :ref:`document-inheritance` but will allow you to keep your
code DRY:: code DRY::
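    # a sketch of an abstract base providing shared behaviour:
    class BaseDocument(Document):
        meta = {'abstract': True}

        def check_permissions(self):
            pass

    class User(BaseDocument):
        name = StringField()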

View File

@@ -2,7 +2,7 @@
Document instances Document instances
=================== ===================
To create a new document object, create an instance of the relevant document To create a new document object, create an instance of the relevant document
class, providing values for its fields as constructor keyword arguments. class, providing values for its fields as its constructor keyword arguments.
You may provide values for any of the fields on the document:: You may provide values for any of the fields on the document::
>>> page = Page(title="Test Page") >>> page = Page(title="Test Page")
@@ -32,11 +32,11 @@ already exist, then any changes will be updated atomically. For example::
Changes to documents are tracked and on the whole perform ``set`` operations. Changes to documents are tracked and on the whole perform ``set`` operations.
* ``list_field.push(0)`` --- *sets* the resulting list * ``list_field.push(0)`` - *sets* the resulting list
* ``del(list_field)`` --- *unsets* whole list * ``del(list_field)`` - *unsets* whole list
With lists it's preferable to use ``Doc.update(push__list_field=0)`` as With lists it's preferable to use ``Doc.update(push__list_field=0)`` as
this stops the whole list being updated --- stopping any race conditions. this stops the whole list being updated - stopping any race conditions.
.. seealso:: .. seealso::
:ref:`guide-atomic-updates` :ref:`guide-atomic-updates`
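For example, a sketch of the preferred atomic form (``Post`` and ``tags`` are illustrative):

.. code-block:: python

    class Post(Document):
        tags = ListField(StringField())

    post = Post(tags=['db']).save()
    post.update(push__tags='mongo')  # atomic $push; the list is not rewritten wholesale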
@@ -74,7 +74,7 @@ Cascading Saves
If your document contains :class:`~mongoengine.fields.ReferenceField` or If your document contains :class:`~mongoengine.fields.ReferenceField` or
:class:`~mongoengine.fields.GenericReferenceField` objects, then by default the :class:`~mongoengine.fields.GenericReferenceField` objects, then by default the
:meth:`~mongoengine.Document.save` method will not save any changes to :meth:`~mongoengine.Document.save` method will not save any changes to
those objects. If you want all references to be saved also, noting each those objects. If you want all references to also be saved also, noting each
save is a separate query, then passing :attr:`cascade` as True save is a separate query, then passing :attr:`cascade` as True
to the save method will cascade any saves. to the save method will cascade any saves.
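A sketch of a cascading save (the classes are hypothetical):

.. code-block:: python

    class Author(Document):
        name = StringField()

    class Book(Document):
        author = ReferenceField(Author)

    book = Book.objects.first()
    book.author.name = 'New name'
    book.save(cascade=True)  # also saves the dirty referenced Author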
@@ -113,13 +113,12 @@ you may still use :attr:`id` to access the primary key if you want::
>>> bob.id == bob.email == 'bob@example.com' >>> bob.id == bob.email == 'bob@example.com'
True True
You can also access the document's "primary key" using the :attr:`pk` field; You can also access the document's "primary key" using the :attr:`pk` field; in
it's an alias to :attr:`id`:: is an alias to :attr:`id`::
>>> page = Page(title="Another Test Page") >>> page = Page(title="Another Test Page")
>>> page.save() >>> page.save()
>>> page.id == page.pk >>> page.id == page.pk
True
.. note:: .. note::

View File

@@ -13,4 +13,3 @@ User Guide
gridfs gridfs
signals signals
text-indexes text-indexes
mongomock

View File

@@ -1,21 +0,0 @@
==============================
Use mongomock for testing
==============================
`mongomock <https://github.com/vmalloc/mongomock/>`_ is a package to do just
what the name implies, mocking a mongo database.
To use with mongoengine, simply specify mongomock when connecting with
mongoengine:
.. code-block:: python
connect('mongoenginetest', host='mongomock://localhost')
conn = get_connection()
or with an alias:
.. code-block:: python
connect('mongoenginetest', host='mongomock://localhost', alias='testdb')
conn = get_connection('testdb')

View File

@@ -17,7 +17,7 @@ fetch documents from the database::
As of MongoEngine 0.8 the querysets utilise a local cache. So iterating As of MongoEngine 0.8 the querysets utilise a local cache. So iterating
it multiple times will only cause a single query. If this is not the it multiple times will only cause a single query. If this is not the
desired behaviour you can call :class:`~mongoengine.QuerySet.no_cache` desired behavour you can call :class:`~mongoengine.QuerySet.no_cache`
(version **0.8.3+**) to return a non-caching queryset. (version **0.8.3+**) to return a non-caching queryset.
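A sketch of opting out of the cache (``User`` is illustrative):

.. code-block:: python

    users = User.objects.no_cache()
    for u in users:   # each full iteration issues a fresh query
        print(u)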
Filtering queries Filtering queries
@@ -39,18 +39,10 @@ syntax::
# been written by a user whose 'country' field is set to 'uk' # been written by a user whose 'country' field is set to 'uk'
uk_pages = Page.objects(author__country='uk') uk_pages = Page.objects(author__country='uk')
.. note::
(version **0.9.1+**) if your field name is the same as a MongoDB operator name (for
example ``type``, ``lte``, ``lt``...) and you want to place it at the end of the
lookup keyword, mongoengine automatically prepends $ to it. To avoid this, use __ at
the end of your lookup keyword. For example, if your field name is ``type`` and you
want to query by this field, you must use ``.objects(user__type__="admin")`` instead
of ``.objects(user__type="admin")``
Query operators Query operators
=============== ===============
Operators other than equality may also be used in queries --- just attach the Operators other than equality may also be used in queries; just attach the
operator name to a key with a double-underscore:: operator name to a key with a double-underscore::
# Only find users whose age is 18 or less # Only find users whose age is 18 or less
@@ -92,20 +84,19 @@ expressions:
Geo queries Geo queries
----------- -----------
There are a few special operators for performing geographical queries. There are a few special operators for performing geographical queries. The following
The following were added in MongoEngine 0.8 for were added in 0.8 for: :class:`~mongoengine.fields.PointField`,
:class:`~mongoengine.fields.PointField`,
:class:`~mongoengine.fields.LineStringField` and :class:`~mongoengine.fields.LineStringField` and
:class:`~mongoengine.fields.PolygonField`: :class:`~mongoengine.fields.PolygonField`:
* ``geo_within`` -- check if a geometry is within a polygon. For ease of use * ``geo_within`` -- Check if a geometry is within a polygon. For ease of use
it accepts either a geojson geometry or just the polygon coordinates eg:: it accepts either a geojson geometry or just the polygon coordinates eg::
loc.objects(point__geo_within=[[[40, 5], [40, 6], [41, 6], [40, 5]]]) loc.objects(point__geo_within=[[[40, 5], [40, 6], [41, 6], [40, 5]]])
loc.objects(point__geo_within={"type": "Polygon", loc.objects(point__geo_within={"type": "Polygon",
"coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}) "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]})
* ``geo_within_box`` -- simplified geo_within searching with a box eg:: * ``geo_within_box`` - simplified geo_within searching with a box eg::
loc.objects(point__geo_within_box=[(-125.0, 35.0), (-100.0, 40.0)]) loc.objects(point__geo_within_box=[(-125.0, 35.0), (-100.0, 40.0)])
loc.objects(point__geo_within_box=[<bottom left coordinates>, <upper right coordinates>]) loc.objects(point__geo_within_box=[<bottom left coordinates>, <upper right coordinates>])
@@ -141,22 +132,23 @@ The following were added in MongoEngine 0.8 for
loc.objects(poly__geo_intersects={"type": "Polygon", loc.objects(poly__geo_intersects={"type": "Polygon",
"coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]}) "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]})
* ``near`` -- find all the locations near a given point:: * ``near`` -- Find all the locations near a given point::
loc.objects(point__near=[40, 5]) loc.objects(point__near=[40, 5])
loc.objects(point__near={"type": "Point", "coordinates": [40, 5]}) loc.objects(point__near={"type": "Point", "coordinates": [40, 5]})
You can also set the maximum and/or the minimum distance in meters::
You can also set the maximum distance in meters as well::
loc.objects(point__near=[40, 5], point__max_distance=1000) loc.objects(point__near=[40, 5], point__max_distance=1000)
loc.objects(point__near=[40, 5], point__min_distance=100)
The older 2D indexes are still supported with the The older 2D indexes are still supported with the
:class:`~mongoengine.fields.GeoPointField`: :class:`~mongoengine.fields.GeoPointField`:
* ``within_distance`` -- provide a list containing a point and a maximum * ``within_distance`` -- provide a list containing a point and a maximum
distance (e.g. [(41.342, -87.653), 5]) distance (e.g. [(41.342, -87.653), 5])
* ``within_spherical_distance`` -- same as above but using the spherical geo model * ``within_spherical_distance`` -- Same as above but using the spherical geo model
(e.g. [(41.342, -87.653), 5/earth_radius]) (e.g. [(41.342, -87.653), 5/earth_radius])
* ``near`` -- order the documents by how close they are to a given point * ``near`` -- order the documents by how close they are to a given point
* ``near_sphere`` -- Same as above but using the spherical geo model * ``near_sphere`` -- Same as above but using the spherical geo model
@@ -169,8 +161,7 @@ The older 2D indexes are still supported with the
* ``max_distance`` -- can be added to your location queries to set a maximum * ``max_distance`` -- can be added to your location queries to set a maximum
distance. distance.
* ``min_distance`` -- can be added to your location queries to set a minimum
distance.
Querying lists Querying lists
-------------- --------------
@@ -207,14 +198,12 @@ However, this doesn't map well to the syntax so you can also use a capital S ins
Post.objects(comments__by="joe").update(inc__comments__S__votes=1) Post.objects(comments__by="joe").update(inc__comments__S__votes=1)
.. note:: .. note:: Due to Mongo currently the $ operator only applies to the first matched item in the query.
Due to a :program:`Mongo` limitation, the $ operator currently only applies
to the first matched item in the query.
Raw queries Raw queries
----------- -----------
It is possible to provide a raw :mod:`PyMongo` query as a query parameter, which will It is possible to provide a raw PyMongo query as a query parameter, which will
be integrated directly into the query. This is done using the ``__raw__`` be integrated directly into the query. This is done using the ``__raw__``
keyword argument:: keyword argument::
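    # a sketch: embed a raw PyMongo query document via __raw__
    Page.objects(__raw__={'tags': 'coding'})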
@@ -224,12 +213,12 @@ keyword argument::
Limiting and skipping results Limiting and skipping results
============================= =============================
Just as with traditional ORMs, you may limit the number of results returned or Just as with traditional ORMs, you may limit the number of results returned, or
skip a number of results in your query. skip a number of results in your query.
:meth:`~mongoengine.queryset.QuerySet.limit` and :meth:`~mongoengine.queryset.QuerySet.limit` and
:meth:`~mongoengine.queryset.QuerySet.skip` methods are available on :meth:`~mongoengine.queryset.QuerySet.skip` methods are available on
:class:`~mongoengine.queryset.QuerySet` objects, but the `array-slicing` syntax :class:`~mongoengine.queryset.QuerySet` objects, but the prefered syntax for
is preferred for achieving this:: achieving this is using array-slicing syntax::
# Only the first 5 people # Only the first 5 people
users = User.objects[:5] users = User.objects[:5]
@@ -237,7 +226,7 @@ is preferred for achieving this::
# All except for the first 5 people # All except for the first 5 people
users = User.objects[5:] users = User.objects[5:]
# 5 users, starting from the 11th user found # 5 users, starting from the 10th user found
users = User.objects[10:15] users = User.objects[10:15]
You may also index the query to retrieve a single result. If an item at that You may also index the query to retrieve a single result. If an item at that
@@ -263,17 +252,23 @@ To retrieve a result that should be unique in the collection, use
no document matches the query, and no document matches the query, and
:class:`~mongoengine.queryset.MultipleObjectsReturned` :class:`~mongoengine.queryset.MultipleObjectsReturned`
if more than one document matched the query. These exceptions are merged into if more than one document matched the query. These exceptions are merged into
your document definitions eg: `MyDoc.DoesNotExist` your document defintions eg: `MyDoc.DoesNotExist`
A variation of this method, get_or_create(), existed but was unsafe: it A variation of this method exists,
could not be made safe because there are no transactions in MongoDB. Other :meth:`~mongoengine.queryset.Queryset.get_or_create`, that will create a new
approaches should be investigated to ensure you don't accidentally duplicate document with the query arguments if no documents match the query. An
data when using something similar to this method. Therefore it was deprecated additional keyword argument, :attr:`defaults` may be provided, which will be
in 0.8 and removed in 0.10. used as default values for the new document, in the case that it should need
to be created::
>>> a, created = User.objects.get_or_create(name='User A', defaults={'age': 30})
>>> b, created = User.objects.get_or_create(name='User A', defaults={'age': 40})
>>> a.name == b.name and a.age == b.age
True
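One possible replacement is an atomic upsert, sketched below (this assumes ``set_on_insert`` support, available in MongoEngine 0.9+, and is not a drop-in equivalent):

.. code-block:: python

    User.objects(name='User A').update_one(set_on_insert__age=30, upsert=True)
    user = User.objects.get(name='User A')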
Default Document queries Default Document queries
======================== ========================
By default, the :attr:`~Document.objects` attribute on a By default, the :attr:`~mongoengine.Document.objects` attribute on a
document returns a :class:`~mongoengine.queryset.QuerySet` that doesn't filter document returns a :class:`~mongoengine.queryset.QuerySet` that doesn't filter
the collection -- it returns all objects. This may be changed by defining a the collection -- it returns all objects. This may be changed by defining a
method on a document that modifies a queryset. The method should accept two method on a document that modifies a queryset. The method should accept two
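For illustration, a sketch of such a default manager using the :func:`queryset_manager` decorator, whose two arguments are the document class and the initial queryset (``BlogPost`` is hypothetical):

.. code-block:: python

    from mongoengine.queryset import queryset_manager

    class BlogPost(Document):
        published = BooleanField(default=False)

        @queryset_manager
        def objects(doc_cls, queryset):
            # only return published posts by default
            return queryset.filter(published=True)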
@@ -316,7 +311,7 @@ Should you want to add custom methods for interacting with or filtering
documents, extending the :class:`~mongoengine.queryset.QuerySet` class may be documents, extending the :class:`~mongoengine.queryset.QuerySet` class may be
the way to go. To use a custom :class:`~mongoengine.queryset.QuerySet` class on the way to go. To use a custom :class:`~mongoengine.queryset.QuerySet` class on
a document, set ``queryset_class`` to the custom class in a a document, set ``queryset_class`` to the custom class in a
:class:`~mongoengine.Document`'s ``meta`` dictionary:: :class:`~mongoengine.Document`\ s ``meta`` dictionary::
class AwesomerQuerySet(QuerySet): class AwesomerQuerySet(QuerySet):
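    # a sketch of a custom method (the field name is hypothetical):
    def get_awesome(self):
        return self.filter(awesome=True)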
@@ -347,8 +342,6 @@ way of achieving this::
num_users = len(User.objects) num_users = len(User.objects)
Even if len() is the Pythonic way of counting results, keep in mind that if you
are concerned about performance, :meth:`~mongoengine.queryset.QuerySet.count` is
the way to go, since it only executes a server-side count query, while len()
retrieves the results, places them in the local cache, and finally counts them.
If we compare the performance of the two operations, len() is much slower than
:meth:`~mongoengine.queryset.QuerySet.count`.
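A quick sketch of the two approaches:

.. code-block:: python

    num_users = User.objects.count()  # server-side count; no documents fetched
    num_users = len(User.objects)     # fetches and caches every result first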
Further aggregation Further aggregation
------------------- -------------------
You may sum over the values of a specific field on documents using You may sum over the values of a specific field on documents using
@@ -498,14 +491,11 @@ Documents may be updated atomically by using the
:meth:`~mongoengine.queryset.QuerySet.update_one`, :meth:`~mongoengine.queryset.QuerySet.update_one`,
:meth:`~mongoengine.queryset.QuerySet.update` and :meth:`~mongoengine.queryset.QuerySet.update` and
:meth:`~mongoengine.queryset.QuerySet.modify` methods on a :meth:`~mongoengine.queryset.QuerySet.modify` methods on a
:class:`~mongoengine.queryset.QuerySet` or :meth:`~mongoengine.queryset.QuerySet`. There are several different "modifiers"
:meth:`~mongoengine.Document.modify` and that you may use with these methods:
:meth:`~mongoengine.Document.save` (with :attr:`save_condition` argument) on a
:class:`~mongoengine.Document`.
There are several different "modifiers" that you may use with these methods:
* ``set`` -- set a particular value * ``set`` -- set a particular value
* ``unset`` -- delete a particular value (since MongoDB v1.3) * ``unset`` -- delete a particular value (since MongoDB v1.3+)
* ``inc`` -- increment a value by a given amount * ``inc`` -- increment a value by a given amount
* ``dec`` -- decrement a value by a given amount * ``dec`` -- decrement a value by a given amount
* ``push`` -- append a value to a list * ``push`` -- append a value to a list
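A sketch of some of these modifiers in use (document and field names illustrative):

.. code-block:: python

    BlogPost.objects(id=post.id).update_one(set__title='Example', inc__hits=1)
    BlogPost.objects(id=post.id).update_one(push__tags='mongo')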
@@ -600,7 +590,7 @@ Some variables are made available in the scope of the Javascript function:
The following example demonstrates the intended usage of The following example demonstrates the intended usage of
:meth:`~mongoengine.queryset.QuerySet.exec_js` by defining a function that sums :meth:`~mongoengine.queryset.QuerySet.exec_js` by defining a function that sums
over a field on a document (this functionality is already available through over a field on a document (this functionality is already available throught
:meth:`~mongoengine.queryset.QuerySet.sum` but is shown here for sake of :meth:`~mongoengine.queryset.QuerySet.sum` but is shown here for sake of
example):: example)::
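    # a sketch of the summing function described above; `collection` and
    # `query` are provided in the Javascript scope by exec_js
    def sum_field(document, field_name):
        code = """
        function(sumField) {
            var total = 0.0;
            db[collection].find(query).forEach(function(doc) {
                total += doc[sumField] || 0.0;
            });
            return total;
        }
        """
        return document.objects.exec_js(code, field_name)

    sum_field(BlogPost, 'hits')  # document/field are illustrative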

View File

@@ -35,25 +35,25 @@ Available signals include:
:class:`~mongoengine.EmbeddedDocument` instance has been completed. :class:`~mongoengine.EmbeddedDocument` instance has been completed.
`pre_save` `pre_save`
Called within :meth:`~mongoengine.Document.save` prior to performing Called within :meth:`~mongoengine.document.Document.save` prior to performing
any actions. any actions.
`pre_save_post_validation` `pre_save_post_validation`
Called within :meth:`~mongoengine.Document.save` after validation Called within :meth:`~mongoengine.document.Document.save` after validation
has taken place but before saving. has taken place but before saving.
`post_save` `post_save`
Called within :meth:`~mongoengine.Document.save` after all actions Called within :meth:`~mongoengine.document.Document.save` after all actions
(validation, insert/update, cascades, clearing dirty flags) have completed (validation, insert/update, cascades, clearing dirty flags) have completed
successfully. Passed the additional boolean keyword argument `created` to successfully. Passed the additional boolean keyword argument `created` to
indicate if the save was an insert or an update. indicate if the save was an insert or an update.
`pre_delete` `pre_delete`
Called within :meth:`~mongoengine.Document.delete` prior to Called within :meth:`~mongoengine.document.Document.delete` prior to
attempting the delete operation. attempting the delete operation.
`post_delete` `post_delete`
Called within :meth:`~mongoengine.Document.delete` upon successful Called within :meth:`~mongoengine.document.Document.delete` upon successful
deletion of the record. deletion of the record.
`pre_bulk_insert` `pre_bulk_insert`
@@ -145,7 +145,7 @@ cleaner looking while still allowing manual execution of the callback::
ReferenceFields and Signals ReferenceFields and Signals
--------------------------- ---------------------------
Currently `reverse_delete_rule` does not trigger signals on the other part of Currently `reverse_delete_rules` do not trigger signals on the other part of
the relationship. If this is required you must manually handle the the relationship. If this is required you must manually handle the
reverse deletion. reverse deletion.
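For reference, a sketch of connecting a handler (``Page`` and its ``modified`` field are hypothetical):

.. code-block:: python

    from datetime import datetime
    from mongoengine import signals

    def update_modified(sender, document, **kwargs):
        document.modified = datetime.utcnow()

    signals.pre_save.connect(update_modified, sender=Page)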

View File

@@ -17,7 +17,7 @@ Use the *$* prefix to set a text index, Look the declaration::
meta = {'indexes': [ meta = {'indexes': [
{'fields': ['$title', "$content"], {'fields': ['$title', "$content"],
'default_language': 'english', 'default_language': 'english',
'weights': {'title': 10, 'content': 2} 'weight': {'title': 10, 'content': 2}
} }
]} ]}
@@ -46,6 +46,4 @@ Next, start a text search using :attr:`QuerySet.search_text` method::
Ordering by text score Ordering by text score
====================== ======================
::
objects = News.objects.search('mongo').order_by('$text_score') objects = News.objects.search('mongo').order_by('$text_score')

View File

@@ -14,7 +14,7 @@ MongoDB. To install it, simply run
MongoEngine. MongoEngine.
:doc:`guide/index` :doc:`guide/index`
The full guide to MongoEngine --- from modeling documents to storing files, The full guide to MongoEngine - from modeling documents to storing files,
from querying for data to firing signals and *everything* between. from querying for data to firing signals and *everything* between.
:doc:`apireference` :doc:`apireference`

View File

@@ -65,7 +65,7 @@ which fields a :class:`User` may have, and what types of data they might store::
first_name = StringField(max_length=50) first_name = StringField(max_length=50)
last_name = StringField(max_length=50) last_name = StringField(max_length=50)
This looks similar to how the structure of a table would be defined in a This looks similar to how a the structure of a table would be defined in a
regular ORM. The key difference is that this schema will never be passed on to regular ORM. The key difference is that this schema will never be passed on to
MongoDB --- this will only be enforced at the application level, making future MongoDB --- this will only be enforced at the application level, making future
changes easy to manage. Also, the User documents will be stored in a changes easy to manage. Also, the User documents will be stored in a

View File

@@ -2,20 +2,10 @@
Upgrading Upgrading
######### #########
0.9.0
*****
The 0.8.7 package on PyPI was corrupted. If upgrading from 0.8.7 to 0.9.0, run the following: ::
pip uninstall pymongo
pip uninstall mongoengine
pip install pymongo==2.8
pip install mongoengine
0.8.7 0.8.7
***** *****
Calling reload on deleted / nonexistent documents now raises a DoesNotExist Calling reload on deleted / nonexistant documents now raises a DoesNotExist
exception. exception.
@@ -273,7 +263,7 @@ update your code like so: ::
[m for m in mammals] # This will return all carnivores [m for m in mammals] # This will return all carnivores
Len iterates the queryset Len iterates the queryset
------------------------- --------------------------
If you ever did `len(queryset)` it previously did a `count()` under the covers, If you ever did `len(queryset)` it previously did a `count()` under the covers,
this caused some unusual issues. As `len(queryset)` is most often used by this caused some unusual issues. As `len(queryset)` is most often used by

View File

@@ -1,20 +1,21 @@
import connection
from connection import *
import document import document
from document import * from document import *
import errors
from errors import *
import fields import fields
from fields import * from fields import *
import connection
from connection import *
import queryset import queryset
from queryset import * from queryset import *
import signals import signals
from signals import * from signals import *
from errors import *
import errors
import django
__all__ = (list(document.__all__) + fields.__all__ + connection.__all__ + __all__ = (list(document.__all__) + fields.__all__ + connection.__all__ +
list(queryset.__all__) + signals.__all__ + list(errors.__all__)) list(queryset.__all__) + signals.__all__ + list(errors.__all__))
VERSION = (0, 10, 7) VERSION = (0, 8, 7)
def get_version(): def get_version():
@@ -22,5 +23,4 @@ def get_version():
return '.'.join(map(str, VERSION[:-1])) + VERSION[-1] return '.'.join(map(str, VERSION[:-1])) + VERSION[-1]
return '.'.join(map(str, VERSION)) return '.'.join(map(str, VERSION))
__version__ = get_version() __version__ = get_version()

View File

@@ -1,10 +1,9 @@
import itertools
import weakref import weakref
import functools
import itertools
from mongoengine.common import _import_class from mongoengine.common import _import_class
from mongoengine.errors import DoesNotExist, MultipleObjectsReturned
__all__ = ("BaseDict", "BaseList", "EmbeddedDocumentList") __all__ = ("BaseDict", "BaseList")
class BaseDict(dict): class BaseDict(dict):
@@ -21,7 +20,7 @@ class BaseDict(dict):
if isinstance(instance, (Document, EmbeddedDocument)): if isinstance(instance, (Document, EmbeddedDocument)):
self._instance = weakref.proxy(instance) self._instance = weakref.proxy(instance)
self._name = name self._name = name
super(BaseDict, self).__init__(dict_items) return super(BaseDict, self).__init__(dict_items)
def __getitem__(self, key, *args, **kwargs): def __getitem__(self, key, *args, **kwargs):
value = super(BaseDict, self).__getitem__(key) value = super(BaseDict, self).__getitem__(key)
@@ -66,7 +65,7 @@ class BaseDict(dict):
def clear(self, *args, **kwargs): def clear(self, *args, **kwargs):
self._mark_as_changed() self._mark_as_changed()
return super(BaseDict, self).clear() return super(BaseDict, self).clear(*args, **kwargs)
def pop(self, *args, **kwargs): def pop(self, *args, **kwargs):
self._mark_as_changed() self._mark_as_changed()
@@ -74,11 +73,7 @@ class BaseDict(dict):
def popitem(self, *args, **kwargs): def popitem(self, *args, **kwargs):
self._mark_as_changed() self._mark_as_changed()
return super(BaseDict, self).popitem() return super(BaseDict, self).popitem(*args, **kwargs)
def setdefault(self, *args, **kwargs):
self._mark_as_changed()
return super(BaseDict, self).setdefault(*args, **kwargs)
def update(self, *args, **kwargs): def update(self, *args, **kwargs):
self._mark_as_changed() self._mark_as_changed()
@@ -107,7 +102,7 @@ class BaseList(list):
if isinstance(instance, (Document, EmbeddedDocument)): if isinstance(instance, (Document, EmbeddedDocument)):
self._instance = weakref.proxy(instance) self._instance = weakref.proxy(instance)
self._name = name self._name = name
super(BaseList, self).__init__(list_items) return super(BaseList, self).__init__(list_items)
def __getitem__(self, key, *args, **kwargs): def __getitem__(self, key, *args, **kwargs):
value = super(BaseList, self).__getitem__(key) value = super(BaseList, self).__getitem__(key)
@@ -125,10 +120,6 @@ class BaseList(list):
value._instance = self._instance value._instance = self._instance
return value return value
def __iter__(self):
for i in xrange(self.__len__()):
yield self[i]
def __setitem__(self, key, value, *args, **kwargs): def __setitem__(self, key, value, *args, **kwargs):
if isinstance(key, slice): if isinstance(key, slice):
self._mark_as_changed() self._mark_as_changed()
@@ -160,14 +151,6 @@ class BaseList(list):
self = state self = state
return self return self
def __iadd__(self, other):
self._mark_as_changed()
return super(BaseList, self).__iadd__(other)
def __imul__(self, other):
self._mark_as_changed()
return super(BaseList, self).__imul__(other)
def append(self, *args, **kwargs): def append(self, *args, **kwargs):
self._mark_as_changed() self._mark_as_changed()
return super(BaseList, self).append(*args, **kwargs) return super(BaseList, self).append(*args, **kwargs)
@@ -190,7 +173,7 @@ class BaseList(list):
def reverse(self, *args, **kwargs): def reverse(self, *args, **kwargs):
self._mark_as_changed() self._mark_as_changed()
return super(BaseList, self).reverse() return super(BaseList, self).reverse(*args, **kwargs)
def sort(self, *args, **kwargs): def sort(self, *args, **kwargs):
self._mark_as_changed() self._mark_as_changed()
@@ -199,203 +182,34 @@ class BaseList(list):
def _mark_as_changed(self, key=None): def _mark_as_changed(self, key=None):
if hasattr(self._instance, '_mark_as_changed'): if hasattr(self._instance, '_mark_as_changed'):
if key: if key:
self._instance._mark_as_changed( self._instance._mark_as_changed('%s.%s' % (self._name, key))
'%s.%s' % (self._name, key % len(self))
)
else: else:
self._instance._mark_as_changed(self._name) self._instance._mark_as_changed(self._name)
class EmbeddedDocumentList(BaseList):
@classmethod
def __match_all(cls, i, kwargs):
items = kwargs.items()
return all([
getattr(i, k) == v or unicode(getattr(i, k)) == v for k, v in items
])
@classmethod
def __only_matches(cls, obj, kwargs):
if not kwargs:
return obj
return filter(lambda i: cls.__match_all(i, kwargs), obj)
def __init__(self, list_items, instance, name):
super(EmbeddedDocumentList, self).__init__(list_items, instance, name)
self._instance = instance
def filter(self, **kwargs):
"""
Filters the list by only including embedded documents with the
given keyword arguments.
:param kwargs: The keyword arguments corresponding to the fields to
filter on. *Multiple arguments are treated as if they are ANDed
together.*
:return: A new ``EmbeddedDocumentList`` containing the matching
embedded documents.
Raises ``AttributeError`` if a given keyword is not a valid field for
the embedded document class.
"""
values = self.__only_matches(self, kwargs)
return EmbeddedDocumentList(values, self._instance, self._name)
def exclude(self, **kwargs):
"""
Filters the list by excluding embedded documents with the given
keyword arguments.
:param kwargs: The keyword arguments corresponding to the fields to
exclude on. *Multiple arguments are treated as if they are ANDed
together.*
:return: A new ``EmbeddedDocumentList`` containing the non-matching
embedded documents.
Raises ``AttributeError`` if a given keyword is not a valid field for
the embedded document class.
"""
exclude = self.__only_matches(self, kwargs)
values = [item for item in self if item not in exclude]
return EmbeddedDocumentList(values, self._instance, self._name)
def count(self):
"""
The number of embedded documents in the list.
:return: The length of the list, equivalent to the result of ``len()``.
"""
return len(self)
def get(self, **kwargs):
"""
Retrieves an embedded document determined by the given keyword
arguments.
:param kwargs: The keyword arguments corresponding to the fields to
search on. *Multiple arguments are treated as if they are ANDed
together.*
:return: The embedded document matched by the given keyword arguments.
Raises ``DoesNotExist`` if the arguments used to query an embedded
document return no results, and ``MultipleObjectsReturned`` if more
than one result is returned.
"""
values = self.__only_matches(self, kwargs)
if len(values) == 0:
raise DoesNotExist(
"%s matching query does not exist." % self._name
)
elif len(values) > 1:
raise MultipleObjectsReturned(
"%d items returned, instead of 1" % len(values)
)
return values[0]
def first(self):
"""
Returns the first embedded document in the list, or ``None`` if empty.
"""
if len(self) > 0:
return self[0]
def create(self, **values):
"""
Creates a new embedded document and appends it to this list.
.. note::
The embedded document changes are not automatically saved
to the database after calling this method.
:param values: A dictionary of values for the embedded document.
:return: The new embedded document instance.
"""
name = self._name
EmbeddedClass = self._instance._fields[name].field.document_type_obj
self._instance[self._name].append(EmbeddedClass(**values))
return self._instance[self._name][-1]
def save(self, *args, **kwargs):
"""
Saves the ancestor document.
:param args: Arguments passed up to the ancestor Document's save
method.
:param kwargs: Keyword arguments passed up to the ancestor Document's
save method.
"""
self._instance.save(*args, **kwargs)
def delete(self):
"""
Removes the embedded documents from this list.
.. note::
The embedded document changes are not automatically saved
to the database after calling this method.
:return: The number of entries deleted.
"""
values = list(self)
for item in values:
self._instance[self._name].remove(item)
return len(values)
def update(self, **update):
"""
Updates the embedded documents with the given update values.
.. note::
The embedded document changes are not automatically saved
to the database after calling this method.
:param update: A dictionary of update values to apply to each
embedded document.
:return: The number of entries updated.
"""
if len(update) == 0:
return 0
values = list(self)
for item in values:
for k, v in update.items():
setattr(item, k, v)
return len(values)
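# Usage sketch for the helpers above (the document classes are hypothetical):
#
#     class Comment(EmbeddedDocument):
#         author = StringField()
#
#     class Post(Document):
#         comments = EmbeddedDocumentListField(Comment)
#
#     post = Post(comments=[Comment(author='a'), Comment(author='b')]).save()
#     post.comments.filter(author='a')   # EmbeddedDocumentList of matches
#     post.comments.get(author='b')      # one match, else DoesNotExist/MultipleObjectsReturned
#     post.comments.create(author='c')   # appends a new Comment
#     post.save()                        # persist the appended comment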
class StrictDict(object): class StrictDict(object):
__slots__ = () __slots__ = ()
_special_fields = set(['get', 'pop', 'iteritems', 'items', 'keys', 'create']) _special_fields = set(['get', 'pop', 'iteritems', 'items', 'keys', 'create'])
_classes = {} _classes = {}
def __init__(self, **kwargs): def __init__(self, **kwargs):
for k, v in kwargs.iteritems(): for k,v in kwargs.iteritems():
setattr(self, k, v) setattr(self, k, v)
def __getitem__(self, key): def __getitem__(self, key):
key = '_reserved_' + key if key in self._special_fields else key key = '_reserved_' + key if key in self._special_fields else key
try: try:
return getattr(self, key) return getattr(self, key)
except AttributeError: except AttributeError:
raise KeyError(key) raise KeyError(key)
def __setitem__(self, key, value): def __setitem__(self, key, value):
key = '_reserved_' + key if key in self._special_fields else key key = '_reserved_' + key if key in self._special_fields else key
return setattr(self, key, value) return setattr(self, key, value)
def __contains__(self, key): def __contains__(self, key):
return hasattr(self, key) return hasattr(self, key)
def get(self, key, default=None): def get(self, key, default=None):
try: try:
return self[key] return self[key]
except KeyError: except KeyError:
return default return default
def pop(self, key, default=None): def pop(self, key, default=None):
v = self.get(key, default) v = self.get(key, default)
try: try:
@@ -403,29 +217,19 @@ class StrictDict(object):
except AttributeError: except AttributeError:
pass pass
return v return v
def iteritems(self): def iteritems(self):
for key in self: for key in self:
yield key, self[key] yield key, self[key]
def items(self): def items(self):
return [(k, self[k]) for k in iter(self)] return [(k, self[k]) for k in iter(self)]
def iterkeys(self):
return iter(self)
def keys(self): def keys(self):
return list(iter(self)) return list(iter(self))
def __iter__(self): def __iter__(self):
return (key for key in self.__slots__ if hasattr(self, key)) return (key for key in self.__slots__ if hasattr(self, key))
def __len__(self): def __len__(self):
return len(list(self.iteritems())) return len(list(self.iteritems()))
def __eq__(self, other): def __eq__(self, other):
return self.items() == other.items() return self.items() == other.items()
def __neq__(self, other): def __neq__(self, other):
return self.items() != other.items() return self.items() != other.items()
@@ -436,18 +240,15 @@ class StrictDict(object):
if allowed_keys not in cls._classes: if allowed_keys not in cls._classes:
class SpecificStrictDict(cls): class SpecificStrictDict(cls):
__slots__ = allowed_keys_tuple __slots__ = allowed_keys_tuple
def __repr__(self): def __repr__(self):
return "{%s}" % ', '.join('"{0!s}": {0!r}'.format(k) for k in self.iterkeys()) return "{%s}" % ', '.join('"{0!s}": {0!r}'.format(k,v) for (k,v) in self.iteritems())
cls._classes[allowed_keys] = SpecificStrictDict cls._classes[allowed_keys] = SpecificStrictDict
return cls._classes[allowed_keys] return cls._classes[allowed_keys]
class SemiStrictDict(StrictDict): class SemiStrictDict(StrictDict):
__slots__ = ('_extras', ) __slots__ = ('_extras')
_classes = {} _classes = {}
def __getattr__(self, attr): def __getattr__(self, attr):
try: try:
super(SemiStrictDict, self).__getattr__(attr) super(SemiStrictDict, self).__getattr__(attr)
@@ -456,7 +257,6 @@ class SemiStrictDict(StrictDict):
return self.__getattribute__('_extras')[attr] return self.__getattribute__('_extras')[attr]
except KeyError as e: except KeyError as e:
raise AttributeError(e) raise AttributeError(e)
def __setattr__(self, attr, value): def __setattr__(self, attr, value):
try: try:
super(SemiStrictDict, self).__setattr__(attr, value) super(SemiStrictDict, self).__setattr__(attr, value)

View File

@@ -1,29 +1,24 @@
import copy import copy
import numbers
import operator import operator
import numbers
from collections import Hashable from collections import Hashable
from functools import partial from functools import partial
from bson import ObjectId, json_util import pymongo
from bson import json_util, ObjectId
from bson.dbref import DBRef from bson.dbref import DBRef
from bson.son import SON from bson.son import SON
import pymongo
from mongoengine import signals from mongoengine import signals
from mongoengine.base.common import ALLOW_INHERITANCE, get_document
from mongoengine.base.datastructures import (
BaseDict,
BaseList,
EmbeddedDocumentList,
SemiStrictDict,
StrictDict
)
from mongoengine.base.fields import ComplexBaseField
from mongoengine.common import _import_class from mongoengine.common import _import_class
from mongoengine.errors import (FieldDoesNotExist, InvalidDocumentError, from mongoengine.errors import (ValidationError, InvalidDocumentError,
LookUpError, ValidationError) LookUpError)
from mongoengine.python_support import PY3, txt_type from mongoengine.python_support import PY3, txt_type
from mongoengine.base.common import get_document, ALLOW_INHERITANCE
from mongoengine.base.datastructures import BaseDict, BaseList, StrictDict, SemiStrictDict
from mongoengine.base.fields import ComplexBaseField
__all__ = ('BaseDocument', 'NON_FIELD_ERRORS') __all__ = ('BaseDocument', 'NON_FIELD_ERRORS')
NON_FIELD_ERRORS = '__all__' NON_FIELD_ERRORS = '__all__'
@@ -51,7 +46,7 @@ class BaseDocument(object):
# We only want named arguments. # We only want named arguments.
field = iter(self._fields_ordered) field = iter(self._fields_ordered)
# If its an automatic id field then skip to the first defined field # If its an automatic id field then skip to the first defined field
if getattr(self, '_auto_id_field', False): if self._auto_id_field:
next(field) next(field)
for value in args: for value in args:
name = next(field) name = next(field)
@@ -59,33 +54,21 @@ class BaseDocument(object):
raise TypeError( raise TypeError(
"Multiple values for keyword argument '" + name + "'") "Multiple values for keyword argument '" + name + "'")
values[name] = value values[name] = value
__auto_convert = values.pop("__auto_convert", True) __auto_convert = values.pop("__auto_convert", True)
# 399: set default values only to fields loaded from DB # 399: set default values only to fields loaded from DB
__only_fields = set(values.pop("__only_fields", values)) __only_fields = set(values.pop("__only_fields", values))
_created = values.pop("_created", True)
signals.pre_init.send(self.__class__, document=self, values=values) signals.pre_init.send(self.__class__, document=self, values=values)
# Check if there are undefined fields supplied to the constructor,
# if so raise an Exception.
if not self._dynamic and (self._meta.get('strict', True) or _created):
_undefined_fields = set(values.keys()) - set(
self._fields.keys() + ['id', 'pk', '_cls', '_text_score'])
if _undefined_fields:
msg = (
"The fields '{0}' do not exist on the document '{1}'"
).format(_undefined_fields, self._class_name)
raise FieldDoesNotExist(msg)
if self.STRICT and not self._dynamic: if self.STRICT and not self._dynamic:
self._data = StrictDict.create(allowed_keys=self._fields_ordered)() self._data = StrictDict.create(allowed_keys=self._fields_ordered)()
else: else:
self._data = SemiStrictDict.create( self._data = SemiStrictDict.create(
allowed_keys=self._fields_ordered)() allowed_keys=self._fields_ordered)()
_created = values.pop("_created", True)
self._data = {}
self._dynamic_fields = SON() self._dynamic_fields = SON()
# Assign default values to instance # Assign default values to instance
@@ -149,6 +132,7 @@ class BaseDocument(object):
# Handle dynamic data only if an initialised dynamic document # Handle dynamic data only if an initialised dynamic document
if self._dynamic and not self._dynamic_lock: if self._dynamic and not self._dynamic_lock:
field = None
if not hasattr(self, name) and not name.startswith('_'): if not hasattr(self, name) and not name.startswith('_'):
DynamicField = _import_class("DynamicField") DynamicField = _import_class("DynamicField")
field = DynamicField(db_field=name) field = DynamicField(db_field=name)
@@ -181,8 +165,8 @@ class BaseDocument(object):
except AttributeError: except AttributeError:
self__initialised = False self__initialised = False
# Check if the user has created a new instance of a class # Check if the user has created a new instance of a class
if (self._is_document and self__initialised and if (self._is_document and self__initialised
self__created and name == self._meta.get('id_field')): and self__created and name == self._meta['id_field']):
super(BaseDocument, self).__setattr__('_created', False) super(BaseDocument, self).__setattr__('_created', False)
super(BaseDocument, self).__setattr__(name, value) super(BaseDocument, self).__setattr__(name, value)
@@ -204,12 +188,7 @@ class BaseDocument(object):
if k in data: if k in data:
setattr(self, k, data[k]) setattr(self, k, data[k])
if '_fields_ordered' in data: if '_fields_ordered' in data:
if self._dynamic: setattr(type(self), '_fields_ordered', data['_fields_ordered'])
setattr(self, '_fields_ordered', data['_fields_ordered'])
else:
_super_fields_ordered = type(self)._fields_ordered
setattr(self, '_fields_ordered', _super_fields_ordered)
dynamic_fields = data.get('_dynamic_fields') or SON() dynamic_fields = data.get('_dynamic_fields') or SON()
for k in dynamic_fields.keys(): for k in dynamic_fields.keys():
setattr(self, k, data["_data"].get(k)) setattr(self, k, data["_data"].get(k))
@@ -250,7 +229,7 @@ class BaseDocument(object):
u = self.__str__() u = self.__str__()
except (UnicodeEncodeError, UnicodeDecodeError): except (UnicodeEncodeError, UnicodeDecodeError):
u = '[Bad Unicode data]' u = '[Bad Unicode data]'
repr_type = str if u is None else type(u) repr_type = type(u)
return repr_type('<%s: %s>' % (self.__class__.__name__, u)) return repr_type('<%s: %s>' % (self.__class__.__name__, u))
def __str__(self): def __str__(self):
@@ -262,12 +241,10 @@ class BaseDocument(object):
return txt_type('%s object' % self.__class__.__name__) return txt_type('%s object' % self.__class__.__name__)
def __eq__(self, other): def __eq__(self, other):
if isinstance(other, self.__class__) and hasattr(other, 'id') and other.id is not None: if isinstance(other, self.__class__) and hasattr(other, 'id'):
return self.id == other.id return self.id == other.id
if isinstance(other, DBRef): if isinstance(other, DBRef):
return self._get_collection_name() == other.collection and self.id == other.id return self._get_collection_name() == other.collection and self.id == other.id
if self.id is None:
return self is other
return False return False
def __ne__(self, other): def __ne__(self, other):
@@ -290,27 +267,14 @@ class BaseDocument(object):
""" """
pass pass
def get_text_score(self): def to_mongo(self, use_db_field=True, fields=[]):
"""
Get text score from text query
"""
if '_text_score' not in self._data:
raise InvalidDocumentError('This document is not originally built from a text query')
return self._data['_text_score']
def to_mongo(self, use_db_field=True, fields=None):
""" """
Return as SON data ready for use with MongoDB. Return as SON data ready for use with MongoDB.
""" """
if not fields:
fields = []
data = SON() data = SON()
data["_id"] = None data["_id"] = None
data['_cls'] = self._class_name data['_cls'] = self._class_name
EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
# only root fields ['test1.a', 'test2'] => ['test1', 'test2'] # only root fields ['test1.a', 'test2'] => ['test1', 'test2']
root_fields = set([f.split('.')[0] for f in fields]) root_fields = set([f.split('.')[0] for f in fields])
@@ -325,20 +289,21 @@ class BaseDocument(object):
field = self._dynamic_fields.get(field_name) field = self._dynamic_fields.get(field_name)
if value is not None: if value is not None:
f_inputs = field.to_mongo.__code__.co_varnames
ex_vars = {}
if fields and 'fields' in f_inputs:
key = '%s.' % field_name
embedded_fields = [
i.replace(key, '') for i in fields
if i.startswith(key)]
ex_vars['fields'] = embedded_fields if isinstance(field, (EmbeddedDocumentField)):
if fields:
key = '%s.' % field_name
embedded_fields = [
i.replace(key, '') for i in fields
if i.startswith(key)]
if 'use_db_field' in f_inputs: else:
ex_vars['use_db_field'] = use_db_field embedded_fields = []
value = field.to_mongo(value, **ex_vars) value = field.to_mongo(value, use_db_field=use_db_field,
fields=embedded_fields)
else:
value = field.to_mongo(value)
# Handle self generating fields # Handle self generating fields
if value is None and field._auto_gen: if value is None and field._auto_gen:
@@ -414,24 +379,22 @@ class BaseDocument(object):
def to_json(self, *args, **kwargs): def to_json(self, *args, **kwargs):
"""Converts a document to JSON. """Converts a document to JSON.
:param use_db_field: Set to True by default; if set to False, the JSON :param use_db_field: Set to True by default; if set to False, the JSON output uses the field names rather than the stored MongoDB db_names
output uses the field names rather than the stored MongoDB db_names
""" """
use_db_field = kwargs.pop('use_db_field', True) use_db_field = kwargs.pop('use_db_field') if kwargs.has_key(
return json_util.dumps(self.to_mongo(use_db_field), *args, **kwargs) 'use_db_field') else True
return json_util.dumps(self.to_mongo(use_db_field), *args, **kwargs)
@classmethod @classmethod
def from_json(cls, json_data, created=False): def from_json(cls, json_data):
"""Converts json data to an unsaved document instance""" """Converts json data to an unsaved document instance"""
return cls._from_son(json_util.loads(json_data), created=created) return cls._from_son(json_util.loads(json_data))
def __expand_dynamic_values(self, name, value): def __expand_dynamic_values(self, name, value):
"""expand any dynamic values to their correct types / values""" """expand any dynamic values to their correct types / values"""
if not isinstance(value, (dict, list, tuple)): if not isinstance(value, (dict, list, tuple)):
return value return value
EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField')
is_list = False is_list = False
if not hasattr(value, 'items'): if not hasattr(value, 'items'):
is_list = True is_list = True
@@ -455,10 +418,7 @@ class BaseDocument(object):
# Convert lists / values so we can watch for any changes on them # Convert lists / values so we can watch for any changes on them
if (isinstance(value, (list, tuple)) and if (isinstance(value, (list, tuple)) and
not isinstance(value, BaseList)): not isinstance(value, BaseList)):
if issubclass(type(self), EmbeddedDocumentListField): value = BaseList(value, self, name)
value = EmbeddedDocumentList(value, self, name)
else:
value = BaseList(value, self, name)
elif isinstance(value, dict) and not isinstance(value, BaseDict): elif isinstance(value, dict) and not isinstance(value, BaseDict):
value = BaseDict(value, self, name) value = BaseDict(value, self, name)
@@ -481,19 +441,7 @@ class BaseDocument(object):
key = self._db_field_map.get(key, key) key = self._db_field_map.get(key, key)
if key not in self._changed_fields: if key not in self._changed_fields:
levels, idx = key.split('.'), 1 self._changed_fields.append(key)
while idx <= len(levels):
if '.'.join(levels[:idx]) in self._changed_fields:
break
idx += 1
else:
self._changed_fields.append(key)
# remove lower level changed fields
level = '.'.join(levels[:idx]) + '.'
remove = self._changed_fields.remove
for field in self._changed_fields[:]:
if field.startswith(level):
remove(field)
def _clear_changed_fields(self): def _clear_changed_fields(self):
"""Using get_changed_fields iterate and remove any fields that are """Using get_changed_fields iterate and remove any fields that are
@@ -545,7 +493,6 @@ class BaseDocument(object):
EmbeddedDocument = _import_class("EmbeddedDocument") EmbeddedDocument = _import_class("EmbeddedDocument")
DynamicEmbeddedDocument = _import_class("DynamicEmbeddedDocument") DynamicEmbeddedDocument = _import_class("DynamicEmbeddedDocument")
ReferenceField = _import_class("ReferenceField") ReferenceField = _import_class("ReferenceField")
SortedListField = _import_class("SortedListField")
changed_fields = [] changed_fields = []
changed_fields += getattr(self, '_changed_fields', []) changed_fields += getattr(self, '_changed_fields', [])
@@ -564,13 +511,12 @@ class BaseDocument(object):
if hasattr(data, 'id'): if hasattr(data, 'id'):
if data.id in inspected: if data.id in inspected:
continue continue
inspected.add(data.id)
if isinstance(field, ReferenceField): if isinstance(field, ReferenceField):
continue continue
elif ( elif (isinstance(data, (EmbeddedDocument, DynamicEmbeddedDocument))
isinstance(data, (EmbeddedDocument, DynamicEmbeddedDocument)) and and db_field_name not in changed_fields):
db_field_name not in changed_fields # Find all embedded fields that have been changed
):
# Find all embedded fields that have been changed
changed = data._get_changed_fields(inspected) changed = data._get_changed_fields(inspected)
changed_fields += ["%s%s" % (key, k) for k in changed if k] changed_fields += ["%s%s" % (key, k) for k in changed if k]
elif (isinstance(data, (list, tuple, dict)) and elif (isinstance(data, (list, tuple, dict)) and
@@ -578,12 +524,6 @@ class BaseDocument(object):
if (hasattr(field, 'field') and if (hasattr(field, 'field') and
isinstance(field.field, ReferenceField)): isinstance(field.field, ReferenceField)):
continue continue
elif isinstance(field, SortedListField) and field._ordering:
# if ordering is affected whole list is changed
if any(map(lambda d: field._ordering in d._changed_fields, data)):
changed_fields.append(db_field_name)
continue
self._nestable_types_changed_fields( self._nestable_types_changed_fields(
changed_fields, key, data, inspected) changed_fields, key, data, inspected)
return changed_fields return changed_fields
@@ -608,9 +548,7 @@ class BaseDocument(object):
for p in parts: for p in parts:
if isinstance(d, (ObjectId, DBRef)): if isinstance(d, (ObjectId, DBRef)):
break break
elif isinstance(d, list) and p.lstrip('-').isdigit(): elif isinstance(d, list) and p.isdigit():
if p[0] == '-':
p = str(len(d) + int(p))
try: try:
d = d[int(p)] d = d[int(p)]
except IndexError: except IndexError:
@@ -623,18 +561,18 @@ class BaseDocument(object):
else: else:
set_data = doc set_data = doc
if '_id' in set_data: if '_id' in set_data:
del set_data['_id'] del(set_data['_id'])
# Determine if any changed items were actually unset. # Determine if any changed items were actually unset.
for path, value in set_data.items(): for path, value in set_data.items():
if value or isinstance(value, (numbers.Number, bool)): if value or isinstance(value, (numbers.Number, bool)):
continue continue
# If we've set a value that ain't the default value don't unset it. # If we've set a value that ain't the default value dont unset it.
default = None default = None
if (self._dynamic and len(parts) and parts[0] in if (self._dynamic and len(parts) and parts[0] in
self._dynamic_fields): self._dynamic_fields):
del set_data[path] del(set_data[path])
unset_data[path] = 1 unset_data[path] = 1
continue continue
elif path in self._fields: elif path in self._fields:
@@ -644,9 +582,7 @@ class BaseDocument(object):
parts = path.split('.') parts = path.split('.')
db_field_name = parts.pop() db_field_name = parts.pop()
for p in parts: for p in parts:
if isinstance(d, list) and p.lstrip('-').isdigit(): if isinstance(d, list) and p.isdigit():
if p[0] == '-':
p = str(len(d) + int(p))
d = d[int(p)] d = d[int(p)]
elif (hasattr(d, '__getattribute__') and elif (hasattr(d, '__getattribute__') and
not isinstance(d, dict)): not isinstance(d, dict)):
@@ -670,22 +606,20 @@ class BaseDocument(object):
if default != value: if default != value:
continue continue
del set_data[path] del(set_data[path])
unset_data[path] = 1 unset_data[path] = 1
return set_data, unset_data return set_data, unset_data
@classmethod @classmethod
def _get_collection_name(cls): def _get_collection_name(cls):
"""Returns the collection name for this class. None for abstract class """Returns the collection name for this class.
""" """
return cls._meta.get('collection', None) return cls._meta.get('collection', None)
@classmethod @classmethod
def _from_son(cls, son, _auto_dereference=True, only_fields=None, created=False): def _from_son(cls, son, _auto_dereference=True, only_fields=[]):
"""Create an instance of a Document (subclass) from a PyMongo SON. """Create an instance of a Document (subclass) from a PyMongo SON.
""" """
if not only_fields:
only_fields = []
# get the class name from the document, falling back to the given # get the class name from the document, falling back to the given
# class if unavailable # class if unavailable
@@ -714,6 +648,14 @@ class BaseDocument(object):
del data[field.db_field] del data[field.db_field]
except (AttributeError, ValueError), e: except (AttributeError, ValueError), e:
errors_dict[field_name] = e errors_dict[field_name] = e
elif field.default:
default = field.default
if callable(default):
default = default()
if isinstance(default, BaseDocument):
changed_fields.append(field_name)
elif not only_fields or field_name in only_fields:
changed_fields.append(field_name)
if errors_dict: if errors_dict:
errors = "\n".join(["%s - %s" % (k, v) errors = "\n".join(["%s - %s" % (k, v)
@@ -725,7 +667,7 @@ class BaseDocument(object):
if cls.STRICT: if cls.STRICT:
data = dict((k, v) data = dict((k, v)
for k, v in data.iteritems() if k in cls._fields) for k, v in data.iteritems() if k in cls._fields)
obj = cls(__auto_convert=False, _created=created, __only_fields=only_fields, **data) obj = cls(__auto_convert=False, _created=False, __only_fields=only_fields, **data)
obj._changed_fields = changed_fields obj._changed_fields = changed_fields
if not _auto_dereference: if not _auto_dereference:
obj._fields = fields obj._fields = fields
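The block added on the right-hand side makes _from_son mark fields that fall back to a default as changed, so the default is persisted on the next save. A sketch of the effect, assuming an invented document:

from datetime import datetime
from mongoengine import Document, DateTimeField

class Post(Document):
    created = DateTimeField(default=datetime.utcnow)

post = Post._from_son({})                  # no 'created' key in the raw SON
assert 'created' in post._changed_fields   # the default will be written back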
@@ -748,7 +690,7 @@ class BaseDocument(object):
spec_fields = [v['fields'] spec_fields = [v['fields']
for k, v in enumerate(index_specs)] for k, v in enumerate(index_specs)]
# Merge unique_indexes with existing specs # Merge unqiue_indexes with existing specs
for k, v in enumerate(indices): for k, v in enumerate(indices):
if v['fields'] in spec_fields: if v['fields'] in spec_fields:
index_specs[spec_fields.index(v['fields'])].update(v) index_specs[spec_fields.index(v['fields'])].update(v)
@@ -777,12 +719,8 @@ class BaseDocument(object):
# Check to see if we need to include _cls # Check to see if we need to include _cls
allow_inheritance = cls._meta.get('allow_inheritance', allow_inheritance = cls._meta.get('allow_inheritance',
ALLOW_INHERITANCE) ALLOW_INHERITANCE)
include_cls = ( include_cls = (allow_inheritance and not spec.get('sparse', False) and
allow_inheritance and spec.get('cls', True))
not spec.get('sparse', False) and
spec.get('cls', True) and
'_cls' not in spec['fields']
)
# 733: don't include cls if index_cls is False unless there is an explicit cls with the index # 733: don't include cls if index_cls is False unless there is an explicit cls with the index
include_cls = include_cls and (spec.get('cls', False) or cls._meta.get('index_cls', True)) include_cls = include_cls and (spec.get('cls', False) or cls._meta.get('index_cls', True))
@@ -795,25 +733,16 @@ class BaseDocument(object):
# ASCENDING from + # ASCENDING from +
# DESCENDING from - # DESCENDING from -
# TEXT from $
# HASHED from #
# GEOSPHERE from (
# GEOHAYSTACK from )
# GEO2D from * # GEO2D from *
# TEXT from $
direction = pymongo.ASCENDING direction = pymongo.ASCENDING
if key.startswith("-"): if key.startswith("-"):
direction = pymongo.DESCENDING direction = pymongo.DESCENDING
elif key.startswith("$"):
direction = pymongo.TEXT
elif key.startswith("#"):
direction = pymongo.HASHED
elif key.startswith("("):
direction = pymongo.GEOSPHERE
elif key.startswith(")"):
direction = pymongo.GEOHAYSTACK
elif key.startswith("*"): elif key.startswith("*"):
direction = pymongo.GEO2D direction = pymongo.GEO2D
if key.startswith(("+", "-", "*", "$", "#", "(", ")")): elif key.startswith("$"):
direction = pymongo.TEXT
if key.startswith(("+", "-", "*", "$")):
key = key[1:] key = key[1:]
# Use real field name, do it manually because we need field # Use real field name, do it manually because we need field
@@ -821,6 +750,7 @@ class BaseDocument(object):
parts = key.split('.') parts = key.split('.')
if parts in (['pk'], ['id'], ['_id']): if parts in (['pk'], ['id'], ['_id']):
key = '_id' key = '_id'
fields = []
else: else:
fields = cls._lookup_field(parts) fields = cls._lookup_field(parts)
parts = [] parts = []
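The branches removed here are the extra index prefixes the left-hand side understands ('#', '(' and ')'); both sides keep '+', '-', '*' and '$'. A sketch of the prefix syntax as it appears in a document's meta, with invented fields:

from mongoengine import Document, GeoPointField, StringField

class Place(Document):
    name = StringField()
    loc = GeoPointField()
    meta = {
        'indexes': [
            '-name',   # '-' -> pymongo.DESCENDING (both sides)
            '$name',   # '$' -> pymongo.TEXT (both sides)
            '*loc',    # '*' -> pymongo.GEO2D (both sides)
            # '#name' (HASHED) and '(loc' (GEOSPHERE) parse on the left-hand side only
        ],
    }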
@@ -835,12 +765,15 @@ class BaseDocument(object):
index_list.append((key, direction)) index_list.append((key, direction))
# Don't add cls to a geo index # Don't add cls to a geo index
if include_cls and direction not in ( if include_cls and direction is not pymongo.GEO2D:
pymongo.GEO2D, pymongo.GEOHAYSTACK, pymongo.GEOSPHERE):
index_list.insert(0, ('_cls', 1)) index_list.insert(0, ('_cls', 1))
if index_list: if index_list:
spec['fields'] = index_list spec['fields'] = index_list
if spec.get('sparse', False) and len(spec['fields']) > 1:
raise ValueError(
'Sparse indexes can only have one field in them. '
'See https://jira.mongodb.org/browse/SERVER-2193')
return spec return spec
@@ -851,9 +784,10 @@ class BaseDocument(object):
""" """
unique_indexes = [] unique_indexes = []
for field_name, field in cls._fields.items(): for field_name, field in cls._fields.items():
sparse = field.sparse sparse = False
# Generate a list of indexes needed by uniqueness constraints # Generate a list of indexes needed by uniqueness constraints
if field.unique: if field.unique:
field.required = True
unique_fields = [field.db_field] unique_fields = [field.db_field]
# Add any unique_with fields to the back of the index spec # Add any unique_with fields to the back of the index spec
@@ -881,9 +815,6 @@ class BaseDocument(object):
index = {'fields': fields, 'unique': True, 'sparse': sparse} index = {'fields': fields, 'unique': True, 'sparse': sparse}
unique_indexes.append(index) unique_indexes.append(index)
if field.__class__.__name__ == "ListField":
field = field.field
# Grab any embedded document field unique indexes # Grab any embedded document field unique indexes
if (field.__class__.__name__ == "EmbeddedDocumentField" and if (field.__class__.__name__ == "EmbeddedDocumentField" and
field.document_type != cls): field.document_type != cls):
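This hunk builds unique index specs from unique/unique_with declarations; the left-hand side also honours a per-field sparse flag and unwraps ListFields. Illustrative declarations, with invented names:

from mongoengine import Document, StringField

class User(Document):
    first_name = StringField()
    last_name = StringField(unique_with='first_name')
    # -> one unique index on ('last_name', 'first_name')
    email = StringField(unique=True, sparse=True)
    # sparse is honoured on the left-hand side only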
@@ -930,7 +861,6 @@ class BaseDocument(object):
""" """
ListField = _import_class("ListField") ListField = _import_class("ListField")
DynamicField = _import_class('DynamicField')
if not isinstance(parts, (list, tuple)): if not isinstance(parts, (list, tuple)):
parts = [parts] parts = [parts]
@@ -940,6 +870,7 @@ class BaseDocument(object):
for field_name in parts: for field_name in parts:
# Handle ListField indexing: # Handle ListField indexing:
if field_name.isdigit() and isinstance(field, ListField): if field_name.isdigit() and isinstance(field, ListField):
new_field = field.field
fields.append(field_name) fields.append(field_name)
continue continue
@@ -951,19 +882,8 @@ class BaseDocument(object):
if field_name in cls._fields: if field_name in cls._fields:
field = cls._fields[field_name] field = cls._fields[field_name]
elif cls._dynamic: elif cls._dynamic:
DynamicField = _import_class('DynamicField')
field = DynamicField(db_field=field_name) field = DynamicField(db_field=field_name)
elif cls._meta.get("allow_inheritance", False) or cls._meta.get("abstract", False):
# 744: in case the field is defined in a subclass
for subcls in cls.__subclasses__():
try:
field = subcls._lookup_field([field_name])[0]
except LookUpError:
continue
if field is not None:
break
else:
raise LookUpError('Cannot resolve field "%s"' % field_name)
else: else:
raise LookUpError('Cannot resolve field "%s"' raise LookUpError('Cannot resolve field "%s"'
% field_name) % field_name)
@@ -975,20 +895,21 @@ class BaseDocument(object):
'__'.join(parts)) '__'.join(parts))
if hasattr(getattr(field, 'field', None), 'lookup_member'): if hasattr(getattr(field, 'field', None), 'lookup_member'):
new_field = field.field.lookup_member(field_name) new_field = field.field.lookup_member(field_name)
elif cls._dynamic and (isinstance(field, DynamicField) or
getattr(getattr(field, 'document_type', None), '_dynamic', None)):
new_field = DynamicField(db_field=field_name)
else: else:
# Look up subfield on the previous field or raise # Look up subfield on the previous field
try: new_field = field.lookup_member(field_name)
new_field = field.lookup_member(field_name)
except AttributeError:
raise LookUpError('Cannot resolve subfield or operator {} '
'on the field {}'.format(
field_name, field.name))
if not new_field and isinstance(field, ComplexBaseField): if not new_field and isinstance(field, ComplexBaseField):
fields.append(field_name) if hasattr(field.field, 'document_type') and cls._dynamic \
continue and field.field.document_type._dynamic:
DynamicField = _import_class('DynamicField')
new_field = DynamicField(db_field=field_name)
else:
fields.append(field_name)
continue
elif not new_field and hasattr(field, 'document_type') and cls._dynamic \
and field.document_type._dynamic:
DynamicField = _import_class('DynamicField')
new_field = DynamicField(db_field=field_name)
elif not new_field: elif not new_field:
raise LookUpError('Cannot resolve field "%s"' raise LookUpError('Cannot resolve field "%s"'
% field_name) % field_name)
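_lookup_field is what resolves double-underscore paths in queries and index specs; the left-hand side additionally searches subclasses (issue 744) and raises a clearer error for unknown sub-fields. An invented example of the kind of path it resolves:

from mongoengine import Document, ListField, StringField

class Page(Document):
    tags = ListField(StringField())

Page.objects(tags__0='mongodb')   # '0' is handled by the ListField branch above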


@@ -5,23 +5,18 @@ import weakref
from bson import DBRef, ObjectId, SON from bson import DBRef, ObjectId, SON
import pymongo import pymongo
from mongoengine.base.common import ALLOW_INHERITANCE
from mongoengine.base.datastructures import (
BaseDict, BaseList, EmbeddedDocumentList
)
from mongoengine.common import _import_class from mongoengine.common import _import_class
from mongoengine.errors import ValidationError from mongoengine.errors import ValidationError
from mongoengine.base.common import ALLOW_INHERITANCE
from mongoengine.base.datastructures import BaseDict, BaseList
__all__ = ("BaseField", "ComplexBaseField", __all__ = ("BaseField", "ComplexBaseField",
"ObjectIdField", "GeoJsonBaseField") "ObjectIdField", "GeoJsonBaseField")
UPDATE_OPERATORS = set(['set', 'unset', 'inc', 'dec', 'pop', 'push',
'push_all', 'pull', 'pull_all', 'add_to_set',
'set_on_insert', 'min', 'max'])
class BaseField(object): class BaseField(object):
"""A base class for fields in a MongoDB document. Instances of this class """A base class for fields in a MongoDB document. Instances of this class
may be added to subclasses of `Document` to define a document's schema. may be added to subclasses of `Document` to define a document's schema.
@@ -41,8 +36,8 @@ class BaseField(object):
def __init__(self, db_field=None, name=None, required=False, default=None, def __init__(self, db_field=None, name=None, required=False, default=None,
unique=False, unique_with=None, primary_key=False, unique=False, unique_with=None, primary_key=False,
validation=None, choices=None, null=False, sparse=False, validation=None, choices=None, verbose_name=None,
**kwargs): help_text=None):
""" """
:param db_field: The database field to store this field in :param db_field: The database field to store this field in
(defaults to the name of the field) (defaults to the name of the field)
@@ -60,15 +55,11 @@ class BaseField(object):
field. Generally this is deprecated in favour of the field. Generally this is deprecated in favour of the
`FIELD.validate` method `FIELD.validate` method
:param choices: (optional) The valid choices :param choices: (optional) The valid choices
:param null: (optional) Is the field value can be null. If no and there is a default value :param verbose_name: (optional) The verbose name for the field.
then the default value is set Designed to be human readable and is often used when generating
:param sparse: (optional) `sparse=True` combined with `unique=True` and `required=False` model forms from the document model.
means that uniqueness won't be enforced for `None` values :param help_text: (optional) The help text for this field and is often
:param **kwargs: (optional) Arbitrary indirection-free metadata for used when generating model forms from the document model.
this field can be supplied as additional keyword arguments and
accessed as attributes of the field. Must not conflict with any
existing attributes. Common metadata includes `verbose_name` and
`help_text`.
""" """
self.db_field = (db_field or name) if not primary_key else '_id' self.db_field = (db_field or name) if not primary_key else '_id'
@@ -82,19 +73,8 @@ class BaseField(object):
self.primary_key = primary_key self.primary_key = primary_key
self.validation = validation self.validation = validation
self.choices = choices self.choices = choices
self.null = null self.verbose_name = verbose_name
self.sparse = sparse self.help_text = help_text
self._owner_document = None
# Detect and report conflicts between metadata and base properties.
conflicts = set(dir(self)) & set(kwargs)
if conflicts:
raise TypeError("%s already has attribute(s): %s" % (
self.__class__.__name__, ', '.join(conflicts)))
# Assign metadata to the instance
# This efficient method is available because no __slots__ are defined.
self.__dict__.update(kwargs)
# Adjust the appropriate creation counter, and save our local copy. # Adjust the appropriate creation counter, and save our local copy.
if self.db_field == '_id': if self.db_field == '_id':
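The two signatures contrasted above: the right-hand side takes explicit verbose_name/help_text parameters, while the left-hand side accepts arbitrary keyword metadata and copies it onto the field instance. The same call works either way; a sketch:

from mongoengine import StringField

name = StringField(verbose_name='Full name',
                   help_text='Shown on the profile page')
assert name.verbose_name == 'Full name'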
@@ -118,22 +98,19 @@ class BaseField(object):
"""Descriptor for assigning a value to a field in a document. """Descriptor for assigning a value to a field in a document.
""" """
# If setting to None and there is a default # If setting to None and theres a default
# Then set the value to the default value # Then set the value to the default value
if value is None: if value is None and self.default is not None:
if self.null: value = self.default
value = None if callable(value):
elif self.default is not None: value = value()
value = self.default
if callable(value):
value = value()
if instance._initialised: if instance._initialised:
try: try:
if (self.name not in instance._data or if (self.name not in instance._data or
instance._data[self.name] != value): instance._data[self.name] != value):
instance._mark_as_changed(self.name) instance._mark_as_changed(self.name)
except Exception: except:
# Values can't be compared, e.g. naive and tz datetimes # Values can't be compared, e.g. naive and tz datetimes
# So mark it as changed # So mark it as changed
instance._mark_as_changed(self.name) instance._mark_as_changed(self.name)
@@ -141,10 +118,6 @@ class BaseField(object):
EmbeddedDocument = _import_class('EmbeddedDocument') EmbeddedDocument = _import_class('EmbeddedDocument')
if isinstance(value, EmbeddedDocument): if isinstance(value, EmbeddedDocument):
value._instance = weakref.proxy(instance) value._instance = weakref.proxy(instance)
elif isinstance(value, (list, tuple)):
for v in value:
if isinstance(v, EmbeddedDocument):
v._instance = weakref.proxy(instance)
instance._data[self.name] = value instance._data[self.name] = value
def error(self, message="", errors=None, field_name=None): def error(self, message="", errors=None, field_name=None):
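__set__ above turns an assignment of None into the field default; the left-hand side additionally keeps None when the field was declared with null=True. A sketch with an invented document:

from mongoengine import Document, StringField

class Profile(Document):
    nickname = StringField(default='anon')

p = Profile(nickname='bob')
p.nickname = None
assert p.nickname == 'anon'   # default restored; with null=True
                              # (left-hand side only) None would be kept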
@@ -163,24 +136,9 @@ class BaseField(object):
""" """
return self.to_python(value) return self.to_python(value)
def _to_mongo_safe_call(self, value, use_db_field=True, fields=None):
"""A helper method to call to_mongo with proper inputs
"""
f_inputs = self.to_mongo.__code__.co_varnames
ex_vars = {}
if 'fields' in f_inputs:
ex_vars['fields'] = fields
if 'use_db_field' in f_inputs:
ex_vars['use_db_field'] = use_db_field
return self.to_mongo(value, **ex_vars)
def prepare_query_value(self, op, value): def prepare_query_value(self, op, value):
"""Prepare a value that is being used in a query for PyMongo. """Prepare a value that is being used in a query for PyMongo.
""" """
if op in UPDATE_OPERATORS:
self.validate(value)
return value return value
def validate(self, value, clean=True): def validate(self, value, clean=True):
@@ -188,28 +146,24 @@ class BaseField(object):
""" """
pass pass
def _validate_choices(self, value): def _validate(self, value, **kwargs):
Document = _import_class('Document') Document = _import_class('Document')
EmbeddedDocument = _import_class('EmbeddedDocument') EmbeddedDocument = _import_class('EmbeddedDocument')
# check choices
choice_list = self.choices
if isinstance(choice_list[0], (list, tuple)):
choice_list = [k for k, _ in choice_list]
# Choices which are other types of Documents
if isinstance(value, (Document, EmbeddedDocument)):
if not any(isinstance(value, c) for c in choice_list):
self.error(
'Value must be instance of %s' % unicode(choice_list)
)
# Choices which are types other than Documents
elif value not in choice_list:
self.error('Value must be one of %s' % unicode(choice_list))
def _validate(self, value, **kwargs):
# Check the Choices Constraint
if self.choices: if self.choices:
self._validate_choices(value) is_cls = isinstance(value, (Document, EmbeddedDocument))
value_to_check = value.__class__ if is_cls else value
err_msg = 'an instance' if is_cls else 'one'
if isinstance(self.choices[0], (list, tuple)):
option_keys = [k for k, v in self.choices]
if value_to_check not in option_keys:
msg = ('Value must be %s of %s' %
(err_msg, unicode(option_keys)))
self.error(msg)
elif value_to_check not in self.choices:
msg = ('Value must be %s of %s' %
(err_msg, unicode(self.choices)))
self.error(msg)
# check validation argument # check validation argument
if self.validation is not None: if self.validation is not None:
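Both sides enforce the choices constraint; the left-hand side factors it into _validate_choices and also accepts Document/EmbeddedDocument subclasses as choices. A sketch with an invented document:

from mongoengine import Document, StringField

class Shirt(Document):
    size = StringField(choices=(('S', 'Small'), ('M', 'Medium')))

Shirt(size='S').validate()    # passes: 'S' is one of the option keys
Shirt(size='XL').validate()   # raises ValidationError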
@@ -222,19 +176,9 @@ class BaseField(object):
self.validate(value, **kwargs) self.validate(value, **kwargs)
@property
def owner_document(self):
return self._owner_document
def _set_owner_document(self, owner_document):
self._owner_document = owner_document
@owner_document.setter
def owner_document(self, owner_document):
self._set_owner_document(owner_document)
class ComplexBaseField(BaseField): class ComplexBaseField(BaseField):
"""Handles complex fields, such as lists / dictionaries. """Handles complex fields, such as lists / dictionaries.
Allows for nesting of embedded documents inside complex types. Allows for nesting of embedded documents inside complex types.
@@ -255,7 +199,6 @@ class ComplexBaseField(BaseField):
ReferenceField = _import_class('ReferenceField') ReferenceField = _import_class('ReferenceField')
GenericReferenceField = _import_class('GenericReferenceField') GenericReferenceField = _import_class('GenericReferenceField')
EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField')
dereference = (self._auto_dereference and dereference = (self._auto_dereference and
(self.field is None or isinstance(self.field, (self.field is None or isinstance(self.field,
(GenericReferenceField, ReferenceField)))) (GenericReferenceField, ReferenceField))))
@@ -272,20 +215,17 @@ class ComplexBaseField(BaseField):
value = super(ComplexBaseField, self).__get__(instance, owner) value = super(ComplexBaseField, self).__get__(instance, owner)
# Convert lists / values so we can watch for any changes on them # Convert lists / values so we can watch for any changes on them
if isinstance(value, (list, tuple)): if (isinstance(value, (list, tuple)) and
if (issubclass(type(self), EmbeddedDocumentListField) and not isinstance(value, BaseList)):
not isinstance(value, EmbeddedDocumentList)): value = BaseList(value, instance, self.name)
value = EmbeddedDocumentList(value, instance, self.name)
elif not isinstance(value, BaseList):
value = BaseList(value, instance, self.name)
instance._data[self.name] = value instance._data[self.name] = value
elif isinstance(value, dict) and not isinstance(value, BaseDict): elif isinstance(value, dict) and not isinstance(value, BaseDict):
value = BaseDict(value, instance, self.name) value = BaseDict(value, instance, self.name)
instance._data[self.name] = value instance._data[self.name] = value
if (self._auto_dereference and instance._initialised and if (self._auto_dereference and instance._initialised and
isinstance(value, (BaseList, BaseDict)) and isinstance(value, (BaseList, BaseDict))
not value._dereferenced): and not value._dereferenced):
value = _dereference( value = _dereference(
value, max_depth=1, instance=instance, name=self.name value, max_depth=1, instance=instance, name=self.name
) )
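The wrapping above is what makes in-place mutation observable: lists and dicts come back as BaseList/BaseDict (and, on the left-hand side, EmbeddedDocumentList), which call _mark_as_changed on the owning document. A sketch, assuming a running mongod and an invented document:

from mongoengine import Document, ListField, StringField, connect

connect('demo')

class Post(Document):
    tags = ListField(StringField())

post = Post(tags=['db']).save()
post.tags.append('mongo')   # BaseList marks 'tags' as changed
post.save()                 # only the changed field is written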
@@ -297,6 +237,8 @@ class ComplexBaseField(BaseField):
def to_python(self, value): def to_python(self, value):
"""Convert a MongoDB-compatible type to a Python type. """Convert a MongoDB-compatible type to a Python type.
""" """
Document = _import_class('Document')
if isinstance(value, basestring): if isinstance(value, basestring):
return value return value
@@ -312,11 +254,9 @@ class ComplexBaseField(BaseField):
return value return value
if self.field: if self.field:
self.field._auto_dereference = self._auto_dereference
value_dict = dict([(key, self.field.to_python(item)) value_dict = dict([(key, self.field.to_python(item))
for key, item in value.items()]) for key, item in value.items()])
else: else:
Document = _import_class('Document')
value_dict = {} value_dict = {}
for k, v in value.items(): for k, v in value.items():
if isinstance(v, Document): if isinstance(v, Document):
@@ -332,11 +272,11 @@ class ComplexBaseField(BaseField):
value_dict[k] = self.to_python(v) value_dict[k] = self.to_python(v)
if is_list: # Convert back to a list if is_list: # Convert back to a list
return [v for _, v in sorted(value_dict.items(), return [v for k, v in sorted(value_dict.items(),
key=operator.itemgetter(0))] key=operator.itemgetter(0))]
return value_dict return value_dict
def to_mongo(self, value, use_db_field=True, fields=None): def to_mongo(self, value):
"""Convert a Python type to a MongoDB-compatible type. """Convert a Python type to a MongoDB-compatible type.
""" """
Document = _import_class("Document") Document = _import_class("Document")
@@ -350,9 +290,9 @@ class ComplexBaseField(BaseField):
if isinstance(value, Document): if isinstance(value, Document):
return GenericReferenceField().to_mongo(value) return GenericReferenceField().to_mongo(value)
cls = value.__class__ cls = value.__class__
val = value.to_mongo(use_db_field, fields) val = value.to_mongo()
# If it's a document that is not inherited add _cls # If we its a document thats not inherited add _cls
if isinstance(value, EmbeddedDocument): if (isinstance(value, EmbeddedDocument)):
val['_cls'] = cls.__name__ val['_cls'] = cls.__name__
return val return val
@@ -365,7 +305,7 @@ class ComplexBaseField(BaseField):
return value return value
if self.field: if self.field:
value_dict = dict([(key, self.field._to_mongo_safe_call(item, use_db_field, fields)) value_dict = dict([(key, self.field.to_mongo(item))
for key, item in value.iteritems()]) for key, item in value.iteritems()])
else: else:
value_dict = {} value_dict = {}
@@ -390,16 +330,16 @@ class ComplexBaseField(BaseField):
value_dict[k] = DBRef(collection, v.pk) value_dict[k] = DBRef(collection, v.pk)
elif hasattr(v, 'to_mongo'): elif hasattr(v, 'to_mongo'):
cls = v.__class__ cls = v.__class__
val = v.to_mongo(use_db_field, fields) val = v.to_mongo()
# If it's a document that is not inherited add _cls # If we its a document thats not inherited add _cls
if isinstance(v, (Document, EmbeddedDocument)): if (isinstance(v, (Document, EmbeddedDocument))):
val['_cls'] = cls.__name__ val['_cls'] = cls.__name__
value_dict[k] = val value_dict[k] = val
else: else:
value_dict[k] = self.to_mongo(v, use_db_field, fields) value_dict[k] = self.to_mongo(v)
if is_list: # Convert back to a list if is_list: # Convert back to a list
return [v for _, v in sorted(value_dict.items(), return [v for k, v in sorted(value_dict.items(),
key=operator.itemgetter(0))] key=operator.itemgetter(0))]
return value_dict return value_dict
@@ -441,17 +381,20 @@ class ComplexBaseField(BaseField):
self.field.owner_document = owner_document self.field.owner_document = owner_document
self._owner_document = owner_document self._owner_document = owner_document
def _get_owner_document(self, owner_document):
self._owner_document = owner_document
owner_document = property(_get_owner_document, _set_owner_document)
class ObjectIdField(BaseField): class ObjectIdField(BaseField):
"""A field wrapper around MongoDB's ObjectIds. """A field wrapper around MongoDB's ObjectIds.
""" """
def to_python(self, value): def to_python(self, value):
try: if not isinstance(value, ObjectId):
if not isinstance(value, ObjectId): value = ObjectId(value)
value = ObjectId(value)
except Exception:
pass
return value return value
def to_mongo(self, value): def to_mongo(self, value):
@@ -469,13 +412,13 @@ class ObjectIdField(BaseField):
def validate(self, value): def validate(self, value):
try: try:
ObjectId(unicode(value)) ObjectId(unicode(value))
except Exception: except:
self.error('Invalid Object ID') self.error('Invalid Object ID')
class GeoJsonBaseField(BaseField): class GeoJsonBaseField(BaseField):
"""A geo json field storing a geojson style object.
"""A geo json field storing a geojson style object.
.. versionadded:: 0.8 .. versionadded:: 0.8
""" """
@@ -484,8 +427,8 @@ class GeoJsonBaseField(BaseField):
def __init__(self, auto_index=True, *args, **kwargs): def __init__(self, auto_index=True, *args, **kwargs):
""" """
:param bool auto_index: Automatically create a "2dsphere" index.\ :param auto_index: Automatically create a "2dsphere" index. Defaults
Defaults to `True`. to `True`.
""" """
self._name = "%sField" % self._type self._name = "%sField" % self._type
if not auto_index: if not auto_index:
@@ -521,7 +464,7 @@ class GeoJsonBaseField(BaseField):
# Quick and dirty validator # Quick and dirty validator
try: try:
value[0][0][0] value[0][0][0]
except (TypeError, IndexError): except:
return "Invalid Polygon must contain at least one valid linestring" return "Invalid Polygon must contain at least one valid linestring"
errors = [] errors = []
@@ -545,7 +488,7 @@ class GeoJsonBaseField(BaseField):
# Quick and dirty validator # Quick and dirty validator
try: try:
value[0][0] value[0][0]
except (TypeError, IndexError): except:
return "Invalid LineString must contain at least one valid point" return "Invalid LineString must contain at least one valid point"
errors = [] errors = []
@@ -576,7 +519,7 @@ class GeoJsonBaseField(BaseField):
# Quick and dirty validator # Quick and dirty validator
try: try:
value[0][0] value[0][0]
except (TypeError, IndexError): except:
return "Invalid MultiPoint must contain at least one valid point" return "Invalid MultiPoint must contain at least one valid point"
errors = [] errors = []
@@ -595,7 +538,7 @@ class GeoJsonBaseField(BaseField):
# Quick and dirty validator # Quick and dirty validator
try: try:
value[0][0][0] value[0][0][0]
except (TypeError, IndexError): except:
return "Invalid MultiLineString must contain at least one valid linestring" return "Invalid MultiLineString must contain at least one valid linestring"
errors = [] errors = []
@@ -617,7 +560,7 @@ class GeoJsonBaseField(BaseField):
# Quick and dirty validator # Quick and dirty validator
try: try:
value[0][0][0][0] value[0][0][0][0]
except (TypeError, IndexError): except:
return "Invalid MultiPolygon must contain at least one valid Polygon" return "Invalid MultiPolygon must contain at least one valid Polygon"
errors = [] errors = []
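All the quick-and-dirty validators above just probe nesting depth; a polygon, for instance, must index as value[0][0][0], i.e. a list of linestrings, each a list of points. A sketch of such a value:

polygon = [[[0, 0], [0, 1], [1, 1], [0, 0]]]   # one ring of [x, y] points
polygon[0][0][0]                               # 0 -> passes the probe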


@@ -1,22 +1,25 @@
import warnings import warnings
from mongoengine.base.common import ALLOW_INHERITANCE, _document_registry import pymongo
from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField
from mongoengine.common import _import_class from mongoengine.common import _import_class
from mongoengine.errors import InvalidDocumentError from mongoengine.errors import InvalidDocumentError
from mongoengine.python_support import PY3 from mongoengine.python_support import PY3
from mongoengine.queryset import (DO_NOTHING, DoesNotExist, from mongoengine.queryset import (DO_NOTHING, DoesNotExist,
MultipleObjectsReturned, MultipleObjectsReturned,
QuerySetManager) QuerySet, QuerySetManager)
from mongoengine.base.common import _document_registry, ALLOW_INHERITANCE
from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField
__all__ = ('DocumentMetaclass', 'TopLevelDocumentMetaclass') __all__ = ('DocumentMetaclass', 'TopLevelDocumentMetaclass')
class DocumentMetaclass(type): class DocumentMetaclass(type):
"""Metaclass for all documents."""
# TODO lower complexity of this method """Metaclass for all documents.
"""
def __new__(cls, name, bases, attrs): def __new__(cls, name, bases, attrs):
flattened_bases = cls._get_bases(bases) flattened_bases = cls._get_bases(bases)
super_new = super(DocumentMetaclass, cls).__new__ super_new = super(DocumentMetaclass, cls).__new__
@@ -43,9 +46,8 @@ class DocumentMetaclass(type):
elif hasattr(base, '_meta'): elif hasattr(base, '_meta'):
meta.merge(base._meta) meta.merge(base._meta)
attrs['_meta'] = meta attrs['_meta'] = meta
attrs['_meta']['abstract'] = False # 789: EmbeddedDocument shouldn't inherit abstract
if attrs['_meta'].get('allow_inheritance', ALLOW_INHERITANCE): if '_meta' in attrs and attrs['_meta'].get('allow_inheritance', ALLOW_INHERITANCE):
StringField = _import_class('StringField') StringField = _import_class('StringField')
attrs['_cls'] = StringField() attrs['_cls'] = StringField()
@@ -110,7 +112,7 @@ class DocumentMetaclass(type):
for base in flattened_bases: for base in flattened_bases:
if (not getattr(base, '_is_base_cls', True) and if (not getattr(base, '_is_base_cls', True) and
not getattr(base, '_meta', {}).get('abstract', True)): not getattr(base, '_meta', {}).get('abstract', True)):
# Collate hierarchy for _cls and _subclasses # Collate heirarchy for _cls and _subclasses
class_name.append(base.__name__) class_name.append(base.__name__)
if hasattr(base, '_meta'): if hasattr(base, '_meta'):
@@ -143,7 +145,7 @@ class DocumentMetaclass(type):
for base in document_bases: for base in document_bases:
if _cls not in base._subclasses: if _cls not in base._subclasses:
base._subclasses += (_cls,) base._subclasses += (_cls,)
base._types = base._subclasses # TODO deprecate _types base._types = base._subclasses # TODO deprecate _types
(Document, EmbeddedDocument, DictField, (Document, EmbeddedDocument, DictField,
CachedReferenceField) = cls._import_classes() CachedReferenceField) = cls._import_classes()
@@ -162,7 +164,7 @@ class DocumentMetaclass(type):
# copies __func__ into im_func and __self__ into im_self for # copies __func__ into im_func and __self__ into im_self for
# classmethod objects in Document derived classes. # classmethod objects in Document derived classes.
if PY3: if PY3:
for val in new_class.__dict__.values(): for key, val in new_class.__dict__.items():
if isinstance(val, classmethod): if isinstance(val, classmethod):
f = val.__get__(new_class) f = val.__get__(new_class)
if hasattr(f, '__func__') and not hasattr(f, 'im_func'): if hasattr(f, '__func__') and not hasattr(f, 'im_func'):
@@ -173,8 +175,7 @@ class DocumentMetaclass(type):
# Handle delete rules # Handle delete rules
for field in new_class._fields.itervalues(): for field in new_class._fields.itervalues():
f = field f = field
if f.owner_document is None: f.owner_document = new_class
f.owner_document = new_class
delete_rule = getattr(f, 'reverse_delete_rule', DO_NOTHING) delete_rule = getattr(f, 'reverse_delete_rule', DO_NOTHING)
if isinstance(f, CachedReferenceField): if isinstance(f, CachedReferenceField):
@@ -183,7 +184,7 @@ class DocumentMetaclass(type):
"CachedReferenceFields is not allowed in EmbeddedDocuments") "CachedReferenceFields is not allowed in EmbeddedDocuments")
if not f.document_type: if not f.document_type:
raise InvalidDocumentError( raise InvalidDocumentError(
"Document is not available to sync") "Document is not avaiable to sync")
if f.auto_sync: if f.auto_sync:
f.start_listener() f.start_listener()
@@ -245,10 +246,11 @@ class DocumentMetaclass(type):
EmbeddedDocument = _import_class('EmbeddedDocument') EmbeddedDocument = _import_class('EmbeddedDocument')
DictField = _import_class('DictField') DictField = _import_class('DictField')
CachedReferenceField = _import_class('CachedReferenceField') CachedReferenceField = _import_class('CachedReferenceField')
return Document, EmbeddedDocument, DictField, CachedReferenceField return (Document, EmbeddedDocument, DictField, CachedReferenceField)
class TopLevelDocumentMetaclass(DocumentMetaclass): class TopLevelDocumentMetaclass(DocumentMetaclass):
"""Metaclass for top-level documents (i.e. documents that have their own """Metaclass for top-level documents (i.e. documents that have their own
collection in the database. collection in the database.
""" """
@@ -258,7 +260,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
super_new = super(TopLevelDocumentMetaclass, cls).__new__ super_new = super(TopLevelDocumentMetaclass, cls).__new__
# Set default _meta data if base class, otherwise get user defined meta # Set default _meta data if base class, otherwise get user defined meta
if attrs.get('my_metaclass') == TopLevelDocumentMetaclass: if (attrs.get('my_metaclass') == TopLevelDocumentMetaclass):
# defaults # defaults
attrs['_meta'] = { attrs['_meta'] = {
'abstract': True, 'abstract': True,
@@ -277,7 +279,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
attrs['_meta'].update(attrs.get('meta', {})) attrs['_meta'].update(attrs.get('meta', {}))
else: else:
attrs['_meta'] = attrs.get('meta', {}) attrs['_meta'] = attrs.get('meta', {})
# Explicitly set abstract to false unless set # Explictly set abstract to false unless set
attrs['_meta']['abstract'] = attrs['_meta'].get('abstract', False) attrs['_meta']['abstract'] = attrs['_meta'].get('abstract', False)
attrs['_is_base_cls'] = False attrs['_is_base_cls'] = False
@@ -292,7 +294,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
# Clean up top level meta # Clean up top level meta
if 'meta' in attrs: if 'meta' in attrs:
del attrs['meta'] del(attrs['meta'])
# Find the parent document class # Find the parent document class
parent_doc_cls = [b for b in flattened_bases parent_doc_cls = [b for b in flattened_bases
@@ -301,11 +303,11 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
# Prevent classes setting collection different to their parents # Prevent classes setting collection different to their parents
# If parent wasn't an abstract class # If parent wasn't an abstract class
if (parent_doc_cls and 'collection' in attrs.get('_meta', {}) and if (parent_doc_cls and 'collection' in attrs.get('_meta', {})
not parent_doc_cls._meta.get('abstract', True)): and not parent_doc_cls._meta.get('abstract', True)):
msg = "Trying to set a collection on a subclass (%s)" % name msg = "Trying to set a collection on a subclass (%s)" % name
warnings.warn(msg, SyntaxWarning) warnings.warn(msg, SyntaxWarning)
del attrs['_meta']['collection'] del(attrs['_meta']['collection'])
# Ensure abstract documents have abstract bases # Ensure abstract documents have abstract bases
if attrs.get('_is_base_cls') or attrs['_meta'].get('abstract'): if attrs.get('_is_base_cls') or attrs['_meta'].get('abstract'):
@@ -383,17 +385,15 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
new_class._auto_id_field = getattr(parent_doc_cls, new_class._auto_id_field = getattr(parent_doc_cls,
'_auto_id_field', False) '_auto_id_field', False)
if not new_class._meta.get('id_field'): if not new_class._meta.get('id_field'):
# After 0.10, find not existing names, instead of overwriting
id_name, id_db_name = cls.get_auto_id_names(new_class)
new_class._auto_id_field = True new_class._auto_id_field = True
new_class._meta['id_field'] = id_name new_class._meta['id_field'] = 'id'
new_class._fields[id_name] = ObjectIdField(db_field=id_db_name) new_class._fields['id'] = ObjectIdField(db_field='_id')
new_class._fields[id_name].name = id_name new_class._fields['id'].name = 'id'
new_class.id = new_class._fields[id_name] new_class.id = new_class._fields['id']
new_class._db_field_map[id_name] = id_db_name
new_class._reverse_db_field_map[id_db_name] = id_name # Prepend id field to _fields_ordered
# Prepend id field to _fields_ordered if 'id' in new_class._fields and 'id' not in new_class._fields_ordered:
new_class._fields_ordered = (id_name, ) + new_class._fields_ordered new_class._fields_ordered = ('id', ) + new_class._fields_ordered
# Merge in exceptions with parent hierarchy # Merge in exceptions with parent hierarchy
exceptions_to_merge = (DoesNotExist, MultipleObjectsReturned) exceptions_to_merge = (DoesNotExist, MultipleObjectsReturned)
@@ -408,22 +408,9 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
return new_class return new_class
@classmethod
def get_auto_id_names(cls, new_class):
id_name, id_db_name = ('id', '_id')
if id_name not in new_class._fields and \
id_db_name not in (v.db_field for v in new_class._fields.values()):
return id_name, id_db_name
id_basename, id_db_basename, i = 'auto_id', '_auto_id', 0
while id_name in new_class._fields or \
id_db_name in (v.db_field for v in new_class._fields.values()):
id_name = '{0}_{1}'.format(id_basename, i)
id_db_name = '{0}_{1}'.format(id_db_basename, i)
i += 1
return id_name, id_db_name
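get_auto_id_names (left-hand side only) picks a non-clashing name for the automatic primary key instead of overwriting a user field called id; the right-hand side always uses id/_id. A sketch with an invented document:

from mongoengine import Document, StringField

class LegacyDoc(Document):
    id = StringField()   # a plain field, not a primary key

LegacyDoc._meta['id_field']   # 'auto_id_0' on the left-hand side;
                              # 'id' (the field is overwritten) on the right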
class MetaDict(dict): class MetaDict(dict):
"""Custom dictionary for meta classes. """Custom dictionary for meta classes.
Handles the merging of set indexes Handles the merging of set indexes
""" """
@@ -438,5 +425,6 @@ class MetaDict(dict):
class BasesTuple(tuple): class BasesTuple(tuple):
"""Special class to handle introspection of bases tuple in __new__""" """Special class to handle introspection of bases tuple in __new__"""
pass pass


@@ -1,5 +1,4 @@
_class_registry_cache = {} _class_registry_cache = {}
_field_list_cache = []
def _import_class(cls_name): def _import_class(cls_name):
@@ -21,16 +20,13 @@ def _import_class(cls_name):
doc_classes = ('Document', 'DynamicEmbeddedDocument', 'EmbeddedDocument', doc_classes = ('Document', 'DynamicEmbeddedDocument', 'EmbeddedDocument',
'MapReduceDocument') 'MapReduceDocument')
field_classes = ('DictField', 'DynamicField', 'EmbeddedDocumentField',
# Field Classes 'FileField', 'GenericReferenceField',
if not _field_list_cache: 'GenericEmbeddedDocumentField', 'GeoPointField',
from mongoengine.fields import __all__ as fields 'PointField', 'LineStringField', 'ListField',
_field_list_cache.extend(fields) 'PolygonField', 'ReferenceField', 'StringField',
from mongoengine.base.fields import __all__ as fields 'CachedReferenceField',
_field_list_cache.extend(fields) 'ComplexBaseField', 'GeoJsonBaseField')
field_classes = _field_list_cache
queryset_classes = ('OperationError',) queryset_classes = ('OperationError',)
deref_classes = ('DeReference',) deref_classes = ('DeReference',)


@@ -1,5 +1,10 @@
from pymongo import MongoClient, ReadPreference, uri_parser import pymongo
from mongoengine.python_support import (IS_PYMONGO_3, str_types) from pymongo import MongoClient, MongoReplicaSetClient, uri_parser
try:
import motor
except ImportError:
motor = None
__all__ = ['ConnectionError', 'connect', 'register_connection', __all__ = ['ConnectionError', 'connect', 'register_connection',
'DEFAULT_CONNECTION_NAME'] 'DEFAULT_CONNECTION_NAME']
@@ -7,12 +12,6 @@ __all__ = ['ConnectionError', 'connect', 'register_connection',
DEFAULT_CONNECTION_NAME = 'default' DEFAULT_CONNECTION_NAME = 'default'
if IS_PYMONGO_3:
READ_PREFERENCE = ReadPreference.PRIMARY
else:
from pymongo import MongoReplicaSetClient
READ_PREFERENCE = False
class ConnectionError(Exception): class ConnectionError(Exception):
pass pass
@@ -24,9 +23,9 @@ _dbs = {}
def register_connection(alias, name=None, host=None, port=None, def register_connection(alias, name=None, host=None, port=None,
read_preference=READ_PREFERENCE, read_preference=False,
username=None, password=None, authentication_source=None, username=None, password=None, authentication_source=None,
authentication_mechanism=None, async=False,
**kwargs): **kwargs):
"""Add a connection. """Add a connection.
@@ -40,14 +39,7 @@ def register_connection(alias, name=None, host=None, port=None,
:param username: username to authenticate with :param username: username to authenticate with
:param password: password to authenticate with :param password: password to authenticate with
:param authentication_source: database to authenticate against :param authentication_source: database to authenticate against
:param authentication_mechanism: database authentication mechanisms.
By default, use SCRAM-SHA-1 with MongoDB 3.0 and later,
MONGODB-CR (MongoDB Challenge Response protocol) for older servers.
:param is_mock: explicitly use mongomock for this connection
(can also be done by using `mongomock://` as db host prefix)
:param kwargs: allow ad-hoc parameters to be passed into the pymongo driver :param kwargs: allow ad-hoc parameters to be passed into the pymongo driver
.. versionchanged:: 0.10.6 - added mongomock support
""" """
global _connection_settings global _connection_settings
@@ -59,41 +51,20 @@ def register_connection(alias, name=None, host=None, port=None,
'username': username, 'username': username,
'password': password, 'password': password,
'authentication_source': authentication_source, 'authentication_source': authentication_source,
'authentication_mechanism': authentication_mechanism 'async': async
} }
# Handle uri style connections # Handle uri style connections
conn_host = conn_settings['host'] if "://" in conn_settings['host']:
# host can be a list or a string, so if string, force to a list uri_dict = uri_parser.parse_uri(conn_settings['host'])
if isinstance(conn_host, str_types): conn_settings.update({
conn_host = [conn_host] 'name': uri_dict.get('database') or name,
'username': uri_dict.get('username'),
resolved_hosts = [] 'password': uri_dict.get('password'),
for entity in conn_host: 'read_preference': read_preference,
# Handle uri style connections })
if entity.startswith('mongomock://'): if "replicaSet" in conn_settings['host']:
conn_settings['is_mock'] = True conn_settings['replicaSet'] = True
# `mongomock://` is not a valid url prefix and must be replaced by `mongodb://`
resolved_hosts.append(entity.replace('mongomock://', 'mongodb://', 1))
elif '://' in entity:
uri_dict = uri_parser.parse_uri(entity)
resolved_hosts.append(entity)
conn_settings.update({
'name': uri_dict.get('database') or name,
'username': uri_dict.get('username'),
'password': uri_dict.get('password'),
'read_preference': read_preference,
})
uri_options = uri_dict['options']
if 'replicaset' in uri_options:
conn_settings['replicaSet'] = True
if 'authsource' in uri_options:
conn_settings['authentication_source'] = uri_options['authsource']
if 'authmechanism' in uri_options:
conn_settings['authentication_mechanism'] = uri_options['authmechanism']
else:
resolved_hosts.append(entity)
conn_settings['host'] = resolved_hosts
# Deprecated parameters that should not be passed on # Deprecated parameters that should not be passed on
kwargs.pop('slaves', None) kwargs.pop('slaves', None)
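The async flag added on the right-hand side is the point of this change-set: when set, get_connection builds a Motor client instead of a PyMongo one. A sketch of registering such a connection (alias and database names invented; requires the motor package):

from mongoengine.connection import register_connection, get_connection

register_connection('events', name='eventdb', host='localhost',
                    port=27017, async=True)
client = get_connection('events')   # a motor.MotorClient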
@@ -108,7 +79,7 @@ def disconnect(alias=DEFAULT_CONNECTION_NAME):
global _dbs global _dbs
if alias in _connections: if alias in _connections:
get_connection(alias=alias).close() get_connection(alias=alias).disconnect()
del _connections[alias] del _connections[alias]
if alias in _dbs: if alias in _dbs:
del _dbs[alias] del _dbs[alias]
@@ -132,30 +103,29 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
conn_settings.pop('username', None) conn_settings.pop('username', None)
conn_settings.pop('password', None) conn_settings.pop('password', None)
conn_settings.pop('authentication_source', None) conn_settings.pop('authentication_source', None)
conn_settings.pop('authentication_mechanism', None) async = conn_settings.pop('async')
if async:
if not motor:
raise ImproperlyConfigured("Motor library was not found")
connection_class = motor.MotorClient
is_mock = conn_settings.pop('is_mock', None)
if is_mock:
# Use MongoClient from mongomock
try:
import mongomock
except ImportError:
raise RuntimeError('You need mongomock installed '
'to mock MongoEngine.')
connection_class = mongomock.MongoClient
else: else:
# Use MongoClient from pymongo
connection_class = MongoClient connection_class = MongoClient
if 'replicaSet' in conn_settings: if 'replicaSet' in conn_settings:
conn_settings['hosts_or_uri'] = conn_settings.pop('host', None)
# Discard port since it can't be used on MongoReplicaSetClient # Discard port since it can't be used on MongoReplicaSetClient
conn_settings.pop('port', None) conn_settings.pop('port', None)
# Discard replicaSet if not base string # Discard replicaSet if not base string
if not isinstance(conn_settings['replicaSet'], basestring): if not isinstance(conn_settings['replicaSet'], basestring):
conn_settings.pop('replicaSet', None) conn_settings.pop('replicaSet', None)
if not IS_PYMONGO_3:
if async:
connection_class = motor.MotorReplicaSetClient
else:
connection_class = MongoReplicaSetClient connection_class = MongoReplicaSetClient
conn_settings['hosts_or_uri'] = conn_settings.pop('host', None)
try: try:
connection = None connection = None
@@ -166,15 +136,15 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
connection_settings.pop('name', None) connection_settings.pop('name', None)
connection_settings.pop('username', None) connection_settings.pop('username', None)
connection_settings.pop('password', None) connection_settings.pop('password', None)
connection_settings.pop('authentication_source', None)
connection_settings.pop('authentication_mechanism', None)
if conn_settings == connection_settings and _connections.get(db_alias, None): if conn_settings == connection_settings and _connections.get(db_alias, None):
connection = _connections[db_alias] connection = _connections[db_alias]
break break
_connections[alias] = connection if connection else connection_class(**conn_settings) _connections[alias] = connection if connection else connection_class(
**conn_settings)
except Exception, e: except Exception, e:
raise ConnectionError("Cannot connect to database %s :\n%s" % (alias, e)) raise ConnectionError(
"Cannot connect to database %s :\n%s" % (alias, e))
return _connections[alias] return _connections[alias]
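The left-hand side also recognises mongomock:// hosts and swaps in mongomock.MongoClient, which keeps tests off a real server. A sketch (left-hand side only; requires the mongomock package):

from mongoengine import connect

connect('testdb', host='mongomock://localhost')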
@@ -187,13 +157,11 @@ def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
conn = get_connection(alias) conn = get_connection(alias)
conn_settings = _connection_settings[alias] conn_settings = _connection_settings[alias]
db = conn[conn_settings['name']] db = conn[conn_settings['name']]
auth_kwargs = {'source': conn_settings['authentication_source']}
if conn_settings['authentication_mechanism'] is not None:
auth_kwargs['mechanism'] = conn_settings['authentication_mechanism']
# Authenticate if necessary # Authenticate if necessary
if conn_settings['username'] and (conn_settings['password'] or if conn_settings['username'] and conn_settings['password']:
conn_settings['authentication_mechanism'] == 'MONGODB-X509'): db.authenticate(conn_settings['username'],
db.authenticate(conn_settings['username'], conn_settings['password'], **auth_kwargs) conn_settings['password'],
source=conn_settings['authentication_source'])
_dbs[alias] = db _dbs[alias] = db
return _dbs[alias] return _dbs[alias]


@@ -1,17 +1,14 @@
from bson import DBRef, SON from bson import DBRef, SON
from .base import ( from base import (BaseDict, BaseList, TopLevelDocumentMetaclass, get_document)
BaseDict, BaseList, EmbeddedDocumentList, from fields import (ReferenceField, ListField, DictField, MapField)
TopLevelDocumentMetaclass, get_document from connection import get_db
) from queryset import QuerySet
from .connection import get_db from document import Document, EmbeddedDocument
from .document import Document, EmbeddedDocument
from .fields import DictField, ListField, MapField, ReferenceField
from .python_support import txt_type
from .queryset import QuerySet
class DeReference(object): class DeReference(object):
def __call__(self, items, max_depth=1, instance=None, name=None): def __call__(self, items, max_depth=1, instance=None, name=None):
""" """
Cheaply dereferences the items to a set depth. Cheaply dereferences the items to a set depth.
@@ -49,8 +46,8 @@ class DeReference(object):
if is_list and all([i.__class__ == doc_type for i in items]): if is_list and all([i.__class__ == doc_type for i in items]):
return items return items
elif not is_list and all( elif not is_list and all([i.__class__ == doc_type
[i.__class__ == doc_type for i in items.values()]): for i in items.values()]):
return items return items
elif not field.dbref: elif not field.dbref:
if not hasattr(items, 'items'): if not hasattr(items, 'items'):
@@ -101,25 +98,25 @@ class DeReference(object):
if isinstance(item, (Document, EmbeddedDocument)): if isinstance(item, (Document, EmbeddedDocument)):
for field_name, field in item._fields.iteritems(): for field_name, field in item._fields.iteritems():
v = item._data.get(field_name, None) v = item._data.get(field_name, None)
if isinstance(v, DBRef): if isinstance(v, (DBRef)):
reference_map.setdefault(field.document_type, set()).add(v.id) reference_map.setdefault(field.document_type, []).append(v.id)
elif isinstance(v, (dict, SON)) and '_ref' in v: elif isinstance(v, (dict, SON)) and '_ref' in v:
reference_map.setdefault(get_document(v['_cls']), set()).add(v['_ref'].id) reference_map.setdefault(get_document(v['_cls']), []).append(v['_ref'].id)
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
field_cls = getattr(getattr(field, 'field', None), 'document_type', None) field_cls = getattr(getattr(field, 'field', None), 'document_type', None)
references = self._find_references(v, depth) references = self._find_references(v, depth)
for key, refs in references.iteritems(): for key, refs in references.iteritems():
if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)): if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)):
key = field_cls key = field_cls
reference_map.setdefault(key, set()).update(refs) reference_map.setdefault(key, []).extend(refs)
elif isinstance(item, DBRef): elif isinstance(item, (DBRef)):
reference_map.setdefault(item.collection, set()).add(item.id) reference_map.setdefault(item.collection, []).append(item.id)
elif isinstance(item, (dict, SON)) and '_ref' in item: elif isinstance(item, (dict, SON)) and '_ref' in item:
reference_map.setdefault(get_document(item['_cls']), set()).add(item['_ref'].id) reference_map.setdefault(get_document(item['_cls']), []).append(item['_ref'].id)
elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth: elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth:
references = self._find_references(item, depth - 1) references = self._find_references(item, depth - 1)
for key, refs in references.iteritems(): for key, refs in references.iteritems():
reference_map.setdefault(key, set()).update(refs) reference_map.setdefault(key, []).extend(refs)
return reference_map return reference_map
@@ -128,25 +125,21 @@ class DeReference(object):
""" """
object_map = {} object_map = {}
for collection, dbrefs in self.reference_map.iteritems(): for collection, dbrefs in self.reference_map.iteritems():
keys = object_map.keys()
refs = list(set([dbref for dbref in dbrefs if unicode(dbref).encode('utf-8') not in keys]))
if hasattr(collection, 'objects'): # We have a document class for the refs if hasattr(collection, 'objects'): # We have a document class for the refs
col_name = collection._get_collection_name()
refs = [dbref for dbref in dbrefs
if (col_name, dbref) not in object_map]
references = collection.objects.in_bulk(refs) references = collection.objects.in_bulk(refs)
for key, doc in references.iteritems(): for key, doc in references.iteritems():
object_map[(col_name, key)] = doc object_map[key] = doc
else: # Generic reference: use the refs data to convert to document else: # Generic reference: use the refs data to convert to document
if isinstance(doc_type, (ListField, DictField, MapField,)): if isinstance(doc_type, (ListField, DictField, MapField,)):
continue continue
refs = [dbref for dbref in dbrefs
if (collection, dbref) not in object_map]
if doc_type: if doc_type:
references = doc_type._get_db()[collection].find({'_id': {'$in': refs}}) references = doc_type._get_db()[collection].find({'_id': {'$in': refs}})
for ref in references: for ref in references:
doc = doc_type._from_son(ref) doc = doc_type._from_son(ref)
object_map[(collection, doc.id)] = doc object_map[doc.id] = doc
else: else:
references = get_db()[collection].find({'_id': {'$in': refs}}) references = get_db()[collection].find({'_id': {'$in': refs}})
for ref in references: for ref in references:
@@ -155,10 +148,10 @@ class DeReference(object):
elif doc_type is None: elif doc_type is None:
doc = get_document( doc = get_document(
''.join(x.capitalize() ''.join(x.capitalize()
for x in collection.split('_')))._from_son(ref) for x in collection.split('_')))._from_son(ref)
else: else:
doc = doc_type._from_son(ref) doc = doc_type._from_son(ref)
object_map[(collection, doc.id)] = doc object_map[doc.id] = doc
return object_map return object_map
def _attach_objects(self, items, depth=0, instance=None, name=None): def _attach_objects(self, items, depth=0, instance=None, name=None):
@@ -184,8 +177,7 @@ class DeReference(object):
if isinstance(items, (dict, SON)): if isinstance(items, (dict, SON)):
if '_ref' in items: if '_ref' in items:
return self.object_map.get( return self.object_map.get(items['_ref'].id, items)
(items['_ref'].collection, items['_ref'].id), items)
elif '_cls' in items: elif '_cls' in items:
doc = get_document(items['_cls'])._from_son(items) doc = get_document(items['_cls'])._from_son(items)
_cls = doc._data.pop('_cls', None) _cls = doc._data.pop('_cls', None)
@@ -197,9 +189,6 @@ class DeReference(object):
if not hasattr(items, 'items'): if not hasattr(items, 'items'):
is_list = True is_list = True
list_type = BaseList
if isinstance(items, EmbeddedDocumentList):
list_type = EmbeddedDocumentList
as_tuple = isinstance(items, tuple) as_tuple = isinstance(items, tuple)
iterator = enumerate(items) iterator = enumerate(items)
data = [] data = []
@@ -220,24 +209,23 @@ class DeReference(object):
elif isinstance(v, (Document, EmbeddedDocument)): elif isinstance(v, (Document, EmbeddedDocument)):
for field_name, field in v._fields.iteritems(): for field_name, field in v._fields.iteritems():
v = data[k]._data.get(field_name, None) v = data[k]._data.get(field_name, None)
if isinstance(v, DBRef): if isinstance(v, (DBRef)):
data[k]._data[field_name] = self.object_map.get( data[k]._data[field_name] = self.object_map.get(v.id, v)
(v.collection, v.id), v)
elif isinstance(v, (dict, SON)) and '_ref' in v: elif isinstance(v, (dict, SON)) and '_ref' in v:
data[k]._data[field_name] = self.object_map.get( data[k]._data[field_name] = self.object_map.get(v['_ref'].id, v)
(v['_ref'].collection, v['_ref'].id), v) elif isinstance(v, dict) and depth <= self.max_depth:
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name)
item_name = txt_type("{0}.{1}.{2}").format(name, k, field_name) elif isinstance(v, (list, tuple)) and depth <= self.max_depth:
data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=item_name) data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name)
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
item_name = '%s.%s' % (name, k) if name else name item_name = '%s.%s' % (name, k) if name else name
data[k] = self._attach_objects(v, depth - 1, instance=instance, name=item_name) data[k] = self._attach_objects(v, depth - 1, instance=instance, name=item_name)
elif hasattr(v, 'id'): elif hasattr(v, 'id'):
data[k] = self.object_map.get((v.collection, v.id), v) data[k] = self.object_map.get(v.id, v)
if instance and name: if instance and name:
if is_list: if is_list:
return tuple(data) if as_tuple else list_type(data, instance, name) return tuple(data) if as_tuple else BaseList(data, instance, name)
return BaseDict(data, instance, name) return BaseDict(data, instance, name)
depth += 1 depth += 1
return data return data
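DeReference is the machinery behind QuerySet.select_related(); the left-hand side keys its object map by (collection, id) so identical ids from different collections cannot collide. A sketch of the entry point, with invented documents and an assumed connection:

from mongoengine import Document, ReferenceField, StringField

class Author(Document):
    name = StringField()

class Book(Document):
    author = ReferenceField(Author)

# bulk-fetches all referenced authors in one query instead of one per book
books = Book.objects.select_related(max_depth=1)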

mongoengine/django/auth.py (new file, 412 lines)

@@ -0,0 +1,412 @@
from mongoengine import *
from django.utils.encoding import smart_str
from django.contrib.auth.models import _user_has_perm, _user_get_all_permissions, _user_has_module_perms
from django.db import models
from django.contrib.contenttypes.models import ContentTypeManager
from django.contrib import auth
from django.contrib.auth.models import AnonymousUser
from django.utils.translation import ugettext_lazy as _
from .utils import datetime_now
REDIRECT_FIELD_NAME = 'next'
try:
from django.contrib.auth.hashers import check_password, make_password
except ImportError:
"""Handle older versions of Django"""
from django.utils.hashcompat import md5_constructor, sha_constructor
def get_hexdigest(algorithm, salt, raw_password):
raw_password, salt = smart_str(raw_password), smart_str(salt)
if algorithm == 'md5':
return md5_constructor(salt + raw_password).hexdigest()
elif algorithm == 'sha1':
return sha_constructor(salt + raw_password).hexdigest()
raise ValueError('Got unknown password algorithm type in password')
def check_password(raw_password, password):
algo, salt, hash = password.split('$')
return hash == get_hexdigest(algo, salt, raw_password)
def make_password(raw_password):
from random import random
algo = 'sha1'
salt = get_hexdigest(algo, str(random()), str(random()))[:5]
hash = get_hexdigest(algo, salt, raw_password)
return '%s$%s$%s' % (algo, salt, hash)
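These fallbacks only exist when django.contrib.auth.hashers is unavailable (older Django versions). A sketch of their round trip:

pw = make_password('s3cret')          # 'sha1$<salt>$<hash>'
assert check_password('s3cret', pw)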
class ContentType(Document):
    name = StringField(max_length=100)
    app_label = StringField(max_length=100)
    model = StringField(max_length=100, verbose_name=_('python model class name'),
                        unique_with='app_label')
    objects = ContentTypeManager()

    class Meta:
        verbose_name = _('content type')
        verbose_name_plural = _('content types')
        # db_table = 'django_content_type'
        # ordering = ('name',)
        # unique_together = (('app_label', 'model'),)

    def __unicode__(self):
        return self.name

    def model_class(self):
        "Returns the Python model class for this type of content."
        from django.db import models
        return models.get_model(self.app_label, self.model)

    def get_object_for_this_type(self, **kwargs):
        """
        Returns an object of this type for the keyword arguments given.
        Basically, this is a proxy around this object_type's get_object() model
        method. The ObjectDoesNotExist exception, if thrown, will not be caught,
        so code that calls this method should catch it.
        """
        return self.model_class()._default_manager.using(self._state.db).get(**kwargs)

    def natural_key(self):
        return (self.app_label, self.model)


class SiteProfileNotAvailable(Exception):
    pass


class PermissionManager(models.Manager):
    def get_by_natural_key(self, codename, app_label, model):
        return self.get(
            codename=codename,
            content_type=ContentType.objects.get_by_natural_key(app_label, model)
        )


class Permission(Document):
    """The permissions system provides a way to assign permissions to specific
    users and groups of users.

    The permission system is used by the Django admin site, but may also be
    useful in your own code. The Django admin site uses permissions as follows:

        - The "add" permission limits the user's ability to view the "add"
          form and add an object.
        - The "change" permission limits a user's ability to view the change
          list, view the "change" form and change an object.
        - The "delete" permission limits the ability to delete an object.

    Permissions are set globally per type of object, not per specific object
    instance. It is possible to say "Mary may change news stories," but it's
    not currently possible to say "Mary may change news stories, but only the
    ones she created herself" or "Mary may only change news stories that have
    a certain status or publication date."

    Three basic permissions -- add, change and delete -- are automatically
    created for each Django model.
    """
    name = StringField(max_length=50, verbose_name=_('name'))
    content_type = ReferenceField(ContentType)
    codename = StringField(max_length=100, verbose_name=_('codename'))
    # FIXME: don't access field of the other class
    # unique_with=['content_type__app_label', 'content_type__model'])
    objects = PermissionManager()

    class Meta:
        verbose_name = _('permission')
        verbose_name_plural = _('permissions')
        # unique_together = (('content_type', 'codename'),)
        # ordering = ('content_type__app_label', 'content_type__model', 'codename')

    def __unicode__(self):
        return u"%s | %s | %s" % (
            unicode(self.content_type.app_label),
            unicode(self.content_type),
            unicode(self.name))

    def natural_key(self):
        return (self.codename,) + self.content_type.natural_key()
    natural_key.dependencies = ['contenttypes.contenttype']


class Group(Document):
    """Groups are a generic way of categorizing users to apply permissions,
    or some other label, to those users. A user can belong to any number of
    groups.

    A user in a group automatically has all the permissions granted to that
    group. For example, if the group Site editors has the permission
    can_edit_home_page, any user in that group will have that permission.

    Beyond permissions, groups are a convenient way to categorize users to
    apply some label, or extended functionality, to them. For example, you
    could create a group 'Special users', and you could write code that would
    do special things to those users -- such as giving them access to a
    members-only portion of your site, or sending them members-only
    e-mail messages.
    """
    name = StringField(max_length=80, unique=True, verbose_name=_('name'))
    permissions = ListField(ReferenceField(Permission, verbose_name=_('permissions'), required=False))

    class Meta:
        verbose_name = _('group')
        verbose_name_plural = _('groups')

    def __unicode__(self):
        return self.name


class UserManager(models.Manager):
    def create_user(self, username, email, password=None):
        """
        Creates and saves a User with the given username, e-mail and password.
        """
        now = datetime_now()

        # Normalize the address by lowercasing the domain part of the email
        # address.
        try:
            email_name, domain_part = email.strip().split('@', 1)
        except ValueError:
            pass
        else:
            email = '@'.join([email_name, domain_part.lower()])

        user = self.model(username=username, email=email, is_staff=False,
                          is_active=True, is_superuser=False, last_login=now,
                          date_joined=now)
        user.set_password(password)
        user.save(using=self._db)
        return user

    def create_superuser(self, username, email, password):
        u = self.create_user(username, email, password)
        u.is_staff = True
        u.is_active = True
        u.is_superuser = True
        u.save(using=self._db)
        return u

    def make_random_password(self, length=10, allowed_chars='abcdefghjkmnpqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23456789'):
        "Generates a random password with the given length and given allowed_chars"
        # Note that default value of allowed_chars does not have "I" or letters
        # that look like it -- just to avoid confusion.
        from random import choice
        return ''.join([choice(allowed_chars) for i in range(length)])


class User(Document):
    """A User document that aims to mirror most of the API specified by Django
    at http://docs.djangoproject.com/en/dev/topics/auth/#users
    """
    username = StringField(max_length=30, required=True,
                           verbose_name=_('username'),
                           help_text=_("Required. 30 characters or fewer. Letters, numbers and @/./+/-/_ characters"))
    first_name = StringField(max_length=30,
                             verbose_name=_('first name'))
    last_name = StringField(max_length=30,
                            verbose_name=_('last name'))
    email = EmailField(verbose_name=_('e-mail address'))
    password = StringField(max_length=128,
                           verbose_name=_('password'),
                           help_text=_("Use '[algo]$[iterations]$[salt]$[hexdigest]' or use the <a href=\"password/\">change password form</a>."))
    is_staff = BooleanField(default=False,
                            verbose_name=_('staff status'),
                            help_text=_("Designates whether the user can log into this admin site."))
    is_active = BooleanField(default=True,
                             verbose_name=_('active'),
                             help_text=_("Designates whether this user should be treated as active. Unselect this instead of deleting accounts."))
    is_superuser = BooleanField(default=False,
                                verbose_name=_('superuser status'),
                                help_text=_("Designates that this user has all permissions without explicitly assigning them."))
    last_login = DateTimeField(default=datetime_now,
                               verbose_name=_('last login'))
    date_joined = DateTimeField(default=datetime_now,
                                verbose_name=_('date joined'))

    user_permissions = ListField(ReferenceField(Permission), verbose_name=_('user permissions'),
                                 help_text=_('Permissions for the user.'))

    USERNAME_FIELD = 'username'
    REQUIRED_FIELDS = ['email']

    meta = {
        'allow_inheritance': True,
        'indexes': [
            {'fields': ['username'], 'unique': True, 'sparse': True}
        ]
    }

    def __unicode__(self):
        return self.username

    def get_full_name(self):
        """Returns the user's first and last names, separated by a space.
        """
        full_name = u'%s %s' % (self.first_name or '', self.last_name or '')
        return full_name.strip()

    def is_anonymous(self):
        return False

    def is_authenticated(self):
        return True

    def set_password(self, raw_password):
        """Sets the user's password - always use this rather than directly
        assigning to :attr:`~mongoengine.django.auth.User.password` as the
        password is hashed before storage.
        """
        self.password = make_password(raw_password)
        self.save()
        return self

    def check_password(self, raw_password):
        """Checks the user's password against a provided password - always use
        this rather than directly comparing to
        :attr:`~mongoengine.django.auth.User.password` as the password is
        hashed before storage.
        """
        return check_password(raw_password, self.password)

    @classmethod
    def create_user(cls, username, password, email=None):
        """Create (and save) a new user with the given username, password and
        email address.
        """
        now = datetime_now()

        # Normalize the address by lowercasing the domain part of the email
        # address.
        if email is not None:
            try:
                email_name, domain_part = email.strip().split('@', 1)
            except ValueError:
                pass
            else:
                email = '@'.join([email_name, domain_part.lower()])

        user = cls(username=username, email=email, date_joined=now)
        user.set_password(password)
        user.save()
        return user

    def get_group_permissions(self, obj=None):
        """
        Returns a list of permission strings that this user has through his/her
        groups. This method queries all available auth backends. If an object
        is passed in, only permissions matching this object are returned.
        """
        permissions = set()
        for backend in auth.get_backends():
            if hasattr(backend, "get_group_permissions"):
                permissions.update(backend.get_group_permissions(self, obj))
        return permissions

    def get_all_permissions(self, obj=None):
        return _user_get_all_permissions(self, obj)

    def has_perm(self, perm, obj=None):
        """
        Returns True if the user has the specified permission. This method
        queries all available auth backends, but returns immediately if any
        backend returns True. Thus, a user who has permission from a single
        auth backend is assumed to have permission in general. If an object is
        provided, permissions for this specific object are checked.
        """
        # Active superusers have all permissions.
        if self.is_active and self.is_superuser:
            return True

        # Otherwise we need to check the backends.
        return _user_has_perm(self, perm, obj)

    def has_module_perms(self, app_label):
        """
        Returns True if the user has any permissions in the given app label.
        Uses pretty much the same logic as has_perm, above.
        """
        # Active superusers have all permissions.
        if self.is_active and self.is_superuser:
            return True

        return _user_has_module_perms(self, app_label)

    def email_user(self, subject, message, from_email=None):
        "Sends an e-mail to this User."
        from django.core.mail import send_mail
        send_mail(subject, message, from_email, [self.email])

    def get_profile(self):
        """
        Returns site-specific profile for this user. Raises
        SiteProfileNotAvailable if this site does not allow profiles.
        """
        if not hasattr(self, '_profile_cache'):
            from django.conf import settings
            if not getattr(settings, 'AUTH_PROFILE_MODULE', False):
                raise SiteProfileNotAvailable('You need to set AUTH_PROFILE_MO'
                                              'DULE in your project settings')
            try:
                app_label, model_name = settings.AUTH_PROFILE_MODULE.split('.')
            except ValueError:
                raise SiteProfileNotAvailable('app_label and model_name should'
                                              ' be separated by a dot in the AUTH_PROFILE_MODULE set'
                                              'ting')
            try:
                model = models.get_model(app_label, model_name)
                if model is None:
                    raise SiteProfileNotAvailable('Unable to load the profile '
                                                  'model, check AUTH_PROFILE_MODULE in your project sett'
                                                  'ings')
                self._profile_cache = model._default_manager.using(self._state.db).get(user__id__exact=self.id)
                self._profile_cache.user = self
            except (ImportError, ImproperlyConfigured):
                raise SiteProfileNotAvailable
        return self._profile_cache


class MongoEngineBackend(object):
    """Authenticate using MongoEngine and mongoengine.django.auth.User.
    """

    supports_object_permissions = False
    supports_anonymous_user = False
    supports_inactive_user = False
    _user_doc = False

    def authenticate(self, username=None, password=None):
        user = self.user_document.objects(username=username).first()
        if user:
            if password and user.check_password(password):
                backend = auth.get_backends()[0]
                user.backend = "%s.%s" % (backend.__module__, backend.__class__.__name__)
                return user
        return None

    def get_user(self, user_id):
        return self.user_document.objects.with_id(user_id)

    @property
    def user_document(self):
        if self._user_doc is False:
            from .mongo_auth.models import get_user_document
            self._user_doc = get_user_document()
        return self._user_doc


def get_user(userid):
    """Returns a User object from an id (User.id). Django's equivalent takes
    request, but taking an id instead leaves it up to the developer to store
    the id in any way they want (session, signed cookie, etc.)
    """
    if not userid:
        return AnonymousUser()
    return MongoEngineBackend().get_user(userid) or AnonymousUser()
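A sketch of wiring the backend above into a project (database name and credentials are illustrative):

# settings.py (sketch)
AUTHENTICATION_BACKENDS = ('mongoengine.django.auth.MongoEngineBackend',)

# elsewhere (sketch)
from django.contrib import auth
from mongoengine import connect
from mongoengine.django.auth import User

connect('mydb')  # hypothetical database name
User.create_user('bob', 's3cret', email='bob@example.com')
user = auth.authenticate(username='bob', password='s3cret')
assert user is not None and user.is_authenticated()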

mongoengine/django/mongo_auth/models.py (new file)

@@ -0,0 +1,115 @@
from django.conf import settings
from django.contrib.auth.hashers import make_password
from django.contrib.auth.models import UserManager
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.utils.importlib import import_module
from django.utils.translation import ugettext_lazy as _


__all__ = (
    'get_user_document',
)


MONGOENGINE_USER_DOCUMENT = getattr(
    settings, 'MONGOENGINE_USER_DOCUMENT', 'mongoengine.django.auth.User')


def get_user_document():
    """Get the user document class used for authentication.

    This is the class defined in settings.MONGOENGINE_USER_DOCUMENT, which
    defaults to `mongoengine.django.auth.User`.
    """
    name = MONGOENGINE_USER_DOCUMENT
    dot = name.rindex('.')
    module = import_module(name[:dot])
    return getattr(module, name[dot + 1:])


class MongoUserManager(UserManager):
    """A User manager which allows the use of MongoEngine documents in Django.

    To use the manager, you must tell django.contrib.auth to use MongoUser as
    the user model. In your settings.py, you need:

        INSTALLED_APPS = (
            ...
            'django.contrib.auth',
            'mongoengine.django.mongo_auth',
            ...
        )
        AUTH_USER_MODEL = 'mongo_auth.MongoUser'

    Django will use the model object to access the custom Manager, which will
    replace the original queryset with MongoEngine querysets.

    By default, mongoengine.django.auth.User will be used to store users. You
    can specify another document class in MONGOENGINE_USER_DOCUMENT in your
    settings.py.

    The User Document class has the same requirements as a standard custom user
    model: https://docs.djangoproject.com/en/dev/topics/auth/customizing/

    In particular, the User Document class must define USERNAME_FIELD and
    REQUIRED_FIELDS.

    `AUTH_USER_MODEL` has been added in Django 1.5.
    """

    def contribute_to_class(self, model, name):
        super(MongoUserManager, self).contribute_to_class(model, name)
        self.dj_model = self.model
        self.model = get_user_document()

        self.dj_model.USERNAME_FIELD = self.model.USERNAME_FIELD
        username = models.CharField(_('username'), max_length=30, unique=True)
        username.contribute_to_class(self.dj_model, self.dj_model.USERNAME_FIELD)

        self.dj_model.REQUIRED_FIELDS = self.model.REQUIRED_FIELDS
        for name in self.dj_model.REQUIRED_FIELDS:
            field = models.CharField(_(name), max_length=30)
            field.contribute_to_class(self.dj_model, name)

    def get(self, *args, **kwargs):
        try:
            return self.get_query_set().get(*args, **kwargs)
        except self.model.DoesNotExist:
            # ModelBackend expects this exception
            raise self.dj_model.DoesNotExist

    @property
    def db(self):
        raise NotImplementedError

    def get_empty_query_set(self):
        return self.model.objects.none()

    def get_query_set(self):
        return self.model.objects


class MongoUser(models.Model):
    """Dummy user model for Django.

    MongoUser is used to replace Django's UserManager with MongoUserManager.
    The actual user document class is mongoengine.django.auth.User or any
    other document class specified in MONGOENGINE_USER_DOCUMENT.

    To get the user document class, use `get_user_document()`.
    """

    objects = MongoUserManager()

    class Meta:
        app_label = 'mongo_auth'

    def set_password(self, password):
        """Doesn't do anything, but works around the issue with Django 1.6."""
        make_password(password)
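A sketch of the swap described in the docstring above, pointing MONGOENGINE_USER_DOCUMENT at a custom document (the 'myapp' names are hypothetical):

# myapp/documents.py (sketch)
from mongoengine import Document, StringField, EmailField

class CustomUser(Document):      # hypothetical custom user document
    username = StringField(required=True)
    email = EmailField()
    USERNAME_FIELD = 'username'  # required, as for any custom user model
    REQUIRED_FIELDS = ['email']

# settings.py (sketch)
MONGOENGINE_USER_DOCUMENT = 'myapp.documents.CustomUser'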

mongoengine/django/sessions.py (new file)

@@ -0,0 +1,124 @@
from bson import json_util
from django.conf import settings
from django.contrib.sessions.backends.base import SessionBase, CreateError
from django.core.exceptions import SuspiciousOperation
try:
    from django.utils.encoding import force_unicode
except ImportError:
    from django.utils.encoding import force_text as force_unicode

from mongoengine.document import Document
from mongoengine import fields
from mongoengine.queryset import OperationError
from mongoengine.connection import DEFAULT_CONNECTION_NAME

from .utils import datetime_now


MONGOENGINE_SESSION_DB_ALIAS = getattr(
    settings, 'MONGOENGINE_SESSION_DB_ALIAS',
    DEFAULT_CONNECTION_NAME)

# a setting for the name of the collection used to store sessions
MONGOENGINE_SESSION_COLLECTION = getattr(
    settings, 'MONGOENGINE_SESSION_COLLECTION',
    'django_session')

# a setting for whether session data is stored encoded or not
MONGOENGINE_SESSION_DATA_ENCODE = getattr(
    settings, 'MONGOENGINE_SESSION_DATA_ENCODE',
    True)


class MongoSession(Document):
    session_key = fields.StringField(primary_key=True, max_length=40)
    session_data = fields.StringField() if MONGOENGINE_SESSION_DATA_ENCODE \
        else fields.DictField()
    expire_date = fields.DateTimeField()

    meta = {
        'collection': MONGOENGINE_SESSION_COLLECTION,
        'db_alias': MONGOENGINE_SESSION_DB_ALIAS,
        'allow_inheritance': False,
        'indexes': [
            {
                'fields': ['expire_date'],
                'expireAfterSeconds': 0
            }
        ]
    }

    def get_decoded(self):
        return SessionStore().decode(self.session_data)


class SessionStore(SessionBase):
    """A MongoEngine-based session store for Django.
    """

    def _get_session(self, *args, **kwargs):
        sess = super(SessionStore, self)._get_session(*args, **kwargs)
        if sess.get('_auth_user_id', None):
            sess['_auth_user_id'] = str(sess.get('_auth_user_id'))
        return sess

    def load(self):
        try:
            # compare against the current time, not the function object
            s = MongoSession.objects(session_key=self.session_key,
                                     expire_date__gt=datetime_now())[0]
            if MONGOENGINE_SESSION_DATA_ENCODE:
                return self.decode(force_unicode(s.session_data))
            else:
                return s.session_data
        except (IndexError, SuspiciousOperation):
            self.create()
            return {}

    def exists(self, session_key):
        return bool(MongoSession.objects(session_key=session_key).first())

    def create(self):
        while True:
            self._session_key = self._get_new_session_key()
            try:
                self.save(must_create=True)
            except CreateError:
                continue
            self.modified = True
            self._session_cache = {}
            return

    def save(self, must_create=False):
        if self.session_key is None:
            self._session_key = self._get_new_session_key()
        s = MongoSession(session_key=self.session_key)
        if MONGOENGINE_SESSION_DATA_ENCODE:
            s.session_data = self.encode(self._get_session(no_load=must_create))
        else:
            s.session_data = self._get_session(no_load=must_create)
        s.expire_date = self.get_expiry_date()
        try:
            s.save(force_insert=must_create)
        except OperationError:
            if must_create:
                raise CreateError
            raise

    def delete(self, session_key=None):
        if session_key is None:
            if self.session_key is None:
                return
            session_key = self.session_key
        MongoSession.objects(session_key=session_key).delete()


class BSONSerializer(object):
    """
    Serializer that can handle BSON types (eg ObjectId).
    """
    def dumps(self, obj):
        return json_util.dumps(obj, separators=(',', ':')).encode('ascii')

    def loads(self, data):
        return json_util.loads(data.decode('ascii'))
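A sketch of the settings that activate this backend; the three MONGOENGINE_* values shown are simply the defaults read at import time above, and SESSION_SERIALIZER applies only if you opt into the BSON serializer:

# settings.py (sketch)
SESSION_ENGINE = 'mongoengine.django.sessions'
SESSION_SERIALIZER = 'mongoengine.django.sessions.BSONSerializer'

MONGOENGINE_SESSION_DB_ALIAS = 'default'
MONGOENGINE_SESSION_COLLECTION = 'django_session'
MONGOENGINE_SESSION_DATA_ENCODE = True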

mongoengine/django/shortcuts.py (new file)

@@ -0,0 +1,47 @@
from mongoengine.queryset import QuerySet
from mongoengine.base import BaseDocument
from mongoengine.errors import ValidationError


def _get_queryset(cls):
    """Inspired by django.shortcuts.*"""
    if isinstance(cls, QuerySet):
        return cls
    else:
        return cls.objects


def get_document_or_404(cls, *args, **kwargs):
    """
    Uses get() to return a document, or raises an Http404 exception if the
    document does not exist.

    cls may be a Document or QuerySet object. All other passed
    arguments and keyword arguments are used in the get() query.

    Note: Like with get(), a MultipleObjectsReturned will be raised if more
    than one object is found.

    Inspired by django.shortcuts.*
    """
    queryset = _get_queryset(cls)
    try:
        return queryset.get(*args, **kwargs)
    except (queryset._document.DoesNotExist, ValidationError):
        from django.http import Http404
        raise Http404('No %s matches the given query.' % queryset._document._class_name)


def get_list_or_404(cls, *args, **kwargs):
    """
    Uses filter() to return a list of documents, or raises an Http404
    exception if the list is empty.

    cls may be a Document or QuerySet object. All other passed
    arguments and keyword arguments are used in the filter() query.

    Inspired by django.shortcuts.*
    """
    queryset = _get_queryset(cls)
    obj_list = list(queryset.filter(*args, **kwargs))
    if not obj_list:
        from django.http import Http404
        raise Http404('No %s matches the given query.' % queryset._document._class_name)
    return obj_list
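A sketch of get_document_or_404 in a view (the document and view names are hypothetical):

# views.py (sketch)
from django.http import HttpResponse
from mongoengine import Document, StringField
from mongoengine.django.shortcuts import get_document_or_404

class Article(Document):  # hypothetical document
    title = StringField()

def article_detail(request, pk):
    # Raises Http404 for a missing document, and also for a malformed id,
    # since ValidationError is caught alongside DoesNotExist above.
    article = get_document_or_404(Article, pk=pk)
    return HttpResponse(article.title)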

mongoengine/django/storage.py (new file)

@@ -0,0 +1,112 @@
import os
import itertools
import urlparse

from mongoengine import *
from django.conf import settings
from django.core.files.storage import Storage
from django.core.exceptions import ImproperlyConfigured


class FileDocument(Document):
    """A document used to store a single file in GridFS.
    """
    file = FileField()


class GridFSStorage(Storage):
    """A custom storage backend to store files in GridFS
    """

    def __init__(self, base_url=None):
        if base_url is None:
            base_url = settings.MEDIA_URL
        self.base_url = base_url
        self.document = FileDocument
        self.field = 'file'

    def delete(self, name):
        """Deletes the specified file from the storage system.
        """
        if self.exists(name):
            doc = self._get_doc_with_name(name)
            field = getattr(doc, self.field)
            doc.delete()    # Delete the FileDocument
            field.delete()  # Delete the file from GridFS

    def exists(self, name):
        """Returns True if a file referenced by the given name already exists
        in the storage system, or False if the name is available for a new
        file.
        """
        doc = self._get_doc_with_name(name)
        if doc:
            field = getattr(doc, self.field)
            return bool(field.name)
        else:
            return False

    def listdir(self, path=None):
        """Lists the contents of the specified path, returning a 2-tuple of
        lists; the first item being directories, the second item being files.
        """
        def name(doc):
            return getattr(doc, self.field).name
        docs = self.document.objects
        return [], [name(d) for d in docs if name(d)]

    def size(self, name):
        """Returns the total size, in bytes, of the file specified by name.
        """
        doc = self._get_doc_with_name(name)
        if doc:
            return getattr(doc, self.field).length
        else:
            raise ValueError("No such file or directory: '%s'" % name)

    def url(self, name):
        """Returns an absolute URL where the file's contents can be accessed
        directly by a web browser.
        """
        if self.base_url is None:
            raise ValueError("This file is not accessible via a URL.")
        return urlparse.urljoin(self.base_url, name).replace('\\', '/')

    def _get_doc_with_name(self, name):
        """Find the documents in the store with the given name
        """
        docs = self.document.objects
        doc = [d for d in docs if hasattr(getattr(d, self.field), 'name') and getattr(d, self.field).name == name]
        if doc:
            return doc[0]
        else:
            return None

    def _open(self, name, mode='rb'):
        doc = self._get_doc_with_name(name)
        if doc:
            return getattr(doc, self.field)
        else:
            raise ValueError("No file found with the name '%s'." % name)

    def get_available_name(self, name):
        """Returns a filename that's free on the target storage system, and
        available for new content to be written to.
        """
        file_root, file_ext = os.path.splitext(name)
        # If the filename already exists, add an underscore and a number
        # (before the file extension, if one exists) to the filename until
        # the generated filename doesn't exist.
        count = itertools.count(1)
        while self.exists(name):
            # file_ext includes the dot.
            name = os.path.join("%s_%s%s" % (file_root, count.next(), file_ext))
        return name

    def _save(self, name, content):
        doc = self.document()
        getattr(doc, self.field).put(content, filename=name)
        doc.save()
        return name
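A sketch of the backend above in use (the database name is illustrative; reads and writes go through Django's Storage API):

from django.core.files.base import ContentFile
from mongoengine import connect
from mongoengine.django.storage import GridFSStorage

connect('mydb')  # hypothetical database name
storage = GridFSStorage(base_url='/media/')
name = storage.save('hello.txt', ContentFile('Hello, GridFS'))
print storage.exists(name)       # True
print storage.open(name).read()  # 'Hello, GridFS'
storage.delete(name)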

mongoengine/django/tests.py (new file)

@@ -0,0 +1,31 @@
#coding: utf-8
from unittest import TestCase

from mongoengine import connect
from mongoengine.connection import get_db


class MongoTestCase(TestCase):
    """
    TestCase class that clears the collections between the tests
    """

    @property
    def db_name(self):
        from django.conf import settings
        return 'test_%s' % getattr(settings, 'MONGO_DATABASE_NAME', 'dummy')

    def __init__(self, methodName='runTest'):
        connect(self.db_name)
        self.db = get_db()
        super(MongoTestCase, self).__init__(methodName)

    def dropCollections(self):
        for collection in self.db.collection_names():
            if collection == 'system.indexes':
                continue
            self.db.drop_collection(collection)

    def tearDown(self):
        self.dropCollections()
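A sketch of the helper above in use (assumes configured Django settings and a running mongod; the Person document is hypothetical):

from mongoengine import Document, StringField
from mongoengine.django.tests import MongoTestCase

class Person(Document):  # hypothetical document
    name = StringField()

class PersonTests(MongoTestCase):
    def test_save(self):
        Person(name='Ada').save()
        # collections are dropped again in tearDown()
        self.assertEqual(Person.objects.count(), 1)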

mongoengine/django/utils.py (new file)

@@ -0,0 +1,6 @@
try:
    # django >= 1.4
    from django.utils.timezone import now as datetime_now
except ImportError:
    from datetime import datetime
    datetime_now = datetime.now

mongoengine/document.py

@@ -1,29 +1,22 @@
-import re
 import warnings
-
-from bson.dbref import DBRef
+import hashlib
 import pymongo
-from pymongo.read_preferences import ReadPreference
+import re
+from pymongo.read_preferences import ReadPreference
+
+from bson import ObjectId
+from bson.dbref import DBRef

 from mongoengine import signals
-from mongoengine.base import (
-    ALLOW_INHERITANCE,
-    BaseDict,
-    BaseDocument,
-    BaseList,
-    DocumentMetaclass,
-    EmbeddedDocumentList,
-    TopLevelDocumentMetaclass,
-    get_document
-)
 from mongoengine.common import _import_class
-from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
-from mongoengine.context_managers import switch_collection, switch_db
-from mongoengine.errors import (InvalidDocumentError, InvalidQueryError,
-                                SaveConditionError)
-from mongoengine.python_support import IS_PYMONGO_3
-from mongoengine.queryset import (NotUniqueError, OperationError,
+from mongoengine.base import (DocumentMetaclass, TopLevelDocumentMetaclass,
+                              BaseDocument, BaseDict, BaseList,
+                              ALLOW_INHERITANCE, get_document)
+from mongoengine.errors import ValidationError
+from mongoengine.queryset import (OperationError, NotUniqueError,
                                   QuerySet, transform)
+from mongoengine.connection import get_db, DEFAULT_CONNECTION_NAME
+from mongoengine.context_managers import switch_db, switch_collection

 __all__ = ('Document', 'EmbeddedDocument', 'DynamicDocument',
            'DynamicEmbeddedDocument', 'OperationError',
@@ -48,6 +41,7 @@ class InvalidCollectionError(Exception):

+
 class EmbeddedDocument(BaseDocument):
     """A :class:`~mongoengine.Document` that isn't stored in its own
     collection.  :class:`~mongoengine.EmbeddedDocument`\ s should be used as
     fields on :class:`~mongoengine.Document`\ s through the
@@ -62,7 +56,7 @@ class EmbeddedDocument(BaseDocument):
     dictionary.
     """
-    __slots__ = ('_instance', )
+    __slots__ = ('_instance')

     # The __metaclass__ attribute is removed by 2to3 when running with Python3
     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
@@ -82,14 +76,9 @@ class EmbeddedDocument(BaseDocument):
     def __ne__(self, other):
         return not self.__eq__(other)

-    def save(self, *args, **kwargs):
-        self._instance.save(*args, **kwargs)
-
-    def reload(self, *args, **kwargs):
-        self._instance.reload(*args, **kwargs)
-

 class Document(BaseDocument):
     """The base class used for defining the structure and properties of
     collections of documents stored in MongoDB.  Inherit from this class, and
     add fields as class attributes to define a document's structure.
@@ -114,11 +103,9 @@ class Document(BaseDocument):
     specifying :attr:`max_documents` and :attr:`max_size` in the :attr:`meta`
     dictionary.  :attr:`max_documents` is the maximum number of documents that
     is allowed to be stored in the collection, and :attr:`max_size` is the
-    maximum size of the collection in bytes. :attr:`max_size` is rounded up
-    to the next multiple of 256 by MongoDB internally and mongoengine before.
-    Use also a multiple of 256 to avoid confusions. If :attr:`max_size` is not
+    maximum size of the collection in bytes. If :attr:`max_size` is not
     specified and :attr:`max_documents` is, :attr:`max_size` defaults to
-    10485760 bytes (10MB).
+    10000000 bytes (10MB).

     Indexes may be created by specifying :attr:`indexes` in the :attr:`meta`
     dictionary. The value should be a list of field names or tuples of field
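For context, the capped-collection meta options described on both sides of this hunk look like this in a model definition (a sketch; the Log document is hypothetical):

from mongoengine import Document, StringField

class Log(Document):
    line = StringField()
    meta = {
        'max_documents': 1000,  # cap on the number of documents
        'max_size': 512000,     # cap in bytes; a multiple of 256 avoids surprises
    }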
@@ -126,7 +113,7 @@ class Document(BaseDocument):
     a **+** or **-** sign.

     Automatic index creation can be disabled by specifying
-    :attr:`auto_create_index` in the :attr:`meta` dictionary. If this is set to
+    attr:`auto_create_index` in the :attr:`meta` dictionary. If this is set to
     False then indexes will not be created by MongoEngine.  This is useful in
     production systems where index creation is performed as part of a
     deployment system.
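Likewise, a sketch of the indexes and auto_create_index meta keys discussed above (Page is a hypothetical document):

from mongoengine import Document, IntField, StringField

class Page(Document):
    category = IntField()
    title = StringField()
    meta = {
        'indexes': ['title', ('category', '-title')],  # '-' means descending
        'auto_create_index': False,  # leave index creation to deployment tooling
    }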
@@ -135,11 +122,6 @@ class Document(BaseDocument):
     doesn't contain a list) if allow_inheritance is True.  This can be
     disabled by either setting cls to False on the specific index or
     by setting index_cls to False on the meta dictionary for the document.
-
-    By default, any extra attribute existing in stored data but not declared
-    in your model will raise a :class:`~mongoengine.FieldDoesNotExist` error.
-    This can be disabled by setting :attr:`strict` to ``False``
-    in the :attr:`meta` dictionary.
     """

     # The __metaclass__ attribute is removed by 2to3 when running with Python3
@@ -147,24 +129,27 @@ class Document(BaseDocument):
     my_metaclass = TopLevelDocumentMetaclass
     __metaclass__ = TopLevelDocumentMetaclass

-    __slots__ = ('__objects',)
+    __slots__ = ('__objects')

     def pk():
         """Primary key alias
         """
         def fget(self):
-            if 'id_field' not in self._meta:
-                return None
             return getattr(self, self._meta['id_field'])

         def fset(self, value):
             return setattr(self, self._meta['id_field'], value)

         return property(fget, fset)
     pk = pk()

+    @property
+    def text_score(self):
+        """
+        Used for text searchs
+        """
+        return self._data.get('text_score')
+
     @classmethod
     def _get_db(cls):
         """Some Model using other db_alias"""
@@ -173,18 +158,14 @@ class Document(BaseDocument):
     @classmethod
     def _get_collection(cls):
         """Returns the collection for the document."""
-        # TODO: use new get_collection() with PyMongo3 ?
         if not hasattr(cls, '_collection') or cls._collection is None:
             db = cls._get_db()
             collection_name = cls._get_collection_name()
             # Create collection as a capped collection if specified
-            if cls._meta.get('max_size') or cls._meta.get('max_documents'):
+            if cls._meta['max_size'] or cls._meta['max_documents']:
                 # Get max document limit and max byte size from meta
-                max_size = cls._meta.get('max_size') or 10 * 2 ** 20  # 10MB default
-                max_documents = cls._meta.get('max_documents')
-                # Round up to next 256 bytes as MongoDB would do it to avoid exception
-                if max_size % 256:
-                    max_size = (max_size // 256 + 1) * 256
+                max_size = cls._meta['max_size'] or 10000000  # 10MB default
+                max_documents = cls._meta['max_documents']

                 if collection_name in db.collection_names():
                     cls._collection = db[collection_name]
@@ -192,7 +173,7 @@ class Document(BaseDocument):
                     # options match the specified capped options
                     options = cls._collection.options()
                     if options.get('max') != max_documents or \
-                            options.get('size') != max_size:
+                       options.get('size') != max_size:
                         msg = (('Cannot create collection "%s" as a capped '
                                 'collection as it already exists')
                                % cls._collection)
@@ -211,47 +192,9 @@ class Document(BaseDocument):
             cls.ensure_indexes()
         return cls._collection

-    def modify(self, query={}, **update):
-        """Perform an atomic update of the document in the database and reload
-        the document object using updated version.
-
-        Returns True if the document has been updated or False if the document
-        in the database doesn't match the query.
-
-        .. note:: All unsaved changes that have been made to the document are
-            rejected if the method returns True.
-
-        :param query: the update will be performed only if the document in the
-            database matches the query
-        :param update: Django-style update keyword arguments
-        """
-        if self.pk is None:
-            raise InvalidDocumentError("The document does not have a primary key.")
-
-        id_field = self._meta["id_field"]
-        query = query.copy() if isinstance(query, dict) else query.to_query(self)
-
-        if id_field not in query:
-            query[id_field] = self.pk
-        elif query[id_field] != self.pk:
-            raise InvalidQueryError("Invalid document modify query: it must modify only this document.")
-
-        updated = self._qs(**query).modify(new=True, **update)
-        if updated is None:
-            return False
-
-        for field in self._fields_ordered:
-            setattr(self, field, self._reload(field, updated[field]))
-
-        self._changed_fields = updated._changed_fields
-        self._created = False
-
-        return True
-
     def save(self, force_insert=False, validate=True, clean=True,
              write_concern=None, cascade=None, cascade_kwargs=None,
-             _refs=None, save_condition=None, signal_kwargs=None, **kwargs):
+             _refs=None, save_condition=None, **kwargs):
         """Save the :class:`~mongoengine.Document` to the database. If the
         document already exists, it will be updated, otherwise it will be
         created.
@@ -275,11 +218,7 @@ class Document(BaseDocument):
             to cascading saves.  Implies ``cascade=True``.
         :param _refs: A list of processed references used in cascading saves
         :param save_condition: only perform save if matching record in db
-            satisfies condition(s) (e.g. version number).
-            Raises :class:`OperationError` if the conditions are not satisfied
-        :parm signal_kwargs: (optional) kwargs dictionary to be passed to
-            the signal calls.
+            satisfies condition(s) (e.g., version number)

         .. versionchanged:: 0.5
             In existing documents it only saves changed fields using
             set / unset.  Saves are cascaded and any
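For context, save_condition (kept on both sides of this hunk) enables optimistic concurrency; a sketch with a hypothetical versioned document:

from mongoengine import Document, IntField

class Item(Document):  # hypothetical document with a version counter
    version = IntField(default=0)
    price = IntField()

item = Item.objects.first()
item.price = 42
item.version += 1
# The write only happens if the stored record still carries the old version;
# the removed side raises on a failed condition, the kept side does not.
item.save(save_condition={'version': item.version - 1})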
@@ -296,15 +235,8 @@ class Document(BaseDocument):
.. versionchanged:: 0.8.5 .. versionchanged:: 0.8.5
Optional save_condition that only overwrites existing documents Optional save_condition that only overwrites existing documents
if the condition is satisfied in the current db record. if the condition is satisfied in the current db record.
.. versionchanged:: 0.10
:class:`OperationError` exception raised if save_condition fails.
.. versionchanged:: 0.10.1
:class: save_condition failure now raises a `SaveConditionError`
.. versionchanged:: 0.10.7
Add signal_kwargs argument
""" """
signal_kwargs = signal_kwargs or {} signals.pre_save.send(self.__class__, document=self)
signals.pre_save.send(self.__class__, document=self, **signal_kwargs)
if validate: if validate:
self.validate(clean=clean) self.validate(clean=clean)
@@ -317,26 +249,15 @@ class Document(BaseDocument):
created = ('_id' not in doc or self._created or force_insert) created = ('_id' not in doc or self._created or force_insert)
signals.pre_save_post_validation.send(self.__class__, document=self, signals.pre_save_post_validation.send(self.__class__, document=self,
created=created, **signal_kwargs) created=created)
try: try:
collection = self._get_collection() collection = self._get_collection()
if self._meta.get('auto_create_index', True):
self.ensure_indexes()
if created: if created:
if force_insert: if force_insert:
object_id = collection.insert(doc, **write_concern) object_id = collection.insert(doc, **write_concern)
else: else:
object_id = collection.save(doc, **write_concern) object_id = collection.save(doc, **write_concern)
# In PyMongo 3.0, the save() call calls internally the _update() call
# but they forget to return the _id value passed back, therefore getting it back here
# Correct behaviour in 2.X and in 3.0.1+ versions
if not object_id and pymongo.version_tuple == (3, 0):
pk_as_mongo_obj = self._fields.get(self._meta['id_field']).to_mongo(self.pk)
object_id = (
self._qs.filter(pk=pk_as_mongo_obj).first() and
self._qs.filter(pk=pk_as_mongo_obj).first().pk
) # TODO doesn't this make 2 queries?
else: else:
object_id = doc['_id'] object_id = doc['_id']
updates, removals = self._delta() updates, removals = self._delta()
@@ -349,12 +270,8 @@ class Document(BaseDocument):
select_dict['_id'] = object_id select_dict['_id'] = object_id
shard_key = self.__class__._meta.get('shard_key', tuple()) shard_key = self.__class__._meta.get('shard_key', tuple())
for k in shard_key: for k in shard_key:
path = self._lookup_field(k.split('.')) actual_key = self._db_field_map.get(k, k)
actual_key = [p.db_field for p in path] select_dict[actual_key] = doc[actual_key]
val = doc
for ak in actual_key:
val = val[ak]
select_dict['.'.join(actual_key)] = val
def is_new_object(last_error): def is_new_object(last_error):
if last_error is not None: if last_error is not None:
@@ -373,9 +290,6 @@ class Document(BaseDocument):
upsert = save_condition is None upsert = save_condition is None
last_error = collection.update(select_dict, update_query, last_error = collection.update(select_dict, update_query,
upsert=upsert, **write_concern) upsert=upsert, **write_concern)
if not upsert and last_error["n"] == 0:
raise SaveConditionError('Race condition preventing'
' document update detected')
created = is_new_object(last_error) created = is_new_object(last_error)
if cascade is None: if cascade is None:
@@ -408,15 +322,14 @@ class Document(BaseDocument):
if created or id_field not in self._meta.get('shard_key', []): if created or id_field not in self._meta.get('shard_key', []):
self[id_field] = self._fields[id_field].to_python(object_id) self[id_field] = self._fields[id_field].to_python(object_id)
signals.post_save.send(self.__class__, document=self, signals.post_save.send(self.__class__, document=self, created=created)
created=created, **signal_kwargs)
self._clear_changed_fields() self._clear_changed_fields()
self._created = False self._created = False
return self return self
def cascade_save(self, *args, **kwargs): def cascade_save(self, *args, **kwargs):
"""Recursively saves any references / """Recursively saves any references /
generic references on the document""" generic references on an objects"""
_refs = kwargs.get('_refs', []) or [] _refs = kwargs.get('_refs', []) or []
ReferenceField = _import_class('ReferenceField') ReferenceField = _import_class('ReferenceField')
@@ -457,12 +370,7 @@ class Document(BaseDocument):
select_dict = {'pk': self.pk} select_dict = {'pk': self.pk}
shard_key = self.__class__._meta.get('shard_key', tuple()) shard_key = self.__class__._meta.get('shard_key', tuple())
for k in shard_key: for k in shard_key:
path = self._lookup_field(k.split('.')) select_dict[k] = getattr(self, k)
actual_key = [p.db_field for p in path]
val = self
for ak in actual_key:
val = getattr(val, ak)
select_dict['__'.join(actual_key)] = val
return select_dict return select_dict
def update(self, **kwargs): def update(self, **kwargs):
@@ -472,11 +380,11 @@ class Document(BaseDocument):
Raises :class:`OperationError` if called on an object that has not yet Raises :class:`OperationError` if called on an object that has not yet
been saved. been saved.
""" """
if self.pk is None: if not self.pk:
if kwargs.get('upsert', False): if kwargs.get('upsert', False):
query = self.to_mongo() query = self.to_mongo()
if "_cls" in query: if "_cls" in query:
del query["_cls"] del(query["_cls"])
return self._qs.filter(**query).update_one(**kwargs) return self._qs.filter(**query).update_one(**kwargs)
else: else:
raise OperationError( raise OperationError(
@@ -485,30 +393,18 @@ class Document(BaseDocument):
# Need to add shard key to query, or you get an error # Need to add shard key to query, or you get an error
return self._qs.filter(**self._object_key).update_one(**kwargs) return self._qs.filter(**self._object_key).update_one(**kwargs)
def delete(self, signal_kwargs=None, **write_concern): def delete(self, **write_concern):
"""Delete the :class:`~mongoengine.Document` from the database. This """Delete the :class:`~mongoengine.Document` from the database. This
will only take effect if the document has been previously saved. will only take effect if the document has been previously saved.
:parm signal_kwargs: (optional) kwargs dictionary to be passed to
the signal calls.
:param write_concern: Extra keyword arguments are passed down which :param write_concern: Extra keyword arguments are passed down which
will be used as options for the resultant will be used as options for the resultant
``getLastError`` command. For example, ``getLastError`` command. For example,
``save(..., write_concern={w: 2, fsync: True}, ...)`` will ``save(..., write_concern={w: 2, fsync: True}, ...)`` will
wait until at least two servers have recorded the write and wait until at least two servers have recorded the write and
will force an fsync on the primary server. will force an fsync on the primary server.
.. versionchanged:: 0.10.7
Add signal_kwargs argument
""" """
signal_kwargs = signal_kwargs or {} signals.pre_delete.send(self.__class__, document=self)
signals.pre_delete.send(self.__class__, document=self, **signal_kwargs)
# Delete FileFields separately
FileField = _import_class('FileField')
for name, field in self._fields.iteritems():
if isinstance(field, FileField):
getattr(self, name).delete()
try: try:
self._qs.filter( self._qs.filter(
@@ -516,9 +412,9 @@ class Document(BaseDocument):
except pymongo.errors.OperationFailure, err: except pymongo.errors.OperationFailure, err:
message = u'Could not delete document (%s)' % err.message message = u'Could not delete document (%s)' % err.message
raise OperationError(message) raise OperationError(message)
signals.post_delete.send(self.__class__, document=self, **signal_kwargs) signals.post_delete.send(self.__class__, document=self)
def switch_db(self, db_alias, keep_created=True): def switch_db(self, db_alias):
""" """
Temporarily switch the database for a document instance. Temporarily switch the database for a document instance.
@@ -528,14 +424,10 @@ class Document(BaseDocument):
user.switch_db('archive-db') user.switch_db('archive-db')
user.save() user.save()
:param str db_alias: The database alias to use for saving the document If you need to read from another database see
:class:`~mongoengine.context_managers.switch_db`
:param bool keep_created: keep self._created value after switching db, else is reset to True :param db_alias: The database alias to use for saving the document
.. seealso::
Use :class:`~mongoengine.context_managers.switch_collection`
if you need to read from another collection
""" """
with switch_db(self.__class__, db_alias) as cls: with switch_db(self.__class__, db_alias) as cls:
collection = cls._get_collection() collection = cls._get_collection()
@@ -543,12 +435,12 @@ class Document(BaseDocument):
self._get_collection = lambda: collection self._get_collection = lambda: collection
self._get_db = lambda: db self._get_db = lambda: db
self._collection = collection self._collection = collection
self._created = True if not keep_created else self._created self._created = True
self.__objects = self._qs self.__objects = self._qs
self.__objects._collection_obj = collection self.__objects._collection_obj = collection
return self return self
def switch_collection(self, collection_name, keep_created=True): def switch_collection(self, collection_name):
""" """
Temporarily switch the collection for a document instance. Temporarily switch the collection for a document instance.
@@ -558,21 +450,17 @@ class Document(BaseDocument):
user.switch_collection('old-users') user.switch_collection('old-users')
user.save() user.save()
:param str collection_name: The database alias to use for saving the If you need to read from another database see
:class:`~mongoengine.context_managers.switch_db`
:param collection_name: The database alias to use for saving the
document document
:param bool keep_created: keep self._created value after switching collection, else is reset to True
.. seealso::
Use :class:`~mongoengine.context_managers.switch_db`
if you need to read from another database
""" """
with switch_collection(self.__class__, collection_name) as cls: with switch_collection(self.__class__, collection_name) as cls:
collection = cls._get_collection() collection = cls._get_collection()
self._get_collection = lambda: collection self._get_collection = lambda: collection
self._collection = collection self._collection = collection
self._created = True if not keep_created else self._created self._created = True
self.__objects = self._qs self.__objects = self._qs
self.__objects._collection_obj = collection self.__objects._collection_obj = collection
return self return self
@@ -604,31 +492,20 @@ class Document(BaseDocument):
elif "max_depth" in kwargs: elif "max_depth" in kwargs:
max_depth = kwargs["max_depth"] max_depth = kwargs["max_depth"]
if self.pk is None: if not self.pk:
raise self.DoesNotExist("Document does not exist") raise self.DoesNotExist("Document does not exist")
obj = self._qs.read_preference(ReadPreference.PRIMARY).filter( obj = self._qs.read_preference(ReadPreference.PRIMARY).filter(
**self._object_key).only(*fields).limit( **self._object_key).only(*fields).limit(1
1).select_related(max_depth=max_depth) ).select_related(max_depth=max_depth)
if obj: if obj:
obj = obj[0] obj = obj[0]
else: else:
raise self.DoesNotExist("Document does not exist") raise self.DoesNotExist("Document does not exist")
for field in obj._data: for field in self._fields_ordered:
if not fields or field in fields: if not fields or field in fields:
try: setattr(self, field, self._reload(field, obj[field]))
setattr(self, field, self._reload(field, obj[field]))
except (KeyError, AttributeError):
try:
# If field is a special field, e.g. items is stored as _reserved_items,
# an KeyError is thrown. So try to retrieve the field from _data
setattr(self, field, self._reload(field, obj._data.get(field)))
except KeyError:
# If field is removed from the database while the object
# is in memory, a reload would cause a KeyError
# i.e. obj.update(unset__field=1) followed by obj.reload()
delattr(self, field)
self._changed_fields = obj._changed_fields self._changed_fields = obj._changed_fields
self._created = False self._created = False
@@ -641,9 +518,6 @@ class Document(BaseDocument):
if isinstance(value, BaseDict): if isinstance(value, BaseDict):
value = [(k, self._reload(k, v)) for k, v in value.items()] value = [(k, self._reload(k, v)) for k, v in value.items()]
value = BaseDict(value, self, key) value = BaseDict(value, self, key)
elif isinstance(value, EmbeddedDocumentList):
value = [self._reload(key, v) for v in value]
value = EmbeddedDocumentList(value, self, key)
elif isinstance(value, BaseList): elif isinstance(value, BaseList):
value = [self._reload(key, v) for v in value] value = [self._reload(key, v) for v in value]
value = BaseList(value, self, key) value = BaseList(value, self, key)
@@ -655,7 +529,7 @@ class Document(BaseDocument):
def to_dbref(self): def to_dbref(self):
"""Returns an instance of :class:`~bson.dbref.DBRef` useful in """Returns an instance of :class:`~bson.dbref.DBRef` useful in
`__raw__` queries.""" `__raw__` queries."""
if self.pk is None: if not self.pk:
msg = "Only saved documents can have a valid dbref" msg = "Only saved documents can have a valid dbref"
raise OperationError(msg) raise OperationError(msg)
return DBRef(self.__class__._get_collection_name(), self.pk) return DBRef(self.__class__._get_collection_name(), self.pk)
@@ -672,76 +546,38 @@ class Document(BaseDocument):
for class_name in document_cls._subclasses for class_name in document_cls._subclasses
if class_name != document_cls.__name__] + [document_cls] if class_name != document_cls.__name__] + [document_cls]
for klass in classes: for cls in classes:
for document_cls in documents: for document_cls in documents:
delete_rules = klass._meta.get('delete_rules') or {} delete_rules = cls._meta.get('delete_rules') or {}
delete_rules[(document_cls, field_name)] = rule delete_rules[(document_cls, field_name)] = rule
klass._meta['delete_rules'] = delete_rules cls._meta['delete_rules'] = delete_rules
@classmethod @classmethod
def drop_collection(cls): def drop_collection(cls):
"""Drops the entire collection associated with this """Drops the entire collection associated with this
:class:`~mongoengine.Document` type from the database. :class:`~mongoengine.Document` type from the database.
Raises :class:`OperationError` if the document has no collection set
(i.g. if it is `abstract`)
.. versionchanged:: 0.10.7
:class:`OperationError` exception raised if no collection available
""" """
col_name = cls._get_collection_name()
if not col_name:
raise OperationError('Document %s has no collection defined '
'(is it abstract ?)' % cls)
cls._collection = None cls._collection = None
db = cls._get_db() db = cls._get_db()
db.drop_collection(col_name) db.drop_collection(cls._get_collection_name())
@classmethod
def create_index(cls, keys, background=False, **kwargs):
"""Creates the given indexes if required.
:param keys: a single index key or a list of index keys (to
construct a multi-field index); keys may be prefixed with a **+**
or a **-** to determine the index ordering
:param background: Allows index creation in the background
"""
index_spec = cls._build_index_spec(keys)
index_spec = index_spec.copy()
fields = index_spec.pop('fields')
drop_dups = kwargs.get('drop_dups', False)
if IS_PYMONGO_3 and drop_dups:
msg = "drop_dups is deprecated and is removed when using PyMongo 3+."
warnings.warn(msg, DeprecationWarning)
elif not IS_PYMONGO_3:
index_spec['drop_dups'] = drop_dups
index_spec['background'] = background
index_spec.update(kwargs)
if IS_PYMONGO_3:
return cls._get_collection().create_index(fields, **index_spec)
else:
return cls._get_collection().ensure_index(fields, **index_spec)
@classmethod @classmethod
def ensure_index(cls, key_or_list, drop_dups=False, background=False, def ensure_index(cls, key_or_list, drop_dups=False, background=False,
**kwargs): **kwargs):
"""Ensure that the given indexes are in place. Deprecated in favour """Ensure that the given indexes are in place.
of create_index.
:param key_or_list: a single index key or a list of index keys (to :param key_or_list: a single index key or a list of index keys (to
construct a multi-field index); keys may be prefixed with a **+** construct a multi-field index); keys may be prefixed with a **+**
or a **-** to determine the index ordering or a **-** to determine the index ordering
:param background: Allows index creation in the background
:param drop_dups: Was removed/ignored with MongoDB >2.7.5. The value
will be removed if PyMongo3+ is used
""" """
if IS_PYMONGO_3 and drop_dups: index_spec = cls._build_index_spec(key_or_list)
msg = "drop_dups is deprecated and is removed when using PyMongo 3+." index_spec = index_spec.copy()
warnings.warn(msg, DeprecationWarning) fields = index_spec.pop('fields')
elif not IS_PYMONGO_3: index_spec['drop_dups'] = drop_dups
kwargs.update({'drop_dups': drop_dups}) index_spec['background'] = background
return cls.create_index(key_or_list, background=background, **kwargs) index_spec.update(kwargs)
return cls._get_collection().ensure_index(fields, **index_spec)
@classmethod @classmethod
def ensure_indexes(cls): def ensure_indexes(cls):
@@ -756,9 +592,6 @@ class Document(BaseDocument):
drop_dups = cls._meta.get('index_drop_dups', False) drop_dups = cls._meta.get('index_drop_dups', False)
index_opts = cls._meta.get('index_opts') or {} index_opts = cls._meta.get('index_opts') or {}
index_cls = cls._meta.get('index_cls', True) index_cls = cls._meta.get('index_cls', True)
if IS_PYMONGO_3 and drop_dups:
msg = "drop_dups is deprecated and is removed when using PyMongo 3+."
warnings.warn(msg, DeprecationWarning)
collection = cls._get_collection() collection = cls._get_collection()
# 746: when connection is via mongos, the read preference is not necessarily an indication that # 746: when connection is via mongos, the read preference is not necessarily an indication that
@@ -781,37 +614,18 @@ class Document(BaseDocument):
cls_indexed = cls_indexed or includes_cls(fields) cls_indexed = cls_indexed or includes_cls(fields)
opts = index_opts.copy() opts = index_opts.copy()
opts.update(spec) opts.update(spec)
collection.ensure_index(fields, background=background,
# we shouldn't pass 'cls' to the collection.ensureIndex options drop_dups=drop_dups, **opts)
# because of https://jira.mongodb.org/browse/SERVER-769
if 'cls' in opts:
del opts['cls']
if IS_PYMONGO_3:
collection.create_index(fields, background=background, **opts)
else:
collection.ensure_index(fields, background=background,
drop_dups=drop_dups, **opts)
# If _cls is being used (for polymorphism), it needs an index, # If _cls is being used (for polymorphism), it needs an index,
# only if another index doesn't begin with _cls # only if another index doesn't begin with _cls
if (index_cls and not cls_indexed and if (index_cls and not cls_indexed and
cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) is True): cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) is True):
collection.ensure_index('_cls', background=background,
# we shouldn't pass 'cls' to the collection.ensureIndex options **index_opts)
# because of https://jira.mongodb.org/browse/SERVER-769
if 'cls' in index_opts:
del index_opts['cls']
if IS_PYMONGO_3:
collection.create_index('_cls', background=background,
**index_opts)
else:
collection.ensure_index('_cls', background=background,
**index_opts)
@classmethod @classmethod
def list_indexes(cls): def list_indexes(cls, go_up=True, go_down=True):
""" Lists all of the indexes that should be created for given """ Lists all of the indexes that should be created for given
collection. It includes all the indexes from super- and sub-classes. collection. It includes all the indexes from super- and sub-classes.
""" """
@@ -819,7 +633,7 @@ class Document(BaseDocument):
if cls._meta.get('abstract'): if cls._meta.get('abstract'):
return [] return []
# get all the base classes, subclasses and siblings # get all the base classes, subclasses and sieblings
classes = [] classes = []
def get_classes(cls): def get_classes(cls):
@@ -858,8 +672,8 @@ class Document(BaseDocument):
return indexes return indexes
indexes = [] indexes = []
for klass in classes: for cls in classes:
for index in get_indexes_spec(klass): for index in get_indexes_spec(cls):
if index not in indexes: if index not in indexes:
indexes.append(index) indexes.append(index)
@@ -898,6 +712,7 @@ class Document(BaseDocument):
class DynamicDocument(Document): class DynamicDocument(Document):
"""A Dynamic Document class allowing flexible, expandable and uncontrolled """A Dynamic Document class allowing flexible, expandable and uncontrolled
schemas. As a :class:`~mongoengine.Document` subclass, acts in the same schemas. As a :class:`~mongoengine.Document` subclass, acts in the same
way as an ordinary document but has expando style properties. Any data way as an ordinary document but has expando style properties. Any data
@@ -929,6 +744,7 @@ class DynamicDocument(Document):

 class DynamicEmbeddedDocument(EmbeddedDocument):
     """A Dynamic Embedded Document class allowing flexible, expandable and
     uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more
     information about dynamic documents.
@@ -955,6 +771,7 @@ class DynamicEmbeddedDocument(EmbeddedDocument):

 class MapReduceDocument(object):
     """A document returned from a map/reduce query.

     :param collection: An instance of :class:`~pymongo.Collection`
@@ -984,7 +801,7 @@ class MapReduceDocument(object):
         if not isinstance(self.key, id_field_type):
             try:
                 self.key = id_field_type(self.key)
-            except Exception:
+            except:
                 raise Exception("Could not cast key as %s" %
                                 id_field_type.__name__)

mongoengine/errors.py

@@ -5,8 +5,7 @@ from mongoengine.python_support import txt_type

 __all__ = ('NotRegistered', 'InvalidDocumentError', 'LookUpError',
            'DoesNotExist', 'MultipleObjectsReturned', 'InvalidQueryError',
-           'OperationError', 'NotUniqueError', 'FieldDoesNotExist',
-           'ValidationError', 'SaveConditionError')
+           'OperationError', 'NotUniqueError', 'ValidationError')


 class NotRegistered(Exception):
@@ -41,21 +40,6 @@ class NotUniqueError(OperationError):
     pass


-class SaveConditionError(OperationError):
-    pass
-
-
-class FieldDoesNotExist(Exception):
-    """Raised when trying to set a field
-    not declared in a :class:`~mongoengine.Document`
-    or an :class:`~mongoengine.EmbeddedDocument`.
-
-    To avoid this behavior on data loading,
-    you should set :attr:`strict` to ``False``
-    in the :attr:`meta` dictionary.
-    """
-

 class ValidationError(AssertionError):
     """Validation exception.
class ValidationError(AssertionError): class ValidationError(AssertionError):
"""Validation exception. """Validation exception.
@@ -119,7 +103,6 @@ class ValidationError(AssertionError):
else: else:
return unicode(source) return unicode(source)
return errors_dict return errors_dict
if not self.errors: if not self.errors:
return {} return {}
return build_dict(self.errors) return build_dict(self.errors)
@@ -130,9 +113,9 @@ class ValidationError(AssertionError):
         def generate_key(value, prefix=''):
             if isinstance(value, list):
                 value = ' '.join([generate_key(k) for k in value])
-            elif isinstance(value, dict):
+            if isinstance(value, dict):
                 value = ' '.join(
                     [generate_key(v, k) for k, v in value.iteritems()])

             results = "%s.%s" % (prefix, value) if prefix else value
             return results

(File diff suppressed because it is too large.)

mongoengine/python_support.py

@@ -1,26 +1,18 @@
 """Helper functions and types to aid with Python 2.5 - 3 support."""

 import sys

-import pymongo
-
-if pymongo.version_tuple[0] < 3:
-    IS_PYMONGO_3 = False
-else:
-    IS_PYMONGO_3 = True

 PY3 = sys.version_info[0] == 3

 if PY3:
     import codecs
     from io import BytesIO as StringIO

     # return s converted to binary.  b('test') should be equivalent to b'test'
     def b(s):
         return codecs.latin_1_encode(s)[0]

     bin_type = bytes
     txt_type = str
 else:
     try:
         from cStringIO import StringIO

mongoengine/queryset/__init__.py

@@ -1,6 +1,6 @@
-from mongoengine.errors import (DoesNotExist, InvalidQueryError,
-                                MultipleObjectsReturned, NotUniqueError,
-                                OperationError)
+from mongoengine.errors import (DoesNotExist, MultipleObjectsReturned,
+                                InvalidQueryError, OperationError,
+                                NotUniqueError)
 from mongoengine.queryset.field_list import *
 from mongoengine.queryset.manager import *
 from mongoengine.queryset.queryset import *

mongoengine/queryset/base.py

@@ -7,27 +7,24 @@ import pprint
 import re
 import warnings

-from bson import SON, json_util
+from bson import SON
 from bson.code import Code
+from bson import json_util
 import pymongo
 import pymongo.errors
 from pymongo.common import validate_read_preference

 from mongoengine import signals
-from mongoengine.base.common import get_document
-from mongoengine.common import _import_class
 from mongoengine.connection import get_db
 from mongoengine.context_managers import switch_db
-from mongoengine.errors import (InvalidQueryError, LookUpError,
-                                NotUniqueError, OperationError)
-from mongoengine.python_support import IS_PYMONGO_3
+from mongoengine.common import _import_class
+from mongoengine.base.common import get_document
+from mongoengine.errors import (OperationError, NotUniqueError,
+                                InvalidQueryError, LookUpError)
 from mongoengine.queryset import transform
 from mongoengine.queryset.field_list import QueryFieldList
 from mongoengine.queryset.visitor import Q, QNode

-if IS_PYMONGO_3:
-    from pymongo.collection import ReturnDocument

 __all__ = ('BaseQuerySet', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL')
@@ -42,6 +39,7 @@ RE_TYPE = type(re.compile(''))

 class BaseQuerySet(object):
     """A set of results returned from a query. Wraps a MongoDB cursor,
     providing :class:`~mongoengine.Document` objects as the results.
     """
@@ -68,6 +66,7 @@ class BaseQuerySet(object):
         self._as_pymongo = False
         self._as_pymongo_coerce = False
         self._search_text = None
+        self._include_text_scores = False

         # If inheritance is allowed, only return instances and instances of
         # subclasses of the class being used
@@ -83,10 +82,9 @@ class BaseQuerySet(object):
         self._skip = None
         self._hint = -1  # Using -1 as None is a valid value for hint
         self.only_fields = []
-        self._max_time_ms = None

-    def __call__(self, q_obj=None, class_check=True, read_preference=None,
-                 **query):
+    def __call__(self, q_obj=None, class_check=True, slave_okay=False,
+                 read_preference=None, **query):
         """Filter the selected documents by calling the
         :class:`~mongoengine.queryset.QuerySet` with a query.
@@ -96,7 +94,9 @@ class BaseQuerySet(object):
             objects, only the last one will be used
         :param class_check: If set to False bypass class name check when
             querying collection
-        :param read_preference: if set, overrides connection-level
+        :param slave_okay: if True, allows this query to be run against a
+            replica secondary.
+        :params read_preference: if set, overrides connection-level
             read_preference from `ReplicaSetConnection`.
         :param query: Django-style query keyword arguments
         """
@@ -122,40 +122,9 @@ class BaseQuerySet(object):

         return queryset

-    def __getstate__(self):
-        """
-        Need for pickling queryset
-
-        See https://github.com/MongoEngine/mongoengine/issues/442
-        """
-
-        obj_dict = self.__dict__.copy()
-
-        # don't picke collection, instead pickle collection params
-        obj_dict.pop("_collection_obj")
-
-        # don't pickle cursor
-        obj_dict["_cursor_obj"] = None
-
-        return obj_dict
-
-    def __setstate__(self, obj_dict):
-        """
-        Need for pickling queryset
-
-        See https://github.com/MongoEngine/mongoengine/issues/442
-        """
-
-        obj_dict["_collection_obj"] = obj_dict["_document"]._get_collection()
-
-        # update attributes
-        self.__dict__.update(obj_dict)
-
-        # forse load cursor
-        # self._cursor
-
     def __getitem__(self, key):
-        """Support skip and limit using getitem and slicing syntax."""
+        """Support skip and limit using getitem and slicing syntax.
+        """
         queryset = self.clone()

         # Slice provided
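Note: the removed __getstate__/__setstate__ pair makes querysets picklable by dropping the live pymongo collection and cursor and rebuilding the collection from the document class on load (issue #442). A usage sketch, reusing the illustrative User class:

    import pickle

    qs = User.objects(name__startswith='A')
    data = pickle.dumps(qs)        # collection and cursor are not pickled
    restored = pickle.loads(data)  # collection rebuilt via _get_collection()
    assert restored.count() == qs.count()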
@@ -189,9 +158,7 @@ class BaseQuerySet(object):
             if queryset._as_pymongo:
                 return queryset._get_as_pymongo(queryset._cursor[key])
             return queryset._document._from_son(queryset._cursor[key],
-                _auto_dereference=self._auto_dereference,
-                only_fields=self.only_fields)
+                _auto_dereference=self._auto_dereference, only_fields=self.only_fields)

         raise AttributeError

     def __iter__(self):
@@ -224,7 +191,7 @@ class BaseQuerySet(object):
         """
         return self.__call__(*q_objs, **query)

-    def search_text(self, text, language=None):
+    def search_text(self, text, language=None, include_text_scores=False):
         """
         Start a text search, using text indexes.
         Require: MongoDB server version 2.6+.
@@ -232,13 +199,15 @@ class BaseQuerySet(object):
         :param language: The language that determines the list of stop words
             for the search and the rules for the stemmer and tokenizer.
             If not specified, the search uses the default language of the index.
-            For supported languages, see
-            `Text Search Languages <http://docs.mongodb.org/manual/reference/text-search-languages/#text-search-languages>`.
+            For supported languages, see `Text Search Languages <http://docs.mongodb.org/manual/reference/text-search-languages/#text-search-languages>`.
+
+        :param include_text_scores: If True, automaticaly add a text_score attribute to Document.
         """
         queryset = self.clone()
         if queryset._search_text:
             raise OperationError(
-                "It is not possible to use search_text two times.")
+                "Is not possible to use search_text two times.")

         query_kwargs = SON({'$search': text})
         if language:
@@ -248,6 +217,7 @@ class BaseQuerySet(object):
         queryset._mongo_query = None
         queryset._cursor_obj = None
         queryset._search_text = text
+        queryset._include_text_scores = include_text_scores

         return queryset
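Note: a hedged usage sketch for search_text (requires MongoDB 2.6+ and a text index; the BlogPost class is illustrative):

    from mongoengine import Document, StringField

    class BlogPost(Document):
        title = StringField()
        meta = {'indexes': [{'fields': ['$title']}]}  # '$' prefix = text index

    results = BlogPost.objects.search_text('mongodb', language='english')
    best = results.order_by('$text_score').first()  # sort by relevance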
@@ -286,6 +256,54 @@ class BaseQuerySet(object):
         """
         return self._document(**kwargs).save()

+    def get_or_create(self, write_concern=None, auto_save=True,
+                      *q_objs, **query):
+        """Retrieve unique object or create, if it doesn't exist. Returns a
+        tuple of ``(object, created)``, where ``object`` is the retrieved or
+        created object and ``created`` is a boolean specifying whether a new
+        object was created. Raises
+        :class:`~mongoengine.queryset.MultipleObjectsReturned` or
+        `DocumentName.MultipleObjectsReturned` if multiple results are found.
+        A new document will be created if the document doesn't exists; a
+        dictionary of default values for the new document may be provided as a
+        keyword argument called :attr:`defaults`.
+
+        .. note:: This requires two separate operations and therefore a
+            race condition exists.  Because there are no transactions in
+            mongoDB other approaches should be investigated, to ensure you
+            don't accidently duplicate data when using this method.  This is
+            now scheduled to be removed before 1.0
+
+        :param write_concern: optional extra keyword arguments used if we
+            have to create a new document.
+            Passes any write_concern onto :meth:`~mongoengine.Document.save`
+
+        :param auto_save: if the object is to be saved automatically if
+            not found.
+
+        .. deprecated:: 0.8
+        .. versionchanged:: 0.6 - added `auto_save`
+        .. versionadded:: 0.3
+        """
+        msg = ("get_or_create is scheduled to be deprecated.  The approach is "
+               "flawed without transactions.  Upserts should be preferred.")
+        warnings.warn(msg, DeprecationWarning)
+
+        defaults = query.get('defaults', {})
+        if 'defaults' in query:
+            del query['defaults']
+
+        try:
+            doc = self.get(*q_objs, **query)
+            return doc, False
+        except self._document.DoesNotExist:
+            query.update(defaults)
+            doc = self._document(**query)
+
+            if auto_save:
+                doc.save(write_concern=write_concern)
+            return doc, True
+
     def first(self):
         """Retrieve the first object matching the query.
         """
@@ -296,11 +314,10 @@ class BaseQuerySet(object):
             result = None
         return result

-    def insert(self, doc_or_docs, load_bulk=True,
-               write_concern=None, signal_kwargs=None):
+    def insert(self, doc_or_docs, load_bulk=True, write_concern=None):
         """bulk insert documents

-        :param doc_or_docs: a document or list of documents to be inserted
+        :param docs_or_doc: a document or list of documents to be inserted
         :param load_bulk (optional): If True returns the list of document
             instances
         :param write_concern: Extra keyword arguments are passed down to
@@ -310,15 +327,11 @@ class BaseQuerySet(object):
                 ``insert(..., {w: 2, fsync: True})`` will wait until at least
                 two servers have recorded the write and will force an fsync on
                 each server being written to.
-        :parm signal_kwargs: (optional) kwargs dictionary to be passed to
-            the signal calls.

         By default returns document instances, set ``load_bulk`` to False to
         return just ``ObjectIds``

         .. versionadded:: 0.5
-        .. versionchanged:: 0.10.7
-            Add signal_kwargs argument
         """
         Document = _import_class('Document')
@@ -331,6 +344,7 @@ class BaseQuerySet(object):
             return_one = True
             docs = [docs]

+        raw = []
         for doc in docs:
             if not isinstance(doc, self._document):
                 msg = ("Some documents inserted aren't instances of %s"
@@ -339,12 +353,9 @@ class BaseQuerySet(object):
             if doc.pk and not doc._created:
                 msg = "Some documents have ObjectIds use doc.update() instead"
                 raise OperationError(msg)
+            raw.append(doc.to_mongo())

-        signal_kwargs = signal_kwargs or {}
-        signals.pre_bulk_insert.send(self._document,
-                                     documents=docs, **signal_kwargs)
-
-        raw = [doc.to_mongo() for doc in docs]
+        signals.pre_bulk_insert.send(self._document, documents=docs)
         try:
             ids = self._collection.insert(raw, **write_concern)
         except pymongo.errors.DuplicateKeyError, err:
@@ -361,7 +372,7 @@ class BaseQuerySet(object):
         if not load_bulk:
             signals.post_bulk_insert.send(
-                self._document, documents=docs, loaded=False, **signal_kwargs)
+                self._document, documents=docs, loaded=False)
             return return_one and ids[0] or ids

         documents = self.in_bulk(ids)
@@ -369,10 +380,10 @@ class BaseQuerySet(object):
         for obj_id in ids:
             results.append(documents.get(obj_id))

         signals.post_bulk_insert.send(
-            self._document, documents=results, loaded=True, **signal_kwargs)
+            self._document, documents=results, loaded=True)
         return return_one and results[0] or results
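Note: a usage sketch for bulk insert (BlogPost as above):

    posts = [BlogPost(title='a'), BlogPost(title='b')]
    # load_bulk=False skips the reload query and returns just the ObjectIds.
    ids = BlogPost.objects.insert(posts, load_bulk=False)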
-    def count(self, with_limit_and_skip=False):
+    def count(self, with_limit_and_skip=True):
         """Count the selected elements in the query.

         :param with_limit_and_skip (optional): take any :meth:`limit` or
@@ -383,7 +394,7 @@ class BaseQuerySet(object):
             return 0
         return self._cursor.count(with_limit_and_skip=with_limit_and_skip)
-    def delete(self, write_concern=None, _from_doc_delete=False,
-               cascade_refs=None):
+    def delete(self, write_concern=None, _from_doc_delete=False):
         """Delete the documents matched by the query.

         :param write_concern: Extra keyword arguments are passed down which
@@ -394,7 +405,6 @@ class BaseQuerySet(object):
             will force an fsync on the primary server.
         :param _from_doc_delete: True when called from document delete therefore
             signals will have been triggered so don't loop.
-
         :returns number of deleted documents
         """
         queryset = self.clone()
@@ -415,7 +425,7 @@ class BaseQuerySet(object):
         if call_document_delete:
             cnt = 0
             for doc in queryset:
-                doc.delete(**write_concern)
+                doc.delete(write_concern=write_concern)
                 cnt += 1
             return cnt
@@ -424,8 +434,6 @@ class BaseQuerySet(object):
         # references
         for rule_entry in delete_rules:
             document_cls, field_name = rule_entry
-            if document_cls._meta.get('abstract'):
-                continue
             rule = doc._meta['delete_rules'][rule_entry]
             if rule == DENY and document_cls.objects(
                     **{field_name + '__in': self}).count() > 0:
@@ -435,19 +443,13 @@ class BaseQuerySet(object):
         for rule_entry in delete_rules:
             document_cls, field_name = rule_entry
-            if document_cls._meta.get('abstract'):
-                continue
             rule = doc._meta['delete_rules'][rule_entry]
             if rule == CASCADE:
-                cascade_refs = set() if cascade_refs is None else cascade_refs
-                # Handle recursive reference
-                if doc._collection == document_cls._collection:
-                    for ref in queryset:
-                        cascade_refs.add(ref.id)
-                ref_q = document_cls.objects(**{field_name + '__in': self, 'id__nin': cascade_refs})
+                ref_q = document_cls.objects(**{field_name + '__in': self})
                 ref_q_count = ref_q.count()
-                if ref_q_count > 0:
-                    ref_q.delete(write_concern=write_concern, cascade_refs=cascade_refs)
+                if (doc != document_cls and ref_q_count > 0
+                        or (doc == document_cls and ref_q_count > 0)):
+                    ref_q.delete(write_concern=write_concern)
             elif rule == NULLIFY:
                 document_cls.objects(**{field_name + '__in': self}).update(
                     write_concern=write_concern, **{'unset__%s' % field_name: 1})
@@ -456,15 +458,14 @@ class BaseQuerySet(object):
                     write_concern=write_concern,
                     **{'pull_all__%s' % field_name: self})

-        result = queryset._collection.remove(queryset._query, **write_concern)
-        if result:
-            return result.get("n")
+        result = queryset._collection.remove(queryset._query, write_concern=write_concern)
+        return result["n"]
     def update(self, upsert=False, multi=True, write_concern=None,
                full_result=False, **update):
         """Perform an atomic update on the fields matched by the query.

-        :param upsert: insert if document doesn't exist (default ``False``)
+        :param upsert: Any existing document with that "_id" is overwritten.
         :param multi: Update multiple documents.
         :param write_concern: Extra keyword arguments are passed down which
             will be used as options for the resultant
@@ -510,37 +511,10 @@ class BaseQuerySet(object):
                 raise OperationError(message)
             raise OperationError(u'Update failed (%s)' % unicode(err))
-    def upsert_one(self, write_concern=None, **update):
-        """Overwrite or add the first document matched by the query.
-
-        :param write_concern: Extra keyword arguments are passed down which
-            will be used as options for the resultant
-            ``getLastError`` command.  For example,
-            ``save(..., write_concern={w: 2, fsync: True}, ...)`` will
-            wait until at least two servers have recorded the write and
-            will force an fsync on the primary server.
-        :param update: Django-style update keyword arguments
-
-        :returns the new or overwritten document
-
-        .. versionadded:: 0.10.2
-        """
-
-        atomic_update = self.update(multi=False, upsert=True,
-                                    write_concern=write_concern,
-                                    full_result=True, **update)
-
-        if atomic_update['updatedExisting']:
-            document = self.get()
-        else:
-            document = self._document.objects.with_id(atomic_update['upserted'])
-        return document
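Note: usage sketch for the removed upsert_one (base side, added in 0.10.2; User as above):

    # Updates the first match, or inserts and returns a new document.
    user = User.objects(name='Anna').upsert_one(set__name='Anna B.')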
     def update_one(self, upsert=False, write_concern=None, **update):
-        """Perform an atomic update on the fields of the first document
-        matched by the query.
+        """Perform an atomic update on first field matched by the query.

-        :param upsert: insert if document doesn't exist (default ``False``)
+        :param upsert: Any existing document with that "_id" is overwritten.
         :param write_concern: Extra keyword arguments are passed down which
             will be used as options for the resultant
             ``getLastError`` command.  For example,
@@ -569,7 +543,7 @@ class BaseQuerySet(object):
         :param upsert: insert if document doesn't exist (default ``False``)
         :param full_response: return the entire response object from the
-            server (default ``False``, not available for PyMongo 3+)
+            server (default ``False``)
         :param remove: remove rather than updating (default ``False``)
         :param new: return updated rather than original document
             (default ``False``)
@@ -587,31 +561,13 @@ class BaseQuerySet(object):
         queryset = self.clone()
         query = queryset._query
-        if not IS_PYMONGO_3 or not remove:
-            update = transform.update(queryset._document, **update)
+        update = transform.update(queryset._document, **update)
         sort = queryset._ordering

         try:
-            if IS_PYMONGO_3:
-                if full_response:
-                    msg = "With PyMongo 3+, it is not possible anymore to get the full response."
-                    warnings.warn(msg, DeprecationWarning)
-                if remove:
-                    result = queryset._collection.find_one_and_delete(
-                        query, sort=sort, **self._cursor_args)
-                else:
-                    if new:
-                        return_doc = ReturnDocument.AFTER
-                    else:
-                        return_doc = ReturnDocument.BEFORE
-                    result = queryset._collection.find_one_and_update(
-                        query, update, upsert=upsert, sort=sort, return_document=return_doc,
-                        **self._cursor_args)
-            else:
-                result = queryset._collection.find_and_modify(
-                    query, update, upsert=upsert, sort=sort, remove=remove, new=new,
-                    full_response=full_response, **self._cursor_args)
+            result = queryset._collection.find_and_modify(
+                query, update, upsert=upsert, sort=sort, remove=remove, new=new,
+                full_response=full_response, **self._cursor_args)
         except pymongo.errors.DuplicateKeyError, err:
             raise NotUniqueError(u"Update failed (%s)" % err)
         except pymongo.errors.OperationFailure, err:
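Note: the removed branch reflects PyMongo 3 splitting find_and_modify() into find_one_and_update()/find_one_and_delete(), with ReturnDocument replacing the old new flag. A standalone PyMongo 3 sketch (database and collection names are illustrative):

    from pymongo import MongoClient
    from pymongo.collection import ReturnDocument

    coll = MongoClient().testdb.users
    doc = coll.find_one_and_update(
        {'name': 'Anna'},
        {'$inc': {'logins': 1}},
        upsert=True,
        return_document=ReturnDocument.AFTER,  # equivalent of new=True
    )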
@@ -663,10 +619,7 @@ class BaseQuerySet(object):
                 doc_map[doc['_id']] = self._get_as_pymongo(doc)
         else:
             for doc in docs:
-                doc_map[doc['_id']] = self._document._from_son(
-                    doc,
-                    only_fields=self.only_fields,
-                    _auto_dereference=self._auto_dereference)
+                doc_map[doc['_id']] = self._document._from_son(doc, only_fields=self.only_fields)

         return doc_map
@@ -686,8 +639,7 @@ class BaseQuerySet(object):
         return self

     def using(self, alias):
-        """This method is for controlling which database the QuerySet will be
-        evaluated against if you are using more than one database.
+        """This method is for controlling which database the QuerySet will be evaluated against if you are using more than one database.

         :param alias: The database alias
@@ -720,7 +672,7 @@ class BaseQuerySet(object):
                       '_timeout', '_class_check', '_slave_okay', '_read_preference',
                       '_iter', '_scalar', '_as_pymongo', '_as_pymongo_coerce',
                       '_limit', '_skip', '_hint', '_auto_dereference',
-                      '_search_text', 'only_fields', '_max_time_ms')
+                      '_search_text', '_include_text_scores', 'only_fields')

         for prop in copy_props:
             val = getattr(self, prop)
@@ -750,7 +702,11 @@ class BaseQuerySet(object):
         :param n: the maximum number of objects to return
         """
         queryset = self.clone()
-        queryset._limit = n if n != 0 else 1
+        if n == 0:
+            queryset._cursor.limit(1)
+        else:
+            queryset._cursor.limit(n)
+        queryset._limit = n

         # Return self to allow chaining
         return queryset
@@ -761,6 +717,7 @@ class BaseQuerySet(object):
         :param n: the number of objects to skip before returning results
         """
         queryset = self.clone()
+        queryset._cursor.skip(n)
         queryset._skip = n
         return queryset
@@ -778,6 +735,7 @@ class BaseQuerySet(object):
         .. versionadded:: 0.5
         """
         queryset = self.clone()
+        queryset._cursor.hint(index)
         queryset._hint = index
         return queryset
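Note: the two sides differ in when the cursor is touched: the base side only records _limit/_skip/_hint and applies them when the cursor is created, while the head side mutates queryset._cursor immediately. The chainable API reads the same either way (sketch):

    page = User.objects.order_by('name').skip(20).limit(10)
    same_page = User.objects.order_by('name')[20:30]  # slicing is equivalent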
@@ -800,29 +758,14 @@ class BaseQuerySet(object):
         distinct = self._dereference(queryset._cursor.distinct(field), 1,
                                      name=field, instance=self._document)

-        doc_field = self._document._fields.get(field.split('.', 1)[0])
-        instance = False
-
-        # We may need to cast to the correct type eg. ListField(EmbeddedDocumentField)
+        # We may need to cast to the correct type eg.
+        # ListField(EmbeddedDocumentField)
+        doc_field = getattr(
+            self._document._fields.get(field), "field", None)
+        instance = getattr(doc_field, "document_type", False)
         EmbeddedDocumentField = _import_class('EmbeddedDocumentField')
-        ListField = _import_class('ListField')
-        GenericEmbeddedDocumentField = _import_class('GenericEmbeddedDocumentField')
-        if isinstance(doc_field, ListField):
-            doc_field = getattr(doc_field, "field", doc_field)
-        if isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)):
-            instance = getattr(doc_field, "document_type", False)
-
-        # handle distinct on subdocuments
-        if '.' in field:
-            for field_part in field.split('.')[1:]:
-                # if looping on embedded document, get the document type instance
-                if instance and isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)):
-                    doc_field = instance
-                # now get the subdocument
-                doc_field = getattr(doc_field, field_part, doc_field)
-                # We may need to cast to the correct type eg. ListField(EmbeddedDocumentField)
-                if isinstance(doc_field, ListField):
-                    doc_field = getattr(doc_field, "field", doc_field)
-                if isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)):
-                    instance = getattr(doc_field, "document_type", False)
+        GenericEmbeddedDocumentField = _import_class(
+            'GenericEmbeddedDocumentField')

         if instance and isinstance(doc_field, (EmbeddedDocumentField,
                                                GenericEmbeddedDocumentField)):
             distinct = [instance(**doc) for doc in distinct]
@@ -892,6 +835,7 @@ class BaseQuerySet(object):
         cleaned_fields = []
         for key, value in kwargs.items():
             parts = key.split('__')
+            op = None
             if parts[0] in operators:
                 op = parts.pop(0)
                 value = {'$' + op: value}
@@ -944,18 +888,13 @@ class BaseQuerySet(object):
         plan = pprint.pformat(plan)
         return plan

-    # DEPRECATED. Has no more impact on PyMongo 3+
     def snapshot(self, enabled):
         """Enable or disable snapshot mode when querying.

         :param enabled: whether or not snapshot mode is enabled

         ..versionchanged:: 0.5 - made chainable
-        .. deprecated:: Ignored with PyMongo 3+
         """
-        if IS_PYMONGO_3:
-            msg = "snapshot is deprecated as it has no impact when using PyMongo 3+."
-            warnings.warn(msg, DeprecationWarning)
         queryset = self.clone()
         queryset._snapshot = enabled
         return queryset
@@ -971,17 +910,11 @@ class BaseQuerySet(object):
         queryset._timeout = enabled
         return queryset

-    # DEPRECATED. Has no more impact on PyMongo 3+
     def slave_okay(self, enabled):
         """Enable or disable the slave_okay when querying.

         :param enabled: whether or not the slave_okay is enabled
-
-        .. deprecated:: Ignored with PyMongo 3+
         """
-        if IS_PYMONGO_3:
-            msg = "slave_okay is deprecated as it has no impact when using PyMongo 3+."
-            warnings.warn(msg, DeprecationWarning)
         queryset = self.clone()
         queryset._slave_okay = enabled
         return queryset
@@ -995,7 +928,6 @@ class BaseQuerySet(object):
         validate_read_preference('read_preference', read_preference)
         queryset = self.clone()
         queryset._read_preference = read_preference
-        queryset._cursor_obj = None  # we need to re-create the cursor object whenever we apply read_preference
         return queryset

     def scalar(self, *fields):
@@ -1029,7 +961,7 @@ class BaseQuerySet(object):
         """Instead of returning Document instances, return raw values from
         pymongo.

-        :param coerce_types: Field types (if applicable) would be use to
+        :param coerce_type: Field types (if applicable) would be use to
             coerce types.
         """
         queryset = self.clone()
@@ -1037,13 +969,6 @@ class BaseQuerySet(object):
         queryset._as_pymongo_coerce = coerce_types
         return queryset
-    def max_time_ms(self, ms):
-        """Wait `ms` milliseconds before killing the query on the server
-
-        :param ms: the number of milliseconds before killing the query on the server
-        """
-        return self._chainable_method("max_time_ms", ms)
# JSON Helpers

     def to_json(self, *args, **kwargs):
@@ -1057,8 +982,8 @@ class BaseQuerySet(object):

     def aggregate(self, *pipeline, **kwargs):
         """
-        Perform a aggregate function based in your queryset params
-        :param pipeline: list of aggregation commands,
+        Perform a aggreggate function based in your queryset params
+        :param pipeline: list of agreggation commands,
             see: http://docs.mongodb.org/manual/core/aggregation-pipeline/

         .. versionadded:: 0.9
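Note: usage sketch for aggregate() (added in 0.9); the queryset's own filter is applied before the supplied stages, and field names here are illustrative:

    pipeline = [{'$group': {'_id': '$author', 'count': {'$sum': 1}}}]
    per_author = list(BlogPost.objects.aggregate(*pipeline))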
@@ -1268,62 +1193,103 @@ class BaseQuerySet(object):
     def sum(self, field):
         """Sum over the values of the specified field.

-        :param field: the field to sum over; use dot notation to refer to
+        :param field: the field to sum over; use dot-notation to refer to
             embedded document fields
+
+        .. versionchanged:: 0.5 - updated to map_reduce as db.eval doesnt work
+            with sharding.
         """
-        db_field = self._fields_to_dbfields([field]).pop()
-        pipeline = [
-            {'$match': self._query},
-            {'$group': {'_id': 'sum', 'total': {'$sum': '$' + db_field}}}
-        ]
-
-        # if we're performing a sum over a list field, we sum up all the
-        # elements in the list, hence we need to $unwind the arrays first
-        ListField = _import_class('ListField')
-        field_parts = field.split('.')
-        field_instances = self._document._lookup_field(field_parts)
-        if isinstance(field_instances[-1], ListField):
-            pipeline.insert(1, {'$unwind': '$' + field})
-
-        result = self._document._get_collection().aggregate(pipeline)
-        if IS_PYMONGO_3:
-            result = tuple(result)
-        else:
-            result = result.get('result')
-
-        if result:
-            return result[0]['total']
-        return 0
+        map_func = """
+            function() {
+                var path = '{{~%(field)s}}'.split('.'),
+                    field = this;
+
+                for (p in path) {
+                    if (typeof field != 'undefined')
+                       field = field[path[p]];
+                    else
+                       break;
+                }
+
+                if (field && field.constructor == Array) {
+                    field.forEach(function(item) {
+                        emit(1, item||0);
+                    });
+                } else if (typeof field != 'undefined') {
+                    emit(1, field||0);
+                }
+            }
+        """ % dict(field=field)
+        reduce_func = Code("""
+            function(key, values) {
+                var sum = 0;
+                for (var i in values) {
+                    sum += values[i];
+                }
+                return sum;
+            }
+        """)
+        for result in self.map_reduce(map_func, reduce_func, output='inline'):
+            return result.value
+        else:
+            return 0
     def average(self, field):
         """Average over the values of the specified field.

-        :param field: the field to average over; use dot notation to refer to
+        :param field: the field to average over; use dot-notation to refer to
             embedded document fields
+
+        .. versionchanged:: 0.5 - updated to map_reduce as db.eval doesnt work
+            with sharding.
         """
-        db_field = self._fields_to_dbfields([field]).pop()
-        pipeline = [
-            {'$match': self._query},
-            {'$group': {'_id': 'avg', 'total': {'$avg': '$' + db_field}}}
-        ]
-
-        # if we're performing an average over a list field, we average out
-        # all the elements in the list, hence we need to $unwind the arrays
-        # first
-        ListField = _import_class('ListField')
-        field_parts = field.split('.')
-        field_instances = self._document._lookup_field(field_parts)
-        if isinstance(field_instances[-1], ListField):
-            pipeline.insert(1, {'$unwind': '$' + field})
-
-        result = self._document._get_collection().aggregate(pipeline)
-        if IS_PYMONGO_3:
-            result = tuple(result)
-        else:
-            result = result.get('result')
-
-        if result:
-            return result[0]['total']
-        return 0
+        map_func = """
+            function() {
+                var path = '{{~%(field)s}}'.split('.'),
+                    field = this;
+
+                for (p in path) {
+                    if (typeof field != 'undefined')
+                       field = field[path[p]];
+                    else
+                       break;
+                }
+
+                if (field && field.constructor == Array) {
+                    field.forEach(function(item) {
+                        emit(1, {t: item||0, c: 1});
+                    });
+                } else if (typeof field != 'undefined') {
+                    emit(1, {t: field||0, c: 1});
+                }
+            }
+        """ % dict(field=field)
+        reduce_func = Code("""
+            function(key, values) {
+                var out = {t: 0, c: 0};
+                for (var i in values) {
+                    var value = values[i];
+                    out.t += value.t;
+                    out.c += value.c;
+                }
+                return out;
+            }
+        """)
+        finalize_func = Code("""
+            function(key, value) {
+                return value.t / value.c;
+            }
+        """)
+        for result in self.map_reduce(map_func, reduce_func,
+                                      finalize_f=finalize_func, output='inline'):
+            return result.value
+        else:
+            return 0
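Note: both implementations reduce server-side; the base side builds an aggregation pipeline ($match, optional $unwind for list fields, $group), the head side runs the map/reduce above. Usage is identical (field names illustrative):

    total = BlogPost.objects(author='Anna').sum('views')
    mean = BlogPost.objects.average('stats.daily_views')  # dot notation works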
     def item_frequencies(self, field, normalize=False, map_reduce=True):
         """Returns a dictionary of all items present in a field across
@@ -1335,7 +1301,7 @@ class BaseQuerySet(object):
         Can only do direct simple mappings and cannot map across
         :class:`~mongoengine.fields.ReferenceField` or
         :class:`~mongoengine.fields.GenericReferenceField` for more complex
-        counting a manual map reduce call is required.
+        counting a manual map reduce call would is required.

         If the field is a :class:`~mongoengine.fields.ListField`, the items within
         each list will be counted individually.
@@ -1365,7 +1331,6 @@ class BaseQuerySet(object):
             return self._get_as_pymongo(raw_doc)
         doc = self._document._from_son(raw_doc,
             _auto_dereference=self._auto_dereference, only_fields=self.only_fields)
-
         if self._scalar:
             return self._get_scalar(doc)
@@ -1374,7 +1339,6 @@ class BaseQuerySet(object):
def rewind(self): def rewind(self):
"""Rewind the cursor to its unevaluated state. """Rewind the cursor to its unevaluated state.
.. versionadded:: 0.3 .. versionadded:: 0.3
""" """
self._iter = False self._iter = False
@@ -1391,34 +1355,22 @@ class BaseQuerySet(object):
@property @property
def _cursor_args(self): def _cursor_args(self):
if not IS_PYMONGO_3: cursor_args = {
fields_name = 'fields' 'snapshot': self._snapshot,
cursor_args = { 'timeout': self._timeout
'timeout': self._timeout, }
'snapshot': self._snapshot if self._read_preference is not None:
} cursor_args['read_preference'] = self._read_preference
if self._read_preference is not None:
cursor_args['read_preference'] = self._read_preference
else:
cursor_args['slave_okay'] = self._slave_okay
else: else:
fields_name = 'projection' cursor_args['slave_okay'] = self._slave_okay
# snapshot is not handled at all by PyMongo 3+
# TODO: evaluate similar possibilities using modifiers
if self._snapshot:
msg = "The snapshot option is not anymore available with PyMongo 3+"
warnings.warn(msg, DeprecationWarning)
cursor_args = {
'no_cursor_timeout': not self._timeout
}
if self._loaded_fields: if self._loaded_fields:
cursor_args[fields_name] = self._loaded_fields.as_dict() cursor_args['fields'] = self._loaded_fields.as_dict()
if self._search_text: if self._include_text_scores:
if fields_name not in cursor_args: if 'fields' not in cursor_args:
cursor_args[fields_name] = {} cursor_args['fields'] = {}
cursor_args[fields_name]['_text_score'] = {'$meta': "textScore"} cursor_args['fields']['text_score'] = {'$meta': "textScore"}
return cursor_args return cursor_args
@@ -1426,16 +1378,8 @@ class BaseQuerySet(object):
     def _cursor(self):
         if self._cursor_obj is None:
-            # In PyMongo 3+, we define the read preference on a collection
-            # level, not a cursor level. Thus, we need to get a cloned
-            # collection object using `with_options` first.
-            if IS_PYMONGO_3 and self._read_preference is not None:
-                self._cursor_obj = self._collection\
-                    .with_options(read_preference=self._read_preference)\
-                    .find(self._query, **self._cursor_args)
-            else:
-                self._cursor_obj = self._collection.find(self._query,
-                                                         **self._cursor_args)
+            self._cursor_obj = self._collection.find(self._query,
+                                                     **self._cursor_args)

             # Apply where clauses to cursor
             if self._where_clause:
                 where_clause = self._sub_js_fields(self._where_clause)
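Note: the removed comment explains that PyMongo 3 fixes read preference per collection object, hence the with_options() clone. A standalone PyMongo 3 sketch (names illustrative):

    from pymongo import MongoClient
    from pymongo.read_preferences import ReadPreference

    coll = MongoClient().testdb.users
    secondary = coll.with_options(
        read_preference=ReadPreference.SECONDARY_PREFERRED)
    cursor = secondary.find({'active': True})  # reads may hit a secondary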
@@ -1603,7 +1547,7 @@ class BaseQuerySet(object):
         return frequencies

-    def _fields_to_dbfields(self, fields):
+    def _fields_to_dbfields(self, fields, subdoc=False):
         """Translate fields paths to its db equivalents"""
         ret = []
         subclasses = []
@@ -1625,7 +1569,7 @@ class BaseQuerySet(object):
                         ret.append(subfield)
                         found = True
                         break
-                except LookUpError:
+                except LookUpError, e:
                     pass

             if not found:
@@ -1641,7 +1585,9 @@ class BaseQuerySet(object):
                 continue

             if key == '$text_score':
-                key_list.append(('_text_score', {'$meta': "textScore"}))
+                # automatically set to include text scores
+                self._include_text_scores = True
+                key_list.append(('text_score', {'$meta': "textScore"}))
                 continue

             direction = pymongo.ASCENDING
@@ -1652,7 +1598,7 @@ class BaseQuerySet(object):
             key = key.replace('__', '.')
             try:
                 key = self._document._translate_field_name(key)
-            except Exception:
+            except:
                 pass
             key_list.append((key, direction))
@@ -1754,13 +1700,6 @@ class BaseQuerySet(object):
                         code)
         return code

-    def _chainable_method(self, method_name, val):
-        queryset = self.clone()
-        method = getattr(queryset._cursor, method_name)
-        method(val)
-        setattr(queryset, "_" + method_name, val)
-        return queryset
-
     # Deprecated
     def ensure_index(self, **kwargs):
         """Deprecated use :func:`Document.ensure_index`"""

mongoengine/queryset/field_list.py

@@ -1,3 +1,4 @@
 __all__ = ('QueryFieldList',)

mongoengine/queryset/manager.py

@@ -29,7 +29,7 @@ class QuerySetManager(object):
         Document.objects is accessed.
         """
         if instance is not None:
-            # Document object being used rather than a document class
+            # Document class being used rather than a document object
             return self

         # owner is the document that contains the QuerySetManager

mongoengine/queryset/queryset.py

@@ -1,6 +1,6 @@
from mongoengine.errors import OperationError from mongoengine.errors import OperationError
from mongoengine.queryset.base import (BaseQuerySet, CASCADE, DENY, DO_NOTHING, from mongoengine.queryset.base import (BaseQuerySet, DO_NOTHING, NULLIFY,
NULLIFY, PULL) CASCADE, DENY, PULL)
__all__ = ('QuerySet', 'QuerySetNoCache', 'DO_NOTHING', 'NULLIFY', 'CASCADE', __all__ = ('QuerySet', 'QuerySetNoCache', 'DO_NOTHING', 'NULLIFY', 'CASCADE',
'DENY', 'PULL') 'DENY', 'PULL')
@@ -38,7 +38,7 @@ class QuerySet(BaseQuerySet):
     def __len__(self):
         """Since __len__ is called quite frequently (for example, as part of
-        list(qs)), we populate the result cache and cache the length.
+        list(qs) we populate the result cache and cache the length.
         """
         if self._len is not None:
             return self._len
@@ -61,6 +61,7 @@ class QuerySet(BaseQuerySet):
                 data[-1] = "...(remaining elements truncated)..."
         return repr(data)

     def _iter_results(self):
         """A generator for iterating over the result cache.
@@ -73,7 +74,7 @@ class QuerySet(BaseQuerySet):
         upper = len(self._result_cache)
         while pos < upper:
             yield self._result_cache[pos]
-            pos += 1
+            pos = pos + 1

         if not self._has_more:
             raise StopIteration
         if len(self._result_cache) <= pos:
@@ -93,7 +94,7 @@ class QuerySet(BaseQuerySet):
         except StopIteration:
             self._has_more = False

-    def count(self, with_limit_and_skip=False):
+    def count(self, with_limit_and_skip=True):
         """Count the selected elements in the query.

         :param with_limit_and_skip (optional): take any :meth:`limit` or
@@ -160,4 +161,4 @@ class QuerySetNoDeRef(QuerySet):
     """Special no_dereference QuerySet"""

     def __dereference(items, max_depth=1, instance=None, name=None):
         return items

mongoengine/queryset/transform.py

@@ -1,21 +1,20 @@
 from collections import defaultdict

-from bson import SON
 import pymongo
+from bson import SON

-from mongoengine.base.fields import UPDATE_OPERATORS
-from mongoengine.common import _import_class
 from mongoengine.connection import get_connection
-from mongoengine.errors import InvalidQueryError
-from mongoengine.python_support import IS_PYMONGO_3
+from mongoengine.common import _import_class
+from mongoengine.errors import InvalidQueryError, LookUpError

 __all__ = ('query', 'update')

 COMPARISON_OPERATORS = ('ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod',
-                        'all', 'size', 'exists', 'not', 'elemMatch', 'type')
+                        'all', 'size', 'exists', 'not', 'elemMatch')
 GEO_OPERATORS = ('within_distance', 'within_spherical_distance',
                  'within_box', 'within_polygon', 'near', 'near_sphere',
-                 'max_distance', 'min_distance', 'geo_within', 'geo_within_box',
+                 'max_distance', 'geo_within', 'geo_within_box',
                  'geo_within_polygon', 'geo_within_center',
                  'geo_within_sphere', 'geo_intersects')
 STRING_OPERATORS = ('contains', 'icontains', 'startswith',
@@ -25,13 +24,17 @@ CUSTOM_OPERATORS = ('match',)
 MATCH_OPERATORS = (COMPARISON_OPERATORS + GEO_OPERATORS +
                    STRING_OPERATORS + CUSTOM_OPERATORS)

+UPDATE_OPERATORS = ('set', 'unset', 'inc', 'dec', 'pop', 'push',
+                    'push_all', 'pull', 'pull_all', 'add_to_set',
+                    'set_on_insert')
+

-def query(_doc_cls=None, **kwargs):
+def query(_doc_cls=None, _field_operation=False, **query):
     """Transform a query from Django-style format to Mongo format.
     """
     mongo_query = {}
     merge_query = defaultdict(list)
-    for key, value in sorted(kwargs.items()):
+    for key, value in sorted(query.items()):
         if key == "__raw__":
             mongo_query.update(value)
             continue
@@ -44,10 +47,6 @@ def query(_doc_cls=None, **kwargs):
         if len(parts) > 1 and parts[-1] in MATCH_OPERATORS:
             op = parts.pop()

-        # Allow to escape operator-like field name by __
-        if len(parts) > 1 and parts[-1] == "":
-            parts.pop()
-
         negate = False
         if len(parts) > 1 and parts[-1] == 'not':
             parts.pop()
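Note: the removed escape rule lets a trailing __ force a path segment that collides with an operator name to be treated as a field. A hedged sketch of that base-side behaviour (illustrative classes):

    from mongoengine import (Document, EmbeddedDocument,
                             EmbeddedDocumentField, IntField)

    class Dimensions(EmbeddedDocument):
        size = IntField()  # 'size' collides with the $size operator

    class Box(Document):
        dims = EmbeddedDocumentField(Dimensions)

    Box.objects(dims__size=2)    # parsed as $size (array length) on 'dims'
    Box.objects(dims__size__=2)  # escaped: matches dims.size == 2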
@@ -105,21 +104,13 @@ def query(_doc_cls=None, **kwargs):
         if op:
             if op in GEO_OPERATORS:
                 value = _geo_operator(field, op, value)
-            elif op in ('match', 'elemMatch'):
-                ListField = _import_class('ListField')
-                EmbeddedDocumentField = _import_class('EmbeddedDocumentField')
-                if (
-                    isinstance(value, dict) and
-                    isinstance(field, ListField) and
-                    isinstance(field.field, EmbeddedDocumentField)
-                ):
-                    value = query(field.field.document_type, **value)
-                else:
-                    value = field.prepare_query_value(op, value)
-                value = {"$elemMatch": value}
             elif op in CUSTOM_OPERATORS:
-                NotImplementedError("Custom method '%s' has not "
-                                    "been implemented" % op)
+                if op in ('elem_match', 'match'):
+                    value = field.prepare_query_value(op, value)
+                    value = {"$elemMatch": value}
+                else:
+                    NotImplementedError("Custom method '%s' has not "
+                                        "been implemented" % op)
             elif op not in STRING_OPERATORS:
                 value = {'$' + op: value}
@@ -134,34 +125,29 @@ def query(_doc_cls=None, **kwargs):
         elif key in mongo_query:
             if key in mongo_query and isinstance(mongo_query[key], dict):
                 mongo_query[key].update(value)
-                # $max/minDistance needs to come last - convert to SON
+                # $maxDistance needs to come last - convert to SON
                 value_dict = mongo_query[key]
-                if ('$maxDistance' in value_dict or '$minDistance' in value_dict) and \
-                        ('$near' in value_dict or '$nearSphere' in value_dict):
+                if ('$maxDistance' in value_dict and '$near' in value_dict):
                     value_son = SON()
-                    for k, v in value_dict.iteritems():
-                        if k == '$maxDistance' or k == '$minDistance':
-                            continue
-                        value_son[k] = v
-                    # Required for MongoDB >= 2.6, may fail when combining
-                    # PyMongo 3+ and MongoDB < 2.6
-                    near_embedded = False
-                    for near_op in ('$near', '$nearSphere'):
-                        if isinstance(value_dict.get(near_op), dict) and (
-                                IS_PYMONGO_3 or get_connection().max_wire_version > 1):
-                            value_son[near_op] = SON(value_son[near_op])
-                            if '$maxDistance' in value_dict:
-                                value_son[near_op][
-                                    '$maxDistance'] = value_dict['$maxDistance']
-                            if '$minDistance' in value_dict:
-                                value_son[near_op][
-                                    '$minDistance'] = value_dict['$minDistance']
-                            near_embedded = True
-                    if not near_embedded:
-                        if '$maxDistance' in value_dict:
-                            value_son['$maxDistance'] = value_dict['$maxDistance']
-                        if '$minDistance' in value_dict:
-                            value_son['$minDistance'] = value_dict['$minDistance']
+                    if isinstance(value_dict['$near'], dict):
+                        for k, v in value_dict.iteritems():
+                            if k == '$maxDistance':
+                                continue
+                            value_son[k] = v
+                        if (get_connection().max_wire_version <= 1):
+                            value_son['$maxDistance'] = value_dict[
+                                '$maxDistance']
+                        else:
+                            value_son['$near'] = SON(value_son['$near'])
+                            value_son['$near'][
+                                '$maxDistance'] = value_dict['$maxDistance']
+                    else:
+                        for k, v in value_dict.iteritems():
+                            if k == '$maxDistance':
+                                continue
+                            value_son[k] = v
+                        value_son['$maxDistance'] = value_dict['$maxDistance']
                     mongo_query[key] = value_son
             else:
                 # Store for manually merging later
@@ -174,7 +160,7 @@ def query(_doc_cls=None, **kwargs):
         if isinstance(v, list):
             value = [{k: val} for val in v]
             if '$and' in mongo_query.keys():
-                mongo_query['$and'].extend(value)
+                mongo_query['$and'].append(value)
             else:
                 mongo_query['$and'] = value
@@ -215,10 +201,6 @@ def update(_doc_cls=None, **update):
     if parts[-1] in COMPARISON_OPERATORS:
         match = parts.pop()

-    # Allow to escape operator-like field name by __
-    if len(parts) > 1 and parts[-1] == "":
-        parts.pop()
-
     if _doc_cls:
         # Switch field names to proper names [set in Field(name='foo')]
         try:
@@ -319,11 +301,7 @@ def update(_doc_cls=None, **update):

 def _geo_operator(field, op, value):
     """Helper to return the query for a given geo query"""
-    if op == "max_distance":
-        value = {'$maxDistance': value}
-    elif op == "min_distance":
-        value = {'$minDistance': value}
-    elif field._geo_index == pymongo.GEO2D:
+    if field._geo_index == pymongo.GEO2D:
         if op == "within_distance":
             value = {'$within': {'$center': value}}
         elif op == "within_spherical_distance":
@@ -336,6 +314,8 @@ def _geo_operator(field, op, value):
             value = {'$nearSphere': value}
         elif op == 'within_box':
             value = {'$within': {'$box': value}}
+        elif op == "max_distance":
+            value = {'$maxDistance': value}
         else:
             raise NotImplementedError("Geo method '%s' has not "
                                       "been implemented for a GeoPointField" % op)
@@ -354,6 +334,8 @@ def _geo_operator(field, op, value):
             value = {"$geoIntersects": _infer_geometry(value)}
         elif op == "near":
             value = {'$near': _infer_geometry(value)}
+        elif op == "max_distance":
+            value = {'$maxDistance': value}
         else:
             raise NotImplementedError("Geo method '%s' has not "
                                       "been implemented for a %s " % (op, field._name))
@@ -370,25 +352,20 @@ def _infer_geometry(value):
         raise InvalidQueryError("Invalid $geometry dictionary should have "
                                 "type and coordinates keys")
     elif isinstance(value, (list, set)):
-        # TODO: shouldn't we test value[0][0][0][0] to see if it is MultiPolygon?
-        # TODO: should both TypeError and IndexError be alike interpreted?
-
         try:
             value[0][0][0]
             return {"$geometry": {"type": "Polygon", "coordinates": value}}
-        except (TypeError, IndexError):
+        except:
             pass

         try:
             value[0][0]
             return {"$geometry": {"type": "LineString", "coordinates": value}}
-        except (TypeError, IndexError):
+        except:
             pass

         try:
             value[0]
             return {"$geometry": {"type": "Point", "coordinates": value}}
-        except (TypeError, IndexError):
+        except:
             pass

     raise InvalidQueryError("Invalid $geometry data. Can be either a dictionary "

mongoengine/queryset/visitor.py

@@ -1,5 +1,8 @@
 import copy

+from itertools import product
+from functools import reduce
+
 from mongoengine.errors import InvalidQueryError
 from mongoengine.queryset import transform
@@ -26,7 +29,7 @@ class DuplicateQueryConditionsError(InvalidQueryError):

 class SimplificationVisitor(QNodeVisitor):
-    """Simplifies query trees by combining unnecessary 'and' connection nodes
+    """Simplifies query trees by combinging unnecessary 'and' connection nodes
     into a single Q-object.
     """

mongoengine/signals.py

@@ -6,7 +6,6 @@ __all__ = ['pre_init', 'post_init', 'pre_save', 'pre_save_post_validation',
signals_available = False signals_available = False
try: try:
from blinker import Namespace from blinker import Namespace
signals_available = True signals_available = True
except ImportError: except ImportError:
class Namespace(object): class Namespace(object):
@@ -28,8 +27,7 @@ except ImportError:
raise RuntimeError('signalling support is unavailable ' raise RuntimeError('signalling support is unavailable '
'because the blinker library is ' 'because the blinker library is '
'not installed.') 'not installed.')
send = lambda *a, **kw: None
send = lambda *a, **kw: None # noqa
connect = disconnect = has_receivers_for = receivers_for = \ connect = disconnect = has_receivers_for = receivers_for = \
temporarily_connected_to = _fail temporarily_connected_to = _fail
del _fail del _fail
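For context on what this fallback guards: with blinker installed the module exposes real signals, and only attempts to connect a receiver should fail when it is missing. A small usage sketch, assuming blinker is available:

    from mongoengine import Document, StringField, signals

    class Person(Document):
        name = StringField()

    def log_save(sender, document, **kwargs):
        # Fires before every Person save when blinker is installed.
        print("about to save %s" % document.name)

    signals.pre_save.connect(log_save, sender=Person)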


@@ -1,5 +1 @@
nose
pymongo>=2.7.1 pymongo>=2.7.1
six==1.10.0
flake8
flake8-import-order


@@ -1,13 +1,11 @@
[nosetests] [nosetests]
verbosity = 2 verbosity = 3
detailed-errors = 1 detailed-errors = 1
cover-erase = 1 #with-coverage = 1
cover-branches = 1 #cover-erase = 1
cover-package = mongoengine #cover-html = 1
tests = tests #cover-html-dir = ../htmlcov
#cover-package = mongoengine
[flake8] py3where = build
ignore=E501,F401,F403,F405,I201 where = tests
exclude=build,dist,docs,venv,.tox,.eggs,tests #tests = document/__init__.py
max-complexity=42
application-import-names=mongoengine,tests

View File

@@ -1,6 +1,6 @@
import os import os
import sys import sys
from setuptools import find_packages, setup from setuptools import setup, find_packages
# Hack to silence atexit traceback in newer python versions # Hack to silence atexit traceback in newer python versions
try: try:
@@ -8,16 +8,13 @@ try:
except ImportError: except ImportError:
pass pass
DESCRIPTION = ( DESCRIPTION = 'MongoEngine is a Python Object-Document ' + \
'MongoEngine is a Python Object-Document ' 'Mapper for working with MongoDB.'
'Mapper for working with MongoDB.' LONG_DESCRIPTION = None
)
try: try:
with open('README.rst') as fin: LONG_DESCRIPTION = open('README.rst').read()
LONG_DESCRIPTION = fin.read() except:
except Exception: pass
LONG_DESCRIPTION = None
def get_version(version_tuple): def get_version(version_tuple):
@@ -25,7 +22,6 @@ def get_version(version_tuple):
return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1] return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1]
return '.'.join(map(str, version_tuple)) return '.'.join(map(str, version_tuple))
# Dirty hack to get version number from mongoengine/__init__.py - we can't # import it as it depends on PyMongo and PyMongo isn't installed until this
# import it as it depends on PyMongo and PyMongo isn't installed until this # import it as it depends on PyMongo and PyMongo isn't installed until this
# file is read # file is read
@@ -33,6 +29,7 @@ init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py')
version_line = list(filter(lambda l: l.startswith('VERSION'), open(init)))[0] version_line = list(filter(lambda l: l.startswith('VERSION'), open(init)))[0]
VERSION = get_version(eval(version_line.split('=')[-1])) VERSION = get_version(eval(version_line.split('=')[-1]))
print(VERSION)
CLASSIFIERS = [ CLASSIFIERS = [
'Development Status :: 4 - Beta', 'Development Status :: 4 - Beta',
@@ -41,7 +38,7 @@ CLASSIFIERS = [
'Operating System :: OS Independent', 'Operating System :: OS Independent',
'Programming Language :: Python', 'Programming Language :: Python',
"Programming Language :: Python :: 2", "Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.6.6",
"Programming Language :: Python :: 2.7", "Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3", "Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.2",
@@ -56,33 +53,31 @@ CLASSIFIERS = [
extra_opts = {"packages": find_packages(exclude=["tests", "tests.*"])} extra_opts = {"packages": find_packages(exclude=["tests", "tests.*"])}
if sys.version_info[0] == 3: if sys.version_info[0] == 3:
extra_opts['use_2to3'] = True extra_opts['use_2to3'] = True
extra_opts['tests_require'] = ['nose', 'coverage==3.7.1', 'blinker', 'Pillow>=2.0.0'] extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'jinja2==2.6', 'Pillow>=2.0.0', 'django>=1.5.1']
if "test" in sys.argv or "nosetests" in sys.argv: if "test" in sys.argv or "nosetests" in sys.argv:
extra_opts['packages'] = find_packages() extra_opts['packages'] = find_packages()
extra_opts['package_data'] = {"tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]} extra_opts['package_data'] = {"tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]}
else: else:
# coverage 4 does not support Python 3.2 anymore extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.4.2', 'Pillow>=2.0.0', 'jinja2>=2.6', 'python-dateutil']
extra_opts['tests_require'] = ['nose', 'coverage==3.7.1', 'blinker', 'Pillow>=2.0.0', 'python-dateutil']
if sys.version_info[0] == 2 and sys.version_info[1] == 6: if sys.version_info[0] == 2 and sys.version_info[1] == 6:
extra_opts['tests_require'].append('unittest2') extra_opts['tests_require'].append('unittest2')
setup( setup(name='mongoengine',
name='mongoengine', version=VERSION,
version=VERSION, author='Harry Marr',
author='Harry Marr', author_email='harry.marr@{nospam}gmail.com',
author_email='harry.marr@{nospam}gmail.com', maintainer="Ross Lawley",
maintainer="Ross Lawley", maintainer_email="ross.lawley@{nospam}gmail.com",
maintainer_email="ross.lawley@{nospam}gmail.com", url='http://mongoengine.org/',
url='http://mongoengine.org/', download_url='https://github.com/MongoEngine/mongoengine/tarball/master',
download_url='https://github.com/MongoEngine/mongoengine/tarball/master', license='MIT',
license='MIT', include_package_data=True,
include_package_data=True, description=DESCRIPTION,
description=DESCRIPTION, long_description=LONG_DESCRIPTION,
long_description=LONG_DESCRIPTION, platforms=['any'],
platforms=['any'], classifiers=CLASSIFIERS,
classifiers=CLASSIFIERS, install_requires=['pymongo>=2.7.1'],
install_requires=['pymongo>=2.7.1', 'six'], test_suite='nose.collector',
test_suite='nose.collector', **extra_opts
**extra_opts
) )
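A quick illustration of the get_version helper above: numeric components are dot-joined, while a non-numeric tail is appended without a separator.

    >>> get_version((0, 9, 0))
    '0.9.0'
    >>> get_version((0, 8, 7, '+'))
    '0.8.7+'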

tests/async/__init__.py Normal file


@@ -0,0 +1,36 @@
from mongoengine import *
import motor
import mongoengine.connection
from mongoengine.connection import get_db, get_connection, ConnectionError
try:
import unittest2 as unittest
except ImportError:
import unittest
class ConnectionTest(unittest.TestCase):
def setUp(self):
mongoengine.connection._connection_settings = {}
mongoengine.connection._connections = {}
mongoengine.connection._dbs = {}
def test_register_connection(self):
"""
Ensure that the register_connection() method handles async=True properly.
"""
register_connection('asyncdb', 'mongoengineasynctest', async=True)
self.assertEqual(
mongoengine.connection._connection_settings['asyncdb']['name'],
'mongoengineasynctest')
self.assertTrue(
mongoengine.connection._connection_settings['asyncdb']['async'])
conn = get_connection('asyncdb')
self.assertTrue(isinstance(conn, motor.MotorClient))
db = get_db('asyncdb')
self.assertTrue(isinstance(db, motor.MotorDatabase))
self.assertEqual(db.name, 'mongoengineasynctest')
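The test above fixes the contract this branch adds: register_connection(..., async=True) must record the flag, and get_connection()/get_db() must then hand back Motor objects instead of their PyMongo counterparts. A rough sketch of the dispatch that implies (hypothetical helper, not the actual mongoengine/connection.py diff):

    import motor
    import pymongo

    def _make_client(settings):
        # Choose the driver from the stored 'async' flag; Motor wraps
        # PyMongo with non-blocking, Tornado-friendly methods.
        host = settings.get('host', 'localhost')
        port = settings.get('port', 27017)
        if settings.get('async'):
            return motor.MotorClient(host, port)
        return pymongo.MongoClient(host, port)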


@@ -93,7 +93,6 @@ class DeltaTest(unittest.TestCase):
def delta_recursive(self, DocClass, EmbeddedClass): def delta_recursive(self, DocClass, EmbeddedClass):
class Embedded(EmbeddedClass): class Embedded(EmbeddedClass):
id = StringField()
string_field = StringField() string_field = StringField()
int_field = IntField() int_field = IntField()
dict_field = DictField() dict_field = DictField()
@@ -115,7 +114,6 @@ class DeltaTest(unittest.TestCase):
self.assertEqual(doc._delta(), ({}, {})) self.assertEqual(doc._delta(), ({}, {}))
embedded_1 = Embedded() embedded_1 = Embedded()
embedded_1.id = "010101"
embedded_1.string_field = 'hello' embedded_1.string_field = 'hello'
embedded_1.int_field = 1 embedded_1.int_field = 1
embedded_1.dict_field = {'hello': 'world'} embedded_1.dict_field = {'hello': 'world'}
@@ -125,7 +123,6 @@ class DeltaTest(unittest.TestCase):
self.assertEqual(doc._get_changed_fields(), ['embedded_field']) self.assertEqual(doc._get_changed_fields(), ['embedded_field'])
embedded_delta = { embedded_delta = {
'id': "010101",
'string_field': 'hello', 'string_field': 'hello',
'int_field': 1, 'int_field': 1,
'dict_field': {'hello': 'world'}, 'dict_field': {'hello': 'world'},
@@ -253,13 +250,13 @@ class DeltaTest(unittest.TestCase):
self.assertEqual(doc.embedded_field.list_field[2].list_field, self.assertEqual(doc.embedded_field.list_field[2].list_field,
[1, 2, {'hello': 'world'}]) [1, 2, {'hello': 'world'}])
del doc.embedded_field.list_field[2].list_field[2]['hello'] del(doc.embedded_field.list_field[2].list_field[2]['hello'])
self.assertEqual(doc._delta(), self.assertEqual(doc._delta(),
({}, {'embedded_field.list_field.2.list_field.2.hello': 1})) ({}, {'embedded_field.list_field.2.list_field.2.hello': 1}))
doc.save() doc.save()
doc = doc.reload(10) doc = doc.reload(10)
del doc.embedded_field.list_field[2].list_field del(doc.embedded_field.list_field[2].list_field)
self.assertEqual(doc._delta(), self.assertEqual(doc._delta(),
({}, {'embedded_field.list_field.2.list_field': 1})) ({}, {'embedded_field.list_field.2.list_field': 1}))
@@ -593,13 +590,13 @@ class DeltaTest(unittest.TestCase):
self.assertEqual(doc.embedded_field.list_field[2].list_field, self.assertEqual(doc.embedded_field.list_field[2].list_field,
[1, 2, {'hello': 'world'}]) [1, 2, {'hello': 'world'}])
del doc.embedded_field.list_field[2].list_field[2]['hello'] del(doc.embedded_field.list_field[2].list_field[2]['hello'])
self.assertEqual(doc._delta(), self.assertEqual(doc._delta(),
({}, {'db_embedded_field.db_list_field.2.db_list_field.2.hello': 1})) ({}, {'db_embedded_field.db_list_field.2.db_list_field.2.hello': 1}))
doc.save() doc.save()
doc = doc.reload(10) doc = doc.reload(10)
del doc.embedded_field.list_field[2].list_field del(doc.embedded_field.list_field[2].list_field)
self.assertEqual(doc._delta(), ({}, self.assertEqual(doc._delta(), ({},
{'db_embedded_field.db_list_field.2.db_list_field': 1})) {'db_embedded_field.db_list_field.2.db_list_field': 1}))
@@ -615,7 +612,7 @@ class DeltaTest(unittest.TestCase):
SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {})) SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))
p.doc = 123 p.doc = 123
del p.doc del(p.doc)
self.assertEqual(p._delta(), ( self.assertEqual(p._delta(), (
SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {})) SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))
@@ -735,56 +732,6 @@ class DeltaTest(unittest.TestCase):
mydoc._clear_changed_fields() mydoc._clear_changed_fields()
self.assertEqual([], mydoc._get_changed_fields()) self.assertEqual([], mydoc._get_changed_fields())
def test_lower_level_mark_as_changed(self):
class EmbeddedDoc(EmbeddedDocument):
name = StringField()
class MyDoc(Document):
subs = MapField(EmbeddedDocumentField(EmbeddedDoc))
MyDoc.drop_collection()
MyDoc().save()
mydoc = MyDoc.objects.first()
mydoc.subs['a'] = EmbeddedDoc()
self.assertEqual(["subs.a"], mydoc._get_changed_fields())
subdoc = mydoc.subs['a']
subdoc.name = 'bar'
self.assertEqual(["name"], subdoc._get_changed_fields())
self.assertEqual(["subs.a"], mydoc._get_changed_fields())
mydoc.save()
mydoc._clear_changed_fields()
self.assertEqual([], mydoc._get_changed_fields())
def test_upper_level_mark_as_changed(self):
class EmbeddedDoc(EmbeddedDocument):
name = StringField()
class MyDoc(Document):
subs = MapField(EmbeddedDocumentField(EmbeddedDoc))
MyDoc.drop_collection()
MyDoc(subs={'a': EmbeddedDoc(name='foo')}).save()
mydoc = MyDoc.objects.first()
subdoc = mydoc.subs['a']
subdoc.name = 'bar'
self.assertEqual(["name"], subdoc._get_changed_fields())
self.assertEqual(["subs.a.name"], mydoc._get_changed_fields())
mydoc.subs['a'] = EmbeddedDoc()
self.assertEqual(["subs.a"], mydoc._get_changed_fields())
mydoc.save()
mydoc._clear_changed_fields()
self.assertEqual([], mydoc._get_changed_fields())
def test_referenced_object_changed_attributes(self): def test_referenced_object_changed_attributes(self):
"""Ensures that when you save a new reference to a field, the referenced object isn't altered""" """Ensures that when you save a new reference to a field, the referenced object isn't altered"""
@@ -827,43 +774,5 @@ class DeltaTest(unittest.TestCase):
org2.reload() org2.reload()
self.assertEqual(org2.name, 'New Org 2') self.assertEqual(org2.name, 'New Org 2')
def test_delta_for_nested_map_fields(self):
class UInfoDocument(Document):
phone = StringField()
class EmbeddedRole(EmbeddedDocument):
type = StringField()
class EmbeddedUser(EmbeddedDocument):
name = StringField()
roles = MapField(field=EmbeddedDocumentField(EmbeddedRole))
rolist = ListField(field=EmbeddedDocumentField(EmbeddedRole))
info = ReferenceField(UInfoDocument)
class Doc(Document):
users = MapField(field=EmbeddedDocumentField(EmbeddedUser))
num = IntField(default=-1)
Doc.drop_collection()
doc = Doc(num=1)
doc.users["007"] = EmbeddedUser(name="Agent007")
doc.save()
uinfo = UInfoDocument(phone="79089269066")
uinfo.save()
d = Doc.objects(num=1).first()
d.users["007"]["roles"]["666"] = EmbeddedRole(type="superadmin")
d.users["007"]["rolist"].append(EmbeddedRole(type="oops"))
d.users["007"]["info"] = uinfo
delta = d._delta()
self.assertEqual(True, "users.007.roles.666" in delta[0])
self.assertEqual(True, "users.007.rolist" in delta[0])
self.assertEqual(True, "users.007.info" in delta[0])
self.assertEqual('superadmin', delta[0]["users.007.roles.666"]["type"])
self.assertEqual('oops', delta[0]["users.007.rolist"][0]["type"])
self.assertEqual(uinfo.id, delta[0]["users.007.info"])
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()
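The removed map-field test also documents the shape of _delta(): a (sets, unsets) pair keyed by dotted paths. An illustrative round trip, assuming a local mongod:

    from mongoengine import (Document, EmbeddedDocument,
                             EmbeddedDocumentField, MapField,
                             StringField, connect)

    class Role(EmbeddedDocument):
        type = StringField()

    class Doc(Document):
        users = MapField(field=EmbeddedDocumentField(Role))

    connect('deltatest')
    doc = Doc(users={'007': Role(type='admin')}).save()
    doc.users['007'].type = 'superadmin'
    sets, unsets = doc._delta()
    # roughly ({'users.007.type': 'superadmin'}, {})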


@@ -72,7 +72,7 @@ class DynamicTest(unittest.TestCase):
obj = collection.find_one() obj = collection.find_one()
self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'misc', 'name']) self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'misc', 'name'])
del p.misc del(p.misc)
p.save() p.save()
p = self.Person.objects.get() p = self.Person.objects.get()
@@ -81,25 +81,6 @@ class DynamicTest(unittest.TestCase):
obj = collection.find_one() obj = collection.find_one()
self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'name']) self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'name'])
def test_reload_after_unsetting(self):
p = self.Person()
p.misc = 22
p.save()
p.update(unset__misc=1)
p.reload()
def test_reload_dynamic_field(self):
self.Person.objects.delete()
p = self.Person.objects.create()
p.update(age=1)
self.assertEqual(len(p._data), 3)
self.assertEqual(sorted(p._data.keys()), ['_cls', 'id', 'name'])
p.reload()
self.assertEqual(len(p._data), 4)
self.assertEqual(sorted(p._data.keys()), ['_cls', 'age', 'id', 'name'])
def test_dynamic_document_queries(self): def test_dynamic_document_queries(self):
"""Ensure we can query dynamic fields""" """Ensure we can query dynamic fields"""
p = self.Person() p = self.Person()
@@ -141,15 +122,6 @@ class DynamicTest(unittest.TestCase):
self.assertEqual(1, self.Person.objects(misc__hello='world').count()) self.assertEqual(1, self.Person.objects(misc__hello='world').count())
def test_three_level_complex_data_lookups(self):
"""Ensure you can query three level document dynamic fields"""
p = self.Person()
p.misc = {'hello': {'hello2': 'world'}}
p.save()
# from pprint import pprint as pp; import pdb; pdb.set_trace();
print self.Person.objects(misc__hello__hello2='world')
self.assertEqual(1, self.Person.objects(misc__hello__hello2='world').count())
def test_complex_embedded_document_validation(self): def test_complex_embedded_document_validation(self):
"""Ensure embedded dynamic documents may be validated""" """Ensure embedded dynamic documents may be validated"""
class Embedded(DynamicEmbeddedDocument): class Embedded(DynamicEmbeddedDocument):
@@ -352,7 +324,7 @@ class DynamicTest(unittest.TestCase):
person = Person.objects.first() person = Person.objects.first()
person.attrval = "This works" person.attrval = "This works"
person["phone"] = "555-1212" # but this should too person["phone"] = "555-1212" # but this should too
# Same thing two levels deep # Same thing two levels deep
person["address"]["city"] = "Lundenne" person["address"]["city"] = "Lundenne"
@@ -368,6 +340,7 @@ class DynamicTest(unittest.TestCase):
self.assertEqual(Person.objects.first().address.city, "Londinium") self.assertEqual(Person.objects.first().address.city, "Londinium")
person = Person.objects.first() person = Person.objects.first()
person["age"] = 35 person["age"] = 35
person.save() person.save()
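For reference, the dynamic behaviour these tests exercise: attribute and item assignment are interchangeable on a DynamicDocument, and nested dict values stay queryable through __ paths. A compact sketch, assuming a local mongod:

    from mongoengine import DynamicDocument, StringField, connect

    class Person(DynamicDocument):
        name = StringField()

    connect('dynamictest')
    p = Person(name='Dave')
    p.misc = {'hello': 'world'}   # undeclared field, stored dynamically
    p['phone'] = '555-1212'       # item assignment works the same way
    p.save()

    assert Person.objects(misc__hello='world').count() == 1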


@@ -1,8 +1,9 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
import unittest import unittest
import sys import sys
sys.path[0:0] = [""]
import os
import pymongo import pymongo
from nose.plugins.skip import SkipTest from nose.plugins.skip import SkipTest
@@ -17,7 +18,7 @@ __all__ = ("IndexesTest", )
class IndexesTest(unittest.TestCase): class IndexesTest(unittest.TestCase):
def setUp(self): def setUp(self):
self.connection = connect(db='mongoenginetest') connect(db='mongoenginetest')
self.db = get_db() self.db = get_db()
class Person(Document): class Person(Document):
@@ -31,7 +32,10 @@ class IndexesTest(unittest.TestCase):
self.Person = Person self.Person = Person
def tearDown(self): def tearDown(self):
self.connection.drop_database(self.db) for collection in self.db.collection_names():
if 'system.' in collection:
continue
self.db.drop_collection(collection)
def test_indexes_document(self): def test_indexes_document(self):
"""Ensure that indexes are used when meta[indexes] is specified for """Ensure that indexes are used when meta[indexes] is specified for
@@ -139,7 +143,7 @@ class IndexesTest(unittest.TestCase):
meta = { meta = {
'indexes': [ 'indexes': [
{ {
'fields': ('title',), 'fields': ('title',),
}, },
], ],
'allow_inheritance': True, 'allow_inheritance': True,
@@ -271,60 +275,6 @@ class IndexesTest(unittest.TestCase):
info = [value['key'] for key, value in info.iteritems()] info = [value['key'] for key, value in info.iteritems()]
self.assertTrue([('current.location.point', '2d')] in info) self.assertTrue([('current.location.point', '2d')] in info)
def test_explicit_geosphere_index(self):
"""Ensure that geosphere indexes work when created via meta[indexes]
"""
class Place(Document):
location = DictField()
meta = {
'allow_inheritance': True,
'indexes': [
'(location.point',
]
}
self.assertEqual([{'fields': [('location.point', '2dsphere')]}],
Place._meta['index_specs'])
Place.ensure_indexes()
info = Place._get_collection().index_information()
info = [value['key'] for key, value in info.iteritems()]
self.assertTrue([('location.point', '2dsphere')] in info)
def test_explicit_geohaystack_index(self):
"""Ensure that geohaystack indexes work when created via meta[indexes]
"""
raise SkipTest('GeoHaystack index creation is not supported for now'
'from meta, as it requires a bucketSize parameter.')
class Place(Document):
location = DictField()
name = StringField()
meta = {
'indexes': [
(')location.point', 'name')
]
}
self.assertEqual([{'fields': [('location.point', 'geoHaystack'), ('name', 1)]}],
Place._meta['index_specs'])
Place.ensure_indexes()
info = Place._get_collection().index_information()
info = [value['key'] for key, value in info.iteritems()]
self.assertTrue([('location.point', 'geoHaystack')] in info)
def test_create_geohaystack_index(self):
"""Ensure that geohaystack indexes can be created
"""
class Place(Document):
location = DictField()
name = StringField()
Place.create_index({'fields': (')location.point', 'name')}, bucketSize=10)
info = Place._get_collection().index_information()
info = [value['key'] for key, value in info.iteritems()]
self.assertTrue([('location.point', 'geoHaystack'), ('name', 1)] in info)
def test_dictionary_indexes(self): def test_dictionary_indexes(self):
"""Ensure that indexes are used when meta[indexes] contains """Ensure that indexes are used when meta[indexes] contains
dictionaries instead of lists. dictionaries instead of lists.
@@ -482,7 +432,6 @@ class IndexesTest(unittest.TestCase):
class Test(Document): class Test(Document):
a = IntField() a = IntField()
b = IntField()
meta = { meta = {
'indexes': ['a'], 'indexes': ['a'],
@@ -494,36 +443,16 @@ class IndexesTest(unittest.TestCase):
obj = Test(a=1) obj = Test(a=1)
obj.save() obj.save()
connection = get_connection()
IS_MONGODB_3 = connection.server_info()['versionArray'][0] >= 3
# Need to be explicit about covered indexes as mongoDB doesn't know if # Need to be explicit about covered indexes as mongoDB doesn't know if
# the documents returned might have more keys in that here. # the documents returned might have more keys in that here.
query_plan = Test.objects(id=obj.id).exclude('a').explain() query_plan = Test.objects(id=obj.id).exclude('a').explain()
if not IS_MONGODB_3: self.assertFalse(query_plan['indexOnly'])
self.assertFalse(query_plan['indexOnly'])
else:
self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), 'IDHACK')
query_plan = Test.objects(id=obj.id).only('id').explain() query_plan = Test.objects(id=obj.id).only('id').explain()
if not IS_MONGODB_3: self.assertTrue(query_plan['indexOnly'])
self.assertTrue(query_plan['indexOnly'])
else:
self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), 'IDHACK')
query_plan = Test.objects(a=1).only('a').exclude('id').explain() query_plan = Test.objects(a=1).only('a').exclude('id').explain()
if not IS_MONGODB_3: self.assertTrue(query_plan['indexOnly'])
self.assertTrue(query_plan['indexOnly'])
else:
self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), 'IXSCAN')
self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('stage'), 'PROJECTION')
query_plan = Test.objects(a=1).explain()
if not IS_MONGODB_3:
self.assertFalse(query_plan['indexOnly'])
else:
self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), 'IXSCAN')
self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('stage'), 'FETCH')
def test_index_on_id(self): def test_index_on_id(self):
@@ -562,22 +491,16 @@ class IndexesTest(unittest.TestCase):
self.assertEqual(BlogPost.objects.count(), 10) self.assertEqual(BlogPost.objects.count(), 10)
self.assertEqual(BlogPost.objects.hint().count(), 10) self.assertEqual(BlogPost.objects.hint().count(), 10)
self.assertEqual(BlogPost.objects.hint([('tags', 1)]).count(), 10)
# PyMongo 3.0 bug only, works correctly with 2.X and 3.0.1+ versions self.assertEqual(BlogPost.objects.hint([('ZZ', 1)]).count(), 10)
if pymongo.version != '3.0':
self.assertEqual(BlogPost.objects.hint([('tags', 1)]).count(), 10)
self.assertEqual(BlogPost.objects.hint([('ZZ', 1)]).count(), 10) def invalid_index():
BlogPost.objects.hint('tags')
if pymongo.version >= '2.8': self.assertRaises(TypeError, invalid_index)
self.assertEqual(BlogPost.objects.hint('tags').count(), 10)
else:
def invalid_index():
BlogPost.objects.hint('tags').next()
self.assertRaises(TypeError, invalid_index)
def invalid_index_2(): def invalid_index_2():
return BlogPost.objects.hint(('tags', 1)).next() return BlogPost.objects.hint(('tags', 1))
self.assertRaises(Exception, invalid_index_2) self.assertRaises(Exception, invalid_index_2)
def test_unique(self): def test_unique(self):
@@ -654,38 +577,6 @@ class IndexesTest(unittest.TestCase):
BlogPost.drop_collection() BlogPost.drop_collection()
def test_unique_embedded_document_in_list(self):
"""
Ensure that the uniqueness constraints are applied to fields in
embedded documents, even when the embedded documents are in a
list field.
"""
class SubDocument(EmbeddedDocument):
year = IntField(db_field='yr')
slug = StringField(unique=True)
class BlogPost(Document):
title = StringField()
subs = ListField(EmbeddedDocumentField(SubDocument))
BlogPost.drop_collection()
post1 = BlogPost(
title='test1', subs=[
SubDocument(year=2009, slug='conflict'),
SubDocument(year=2009, slug='conflict')
]
)
post1.save()
post2 = BlogPost(
title='test2', subs=[SubDocument(year=2014, slug='conflict')]
)
self.assertRaises(NotUniqueError, post2.save)
BlogPost.drop_collection()
def test_unique_with_embedded_document_and_embedded_unique(self): def test_unique_with_embedded_document_and_embedded_unique(self):
"""Ensure that uniqueness constraints are applied to fields on """Ensure that uniqueness constraints are applied to fields on
embedded documents. And work with unique_with as well. embedded documents. And work with unique_with as well.
@@ -818,34 +709,33 @@ class IndexesTest(unittest.TestCase):
name = StringField(required=True) name = StringField(required=True)
term = StringField(required=True) term = StringField(required=True)
class ReportEmbedded(Document): class Report(Document):
key = EmbeddedDocumentField(CompoundKey, primary_key=True) key = EmbeddedDocumentField(CompoundKey, primary_key=True)
text = StringField() text = StringField()
Report.drop_collection()
my_key = CompoundKey(name="n", term="ok") my_key = CompoundKey(name="n", term="ok")
report = ReportEmbedded(text="OK", key=my_key).save() report = Report(text="OK", key=my_key).save()
self.assertEqual({'text': 'OK', '_id': {'term': 'ok', 'name': 'n'}}, self.assertEqual({'text': 'OK', '_id': {'term': 'ok', 'name': 'n'}},
report.to_mongo()) report.to_mongo())
self.assertEqual(report, ReportEmbedded.objects.get(pk=my_key)) self.assertEqual(report, Report.objects.get(pk=my_key))
def test_compound_key_dictfield(self): def test_compound_key_dictfield(self):
class ReportDictField(Document): class Report(Document):
key = DictField(primary_key=True) key = DictField(primary_key=True)
text = StringField() text = StringField()
Report.drop_collection()
my_key = {"name": "n", "term": "ok"} my_key = {"name": "n", "term": "ok"}
report = ReportDictField(text="OK", key=my_key).save() report = Report(text="OK", key=my_key).save()
self.assertEqual({'text': 'OK', '_id': {'term': 'ok', 'name': 'n'}}, self.assertEqual({'text': 'OK', '_id': {'term': 'ok', 'name': 'n'}},
report.to_mongo()) report.to_mongo())
self.assertEqual(report, Report.objects.get(pk=my_key))
# We can't directly call ReportDictField.objects.get(pk=my_key),
# because dicts are unordered, and if the order in MongoDB is
# different than the one in `my_key`, this test will fail.
self.assertEqual(report, ReportDictField.objects.get(pk__name=my_key['name']))
self.assertEqual(report, ReportDictField.objects.get(pk__term=my_key['term']))
def test_string_indexes(self): def test_string_indexes(self):
@@ -860,20 +750,6 @@ class IndexesTest(unittest.TestCase):
self.assertTrue([('provider_ids.foo', 1)] in info) self.assertTrue([('provider_ids.foo', 1)] in info)
self.assertTrue([('provider_ids.bar', 1)] in info) self.assertTrue([('provider_ids.bar', 1)] in info)
def test_sparse_compound_indexes(self):
class MyDoc(Document):
provider_ids = DictField()
meta = {
"indexes": [{'fields': ("provider_ids.foo", "provider_ids.bar"),
'sparse': True}],
}
info = MyDoc.objects._collection.index_information()
self.assertEqual([('provider_ids.foo', 1), ('provider_ids.bar', 1)],
info['provider_ids.foo_1_provider_ids.bar_1']['key'])
self.assertTrue(info['provider_ids.foo_1_provider_ids.bar_1']['sparse'])
def test_text_indexes(self): def test_text_indexes(self):
class Book(Document): class Book(Document):
@@ -887,141 +763,6 @@ class IndexesTest(unittest.TestCase):
key = indexes["title_text"]["key"] key = indexes["title_text"]["key"]
self.assertTrue(('_fts', 'text') in key) self.assertTrue(('_fts', 'text') in key)
def test_hashed_indexes(self):
class Book(Document):
ref_id = StringField()
meta = {
"indexes": ["#ref_id"],
}
indexes = Book.objects._collection.index_information()
self.assertTrue("ref_id_hashed" in indexes)
self.assertTrue(('ref_id', 'hashed') in indexes["ref_id_hashed"]["key"])
def test_indexes_after_database_drop(self):
"""
Test to ensure that indexes are re-created on a collection even
after the database has been dropped.
Issue #812
"""
# Use a new connection and database since dropping the database could
# cause concurrent tests to fail.
connection = connect(db='tempdatabase',
alias='test_indexes_after_database_drop')
class BlogPost(Document):
title = StringField()
slug = StringField(unique=True)
meta = {'db_alias': 'test_indexes_after_database_drop'}
try:
BlogPost.drop_collection()
# Create Post #1
post1 = BlogPost(title='test1', slug='test')
post1.save()
# Drop the Database
connection.drop_database('tempdatabase')
# Re-create Post #1
post1 = BlogPost(title='test1', slug='test')
post1.save()
# Create Post #2
post2 = BlogPost(title='test2', slug='test')
self.assertRaises(NotUniqueError, post2.save)
finally:
# Drop the temporary database at the end
connection.drop_database('tempdatabase')
def test_index_dont_send_cls_option(self):
"""
Ensure that 'cls' option is not sent through ensureIndex. We shouldn't
send internal MongoEngine arguments that are not a part of the index
spec.
This is directly related to the fact that MongoDB doesn't validate the
options that are passed to ensureIndex. For more details, see:
https://jira.mongodb.org/browse/SERVER-769
"""
class TestDoc(Document):
txt = StringField()
meta = {
'allow_inheritance': True,
'indexes': [
{'fields': ('txt',), 'cls': False}
]
}
class TestChildDoc(TestDoc):
txt2 = StringField()
meta = {
'indexes': [
{'fields': ('txt2',), 'cls': False}
]
}
TestDoc.drop_collection()
TestDoc.ensure_indexes()
TestChildDoc.ensure_indexes()
index_info = TestDoc._get_collection().index_information()
for key in index_info:
del index_info[key]['v'] # drop the index version - we don't care about that here
if 'ns' in index_info[key]:
del index_info[key]['ns'] # drop the index namespace - we don't care about that here, MongoDB 3+
if 'dropDups' in index_info[key]:
del index_info[key]['dropDups'] # drop the index dropDups - it is deprecated in MongoDB 3+
self.assertEqual(index_info, {
'txt_1': {
'key': [('txt', 1)],
'background': False
},
'_id_': {
'key': [('_id', 1)],
},
'txt2_1': {
'key': [('txt2', 1)],
'background': False
},
'_cls_1': {
'key': [('_cls', 1)],
'background': False,
}
})
def test_compound_index_underscore_cls_not_overwritten(self):
"""
Test that the compound index doesn't get another _cls when it is specified
"""
class TestDoc(Document):
shard_1 = StringField()
txt_1 = StringField()
meta = {
'collection': 'test',
'allow_inheritance': True,
'sparse': True,
'shard_key': 'shard_1',
'indexes': [
('shard_1', '_cls', 'txt_1'),
]
}
TestDoc.drop_collection()
TestDoc.ensure_indexes()
index_info = TestDoc._get_collection().index_information()
self.assertTrue('shard_1_1__cls_1_txt_1_1' in index_info)
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()
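The deleted tests were also the main documentation for the prefix shorthands in meta['indexes']; for the record, the two exercised in this file:

    from mongoengine import Document, DictField, StringField

    class Book(Document):
        ref_id = StringField()
        location = DictField()
        meta = {
            'indexes': [
                '#ref_id',           # '#' prefix -> hashed index
                '(location.point',   # '(' prefix -> 2dsphere index
            ],
        }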


@@ -307,69 +307,6 @@ class InheritanceTest(unittest.TestCase):
doc = Animal(name='dog') doc = Animal(name='dog')
self.assertFalse('_cls' in doc.to_mongo()) self.assertFalse('_cls' in doc.to_mongo())
def test_abstract_handle_ids_in_metaclass_properly(self):
class City(Document):
continent = StringField()
meta = {'abstract': True,
'allow_inheritance': False}
class EuropeanCity(City):
name = StringField()
berlin = EuropeanCity(name='Berlin', continent='Europe')
self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered))
self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered))
self.assertEqual(len(berlin._fields_ordered), 3)
self.assertEqual(berlin._fields_ordered[0], 'id')
def test_auto_id_not_set_if_specific_in_parent_class(self):
class City(Document):
continent = StringField()
city_id = IntField(primary_key=True)
meta = {'abstract': True,
'allow_inheritance': False}
class EuropeanCity(City):
name = StringField()
berlin = EuropeanCity(name='Berlin', continent='Europe')
self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered))
self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered))
self.assertEqual(len(berlin._fields_ordered), 3)
self.assertEqual(berlin._fields_ordered[0], 'city_id')
def test_auto_id_vs_non_pk_id_field(self):
class City(Document):
continent = StringField()
id = IntField()
meta = {'abstract': True,
'allow_inheritance': False}
class EuropeanCity(City):
name = StringField()
berlin = EuropeanCity(name='Berlin', continent='Europe')
self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered))
self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered))
self.assertEqual(len(berlin._fields_ordered), 4)
self.assertEqual(berlin._fields_ordered[0], 'auto_id_0')
berlin.save()
self.assertEqual(berlin.pk, berlin.auto_id_0)
def test_abstract_document_creation_does_not_fail(self):
class City(Document):
continent = StringField()
meta = {'abstract': True,
'allow_inheritance': False}
bkk = City(continent='asia')
self.assertEqual(None, bkk.pk)
# TODO: expected error? Shouldn't we create a new error type?
self.assertRaises(KeyError, lambda: setattr(bkk, 'pk', 1))
def test_allow_inheritance_embedded_document(self): def test_allow_inheritance_embedded_document(self):
"""Ensure embedded documents respect inheritance """Ensure embedded documents respect inheritance
""" """
@@ -411,7 +348,7 @@ class InheritanceTest(unittest.TestCase):
try: try:
class MyDocument(DateCreatedDocument, DateUpdatedDocument): class MyDocument(DateCreatedDocument, DateUpdatedDocument):
pass pass
except Exception: except:
self.assertTrue(False, "Couldn't create MyDocument class") self.assertTrue(False, "Couldn't create MyDocument class")
def test_abstract_documents(self): def test_abstract_documents(self):
@@ -460,16 +397,6 @@ class InheritanceTest(unittest.TestCase):
meta = {'abstract': True} meta = {'abstract': True}
self.assertRaises(ValueError, create_bad_abstract) self.assertRaises(ValueError, create_bad_abstract)
def test_abstract_embedded_documents(self):
# 789: EmbeddedDocument shouldn't inherit abstract
class A(EmbeddedDocument):
meta = {"abstract": True}
class B(A):
pass
self.assertFalse(B._meta["abstract"])
def test_inherited_collections(self): def test_inherited_collections(self):
"""Ensure that subclassed documents don't override parents' """Ensure that subclassed documents don't override parents'
collections collections
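The deleted abstract-document tests pinned down some subtle primary-key handling; the core pattern they covered:

    from mongoengine import Document, StringField

    class City(Document):
        continent = StringField()
        meta = {'abstract': True, 'allow_inheritance': False}

    class EuropeanCity(City):
        name = StringField()

    berlin = EuropeanCity(name='Berlin', continent='Europe')
    # The abstract parent contributes fields but no collection; the
    # automatic 'id' primary key is created once, on the concrete class.
    assert berlin._fields_ordered[0] == 'id'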

File diff suppressed because it is too large


@@ -51,10 +51,6 @@ class TestJson(unittest.TestCase):
string = StringField() string = StringField()
embedded_field = EmbeddedDocumentField(Embedded) embedded_field = EmbeddedDocumentField(Embedded)
def __eq__(self, other):
return (self.string == other.string and
self.embedded_field == other.embedded_field)
doc = Doc(string="Hi", embedded_field=Embedded(string="Hi")) doc = Doc(string="Hi", embedded_field=Embedded(string="Hi"))
doc_json = doc.to_json(sort_keys=True, separators=(',', ':')) doc_json = doc.to_json(sort_keys=True, separators=(',', ':'))
@@ -103,10 +99,6 @@ class TestJson(unittest.TestCase):
generic_embedded_document_field = GenericEmbeddedDocumentField( generic_embedded_document_field = GenericEmbeddedDocumentField(
default=lambda: EmbeddedDoc()) default=lambda: EmbeddedDoc())
def __eq__(self, other):
import json
return json.loads(self.to_json()) == json.loads(other.to_json())
doc = Doc() doc = Doc()
self.assertEqual(doc, Doc.from_json(doc.to_json())) self.assertEqual(doc, Doc.from_json(doc.to_json()))
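A quick round trip of the JSON API under test here:

    from mongoengine import Document, StringField

    class Doc(Document):
        string = StringField()

    doc = Doc(string='Hi')
    payload = doc.to_json(sort_keys=True, separators=(',', ':'))
    assert Doc.from_json(payload).string == 'Hi'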


@@ -165,53 +165,6 @@ class ValidatorErrorTest(unittest.TestCase):
self.assertRaises(ValidationError, lambda: d2.validate()) self.assertRaises(ValidationError, lambda: d2.validate())
def test_parent_reference_in_child_document(self):
"""
Test to ensure a ReferenceField can store a reference to a parent
class when inherited. Issue #954.
"""
class Parent(Document):
meta = {'allow_inheritance': True}
reference = ReferenceField('self')
class Child(Parent):
pass
parent = Parent()
parent.save()
child = Child(reference=parent)
# Saving child should not raise a ValidationError
try:
child.save()
except ValidationError as e:
self.fail("ValidationError raised: %s" % e.message)
def test_parent_reference_set_as_attribute_in_child_document(self):
"""
Test to ensure a ReferenceField can store a reference to a parent
class when inherited and when set via attribute. Issue #954.
"""
class Parent(Document):
meta = {'allow_inheritance': True}
reference = ReferenceField('self')
class Child(Parent):
pass
parent = Parent()
parent.save()
child = Child()
child.reference = parent
# Saving the child should not raise a ValidationError
try:
child.save()
except ValidationError as e:
self.fail("ValidationError raised: %s" % e.message)
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()


@@ -1,3 +1,3 @@
from fields import * from fields import *
from file_tests import * from file_tests import *
from geo import * from geo import *

File diff suppressed because it is too large


@@ -12,7 +12,7 @@ import gridfs
from nose.plugins.skip import SkipTest from nose.plugins.skip import SkipTest
from mongoengine import * from mongoengine import *
from mongoengine.connection import get_db from mongoengine.connection import get_db
from mongoengine.python_support import b, StringIO from mongoengine.python_support import PY3, b, StringIO
try: try:
from PIL import Image from PIL import Image
@@ -112,43 +112,7 @@ class FileTest(unittest.TestCase):
result.the_file.delete() result.the_file.delete()
# Ensure deleted file returns None # Ensure deleted file returns None
self.assertTrue(result.the_file.read() is None) self.assertTrue(result.the_file.read() == None)
def test_file_fields_stream_after_none(self):
"""Ensure that a file field can be written to after it has been saved as
None
"""
class StreamFile(Document):
the_file = FileField()
StreamFile.drop_collection()
text = b('Hello, World!')
more_text = b('Foo Bar')
content_type = 'text/plain'
streamfile = StreamFile()
streamfile.save()
streamfile.the_file.new_file()
streamfile.the_file.write(text)
streamfile.the_file.write(more_text)
streamfile.the_file.close()
streamfile.save()
result = StreamFile.objects.first()
self.assertTrue(streamfile == result)
self.assertEqual(result.the_file.read(), text + more_text)
# self.assertEqual(result.the_file.content_type, content_type)
result.the_file.seek(0)
self.assertEqual(result.the_file.tell(), 0)
self.assertEqual(result.the_file.read(len(text)), text)
self.assertEqual(result.the_file.tell(), len(text))
self.assertEqual(result.the_file.read(len(more_text)), more_text)
self.assertEqual(result.the_file.tell(), len(text + more_text))
result.the_file.delete()
# Ensure deleted file returns None
self.assertTrue(result.the_file.read() is None)
def test_file_fields_set(self): def test_file_fields_set(self):
@@ -297,71 +261,6 @@ class FileTest(unittest.TestCase):
test_file = TestFile() test_file = TestFile()
self.assertFalse(test_file.the_file in [{"test": 1}]) self.assertFalse(test_file.the_file in [{"test": 1}])
def test_file_disk_space(self):
""" Test disk space usage when we delete/replace a file """
class TestFile(Document):
the_file = FileField()
text = b('Hello, World!')
content_type = 'text/plain'
testfile = TestFile()
testfile.the_file.put(text, content_type=content_type, filename="hello")
testfile.save()
# Now check fs.files and fs.chunks
db = TestFile._get_db()
files = db.fs.files.find()
chunks = db.fs.chunks.find()
self.assertEquals(len(list(files)), 1)
self.assertEquals(len(list(chunks)), 1)
# Deleting the document should delete the files
testfile.delete()
files = db.fs.files.find()
chunks = db.fs.chunks.find()
self.assertEquals(len(list(files)), 0)
self.assertEquals(len(list(chunks)), 0)
# Test case where we don't store a file in the first place
testfile = TestFile()
testfile.save()
files = db.fs.files.find()
chunks = db.fs.chunks.find()
self.assertEquals(len(list(files)), 0)
self.assertEquals(len(list(chunks)), 0)
testfile.delete()
files = db.fs.files.find()
chunks = db.fs.chunks.find()
self.assertEquals(len(list(files)), 0)
self.assertEquals(len(list(chunks)), 0)
# Test case where we overwrite the file
testfile = TestFile()
testfile.the_file.put(text, content_type=content_type, filename="hello")
testfile.save()
text = b('Bonjour, World!')
testfile.the_file.replace(text, content_type=content_type, filename="hello")
testfile.save()
files = db.fs.files.find()
chunks = db.fs.chunks.find()
self.assertEquals(len(list(files)), 1)
self.assertEquals(len(list(chunks)), 1)
testfile.delete()
files = db.fs.files.find()
chunks = db.fs.chunks.find()
self.assertEquals(len(list(files)), 0)
self.assertEquals(len(list(chunks)), 0)
def test_image_field(self): def test_image_field(self):
if not HAS_PIL: if not HAS_PIL:
raise SkipTest('PIL not installed') raise SkipTest('PIL not installed')
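In brief, the GridFS surface these tests rely on, assuming a local mongod:

    from mongoengine import Document, FileField, connect

    class TestFile(Document):
        the_file = FileField()

    connect('filetest')
    f = TestFile()
    f.the_file.put(b'Hello, World!', content_type='text/plain',
                   filename='hello')
    f.save()

    result = TestFile.objects.first()
    assert result.the_file.read() == b'Hello, World!'
    result.the_file.delete()               # drops the GridFS file
    assert result.the_file.read() is None  # deleted file reads as None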


@@ -115,7 +115,7 @@ class GeoFieldTest(unittest.TestCase):
expected = "Invalid LineString:\nBoth values (%s) in point must be float or int" % repr(coord[0]) expected = "Invalid LineString:\nBoth values (%s) in point must be float or int" % repr(coord[0])
self._test_for_expected_error(Location, coord, expected) self._test_for_expected_error(Location, coord, expected)
Location(loc=[[1, 2], [3, 4], [5, 6], [1, 2]]).validate() Location(loc=[[1, 2], [3, 4], [5, 6], [1,2]]).validate()
def test_polygon_validation(self): def test_polygon_validation(self):
class Location(Document): class Location(Document):
@@ -226,7 +226,7 @@ class GeoFieldTest(unittest.TestCase):
expected = "Invalid MultiLineString:\nBoth values (%s) in point must be float or int" % repr(coord[0][0]) expected = "Invalid MultiLineString:\nBoth values (%s) in point must be float or int" % repr(coord[0][0])
self._test_for_expected_error(Location, coord, expected) self._test_for_expected_error(Location, coord, expected)
Location(loc=[[[1, 2], [3, 4], [5, 6], [1, 2]]]).validate() Location(loc=[[[1, 2], [3, 4], [5, 6], [1,2]]]).validate()
def test_multipolygon_validation(self): def test_multipolygon_validation(self):
class Location(Document): class Location(Document):
@@ -336,11 +336,12 @@ class GeoFieldTest(unittest.TestCase):
Location.drop_collection() Location.drop_collection()
Parent.drop_collection() Parent.drop_collection()
Parent(name='Berlin').save() list(Parent.objects)
info = Parent._get_collection().index_information()
collection = Parent._get_collection()
info = collection.index_information()
self.assertFalse('location_2d' in info) self.assertFalse('location_2d' in info)
info = Location._get_collection().index_information()
self.assertTrue('location_2d' in info)
self.assertEqual(len(Parent._geo_indices()), 0) self.assertEqual(len(Parent._geo_indices()), 0)
self.assertEqual(len(Location._geo_indices()), 1) self.assertEqual(len(Location._geo_indices()), 1)


@@ -17,16 +17,7 @@ class PickleTest(Document):
photo = FileField() photo = FileField()
class NewDocumentPickleTest(Document): class PickleDyanmicEmbedded(DynamicEmbeddedDocument):
number = IntField()
string = StringField(choices=(('One', '1'), ('Two', '2')))
embedded = EmbeddedDocumentField(PickleEmbedded)
lists = ListField(StringField())
photo = FileField()
new_field = StringField()
class PickleDynamicEmbedded(DynamicEmbeddedDocument):
date = DateTimeField(default=datetime.now) date = DateTimeField(default=datetime.now)


@@ -1,11 +1,8 @@
import unittest
from convert_to_new_inheritance_model import * from convert_to_new_inheritance_model import *
from decimalfield_as_float import * from decimalfield_as_float import *
from referencefield_dbref_to_object_id import * from refrencefield_dbref_to_object_id import *
from turn_off_inheritance import * from turn_off_inheritance import *
from uuidfield_to_binary import * from uuidfield_to_binary import *
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()


@@ -1,15 +1,11 @@
import sys import sys
sys.path[0:0] = [""] sys.path[0:0] = [""]
import unittest import unittest
from datetime import datetime, timedelta from datetime import datetime, timedelta
from pymongo.errors import OperationFailure
from mongoengine import * from mongoengine import *
from mongoengine.connection import get_connection
from nose.plugins.skip import SkipTest
from nose.plugins.skip import SkipTest
__all__ = ("GeoQueriesTest",) __all__ = ("GeoQueriesTest",)
@@ -70,16 +66,6 @@ class GeoQueriesTest(unittest.TestCase):
self.assertEqual(events.count(), 1) self.assertEqual(events.count(), 1)
self.assertEqual(events[0], event2) self.assertEqual(events[0], event2)
# find events at least 10 degrees away from san francisco
point = [-122.415579, 37.7566023]
events = Event.objects(location__near=point, location__min_distance=10)
# The following real test passes on MongoDB 3 but minDistance seems
# buggy on older MongoDB versions
if get_connection().server_info()['versionArray'][0] > 2:
self.assertEqual(events.count(), 2)
else:
self.assertTrue(events.count() >= 2)
# find events within 10 degrees of san francisco # find events within 10 degrees of san francisco
point_and_distance = [[-122.415579, 37.7566023], 10] point_and_distance = [[-122.415579, 37.7566023], 10]
events = Event.objects(location__within_distance=point_and_distance) events = Event.objects(location__within_distance=point_and_distance)
@@ -155,13 +141,7 @@ class GeoQueriesTest(unittest.TestCase):
def test_spherical_geospatial_operators(self): def test_spherical_geospatial_operators(self):
"""Ensure that spherical geospatial queries are working """Ensure that spherical geospatial queries are working
""" """
# Needs MongoDB > 2.6.4 https://jira.mongodb.org/browse/SERVER-14039 raise SkipTest("https://jira.mongodb.org/browse/SERVER-14039")
connection = get_connection()
info = connection.test.command('buildInfo')
mongodb_version = tuple([int(i) for i in info['version'].split('.')])
if mongodb_version < (2, 6, 4):
raise SkipTest("Need MongoDB version 2.6.4+")
class Point(Document): class Point(Document):
location = GeoPointField() location = GeoPointField()
@@ -181,7 +161,7 @@ class GeoQueriesTest(unittest.TestCase):
# Same behavior for _within_spherical_distance # Same behavior for _within_spherical_distance
points = Point.objects( points = Point.objects(
location__within_spherical_distance=[[-122, 37.5], 60 / earth_radius] location__within_spherical_distance=[[-122, 37.5], 60/earth_radius]
) )
self.assertEqual(points.count(), 2) self.assertEqual(points.count(), 2)
@@ -189,24 +169,6 @@ class GeoQueriesTest(unittest.TestCase):
location__max_distance=60 / earth_radius) location__max_distance=60 / earth_radius)
self.assertEqual(points.count(), 2) self.assertEqual(points.count(), 2)
# Test query works with max_distance, being farther from one point
points = Point.objects(location__near_sphere=[-122, 37.8],
location__max_distance=60 / earth_radius)
close_point = points.first()
self.assertEqual(points.count(), 1)
# Test query works with min_distance, being farther from one point
points = Point.objects(location__near_sphere=[-122, 37.8],
location__min_distance=60 / earth_radius)
# The following real test passes on MongoDB 3 but minDistance seems
# buggy on older MongoDB versions
if get_connection().server_info()['versionArray'][0] > 2:
self.assertEqual(points.count(), 1)
far_point = points.first()
self.assertNotEqual(close_point, far_point)
else:
self.assertTrue(points.count() >= 1)
# Finds both points, but orders the north point first because it's # Finds both points, but orders the north point first because it's
# closer to the reference point to the north. # closer to the reference point to the north.
points = Point.objects(location__near_sphere=[-122, 38.5]) points = Point.objects(location__near_sphere=[-122, 38.5])
@@ -289,20 +251,6 @@ class GeoQueriesTest(unittest.TestCase):
self.assertEqual(events.count(), 2) self.assertEqual(events.count(), 2)
self.assertEqual(events[0], event3) self.assertEqual(events[0], event3)
# ensure min_distance and max_distance combine well
events = Event.objects(location__near=[-87.67892, 41.9120459],
location__min_distance=1000,
location__max_distance=10000).order_by("-date")
self.assertEqual(events.count(), 1)
self.assertEqual(events[0], event3)
# ensure ordering is respected by "near"
events = Event.objects(location__near=[-87.67892, 41.9120459],
# location__min_distance=10000
location__min_distance=10000).order_by("-date")
self.assertEqual(events.count(), 1)
self.assertEqual(events[0], event2)
# check that within_box works # check that within_box works
box = [(-125.0, 35.0), (-100.0, 40.0)] box = [(-125.0, 35.0), (-100.0, 40.0)]
events = Event.objects(location__geo_within_box=box) events = Event.objects(location__geo_within_box=box)
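The removed min/max-distance assertions encode the intended query surface; in short, reusing the Event document from this module:

    point = [-87.67892, 41.9120459]

    # Plain proximity, results ordered nearest-first:
    Event.objects(location__near=point)

    # Bounded ring: at least min_distance and at most max_distance away
    # (meters on 2dsphere indexes). Per the removed comments, combining
    # both bounds was only reliable on MongoDB 3+.
    Event.objects(location__near=point,
                  location__min_distance=1000,
                  location__max_distance=10000)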


@@ -1,78 +0,0 @@
import pickle
import unittest
from pymongo.mongo_client import MongoClient
from mongoengine import Document, StringField, IntField
from mongoengine.connection import connect
__author__ = 'stas'
class Person(Document):
name = StringField()
age = IntField()
class TestQuerysetPickable(unittest.TestCase):
"""
Test for adding pickling support for QuerySet instances
See issue https://github.com/MongoEngine/mongoengine/issues/442
"""
def setUp(self):
super(TestQuerysetPickable, self).setUp()
connection = connect(db="test") #type: pymongo.mongo_client.MongoClient
connection.drop_database("test")
self.john = Person.objects.create(
name="John",
age=21
)
def test_picke_simple_qs(self):
qs = Person.objects.all()
pickle.dumps(qs)
def _get_loaded(self, qs):
s = pickle.dumps(qs)
return pickle.loads(s)
def test_unpickle(self):
qs = Person.objects.all()
loadedQs = self._get_loaded(qs)
self.assertEqual(qs.count(), loadedQs.count())
#can update loadedQs
loadedQs.update(age=23)
#check
self.assertEqual(Person.objects.first().age, 23)
def test_pickle_support_filtration(self):
Person.objects.create(
name="Alice",
age=22
)
Person.objects.create(
name="Bob",
age=23
)
qs = Person.objects.filter(age__gte=22)
self.assertEqual(qs.count(), 2)
loaded = self._get_loaded(qs)
self.assertEqual(loaded.count(), 2)
self.assertEqual(loaded.filter(name="Bob").first().age, 23)
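The deleted module boils down to one guarantee (issue #442): a QuerySet survives a pickle round trip and stays usable. Reusing its Person document:

    import pickle

    qs = Person.objects.filter(age__gte=22)
    restored = pickle.loads(pickle.dumps(qs))

    assert restored.count() == qs.count()
    restored.update(age=23)   # the restored queryset is still live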

File diff suppressed because it is too large


@@ -1,7 +1,11 @@
import sys
sys.path[0:0] = [""]
import unittest import unittest
from mongoengine import * from mongoengine import *
from mongoengine.queryset import Q, transform from mongoengine.queryset import Q
from mongoengine.queryset import transform
__all__ = ("TransformTest",) __all__ = ("TransformTest",)
@@ -37,8 +41,8 @@ class TransformTest(unittest.TestCase):
DicDoc.drop_collection() DicDoc.drop_collection()
Doc.drop_collection() Doc.drop_collection()
DicDoc().save()
doc = Doc().save() doc = Doc().save()
dic_doc = DicDoc().save()
for k, v in (("set", "$set"), ("set_on_insert", "$setOnInsert"), ("push", "$push")): for k, v in (("set", "$set"), ("set_on_insert", "$setOnInsert"), ("push", "$push")):
update = transform.update(DicDoc, **{"%s__dictField__test" % k: doc}) update = transform.update(DicDoc, **{"%s__dictField__test" % k: doc})
@@ -51,6 +55,7 @@ class TransformTest(unittest.TestCase):
update = transform.update(DicDoc, pull__dictField__test=doc) update = transform.update(DicDoc, pull__dictField__test=doc)
self.assertTrue(isinstance(update["$pull"]["dictField"]["test"], dict)) self.assertTrue(isinstance(update["$pull"]["dictField"]["test"], dict))
def test_query_field_name(self): def test_query_field_name(self):
"""Ensure that the correct field name is used when querying. """Ensure that the correct field name is used when querying.
""" """
@@ -151,33 +156,26 @@ class TransformTest(unittest.TestCase):
class Doc(Document): class Doc(Document):
meta = {'allow_inheritance': False} meta = {'allow_inheritance': False}
raw_query = Doc.objects(__raw__={ raw_query = Doc.objects(__raw__={'deleted': False,
'deleted': False, 'scraped': 'yes',
'scraped': 'yes', '$nor': [{'views.extracted': 'no'},
'$nor': [ {'attachments.views.extracted':'no'}]
{'views.extracted': 'no'}, })._query
{'attachments.views.extracted': 'no'}
]
})._query
self.assertEqual(raw_query, { expected = {'deleted': False, 'scraped': 'yes',
'deleted': False, '$nor': [{'views.extracted': 'no'},
'scraped': 'yes', {'attachments.views.extracted': 'no'}]}
'$nor': [ self.assertEqual(expected, raw_query)
{'views.extracted': 'no'},
{'attachments.views.extracted': 'no'}
]
})
def test_geojson_PointField(self): def test_geojson_PointField(self):
class Location(Document): class Location(Document):
loc = PointField() loc = PointField()
update = transform.update(Location, set__loc=[1, 2]) update = transform.update(Location, set__loc=[1, 2])
self.assertEqual(update, {'$set': {'loc': {"type": "Point", "coordinates": [1, 2]}}}) self.assertEqual(update, {'$set': {'loc': {"type": "Point", "coordinates": [1,2]}}})
update = transform.update(Location, set__loc={"type": "Point", "coordinates": [1, 2]}) update = transform.update(Location, set__loc={"type": "Point", "coordinates": [1,2]})
self.assertEqual(update, {'$set': {'loc': {"type": "Point", "coordinates": [1, 2]}}}) self.assertEqual(update, {'$set': {'loc': {"type": "Point", "coordinates": [1,2]}}})
def test_geojson_LineStringField(self): def test_geojson_LineStringField(self):
class Location(Document): class Location(Document):
@@ -199,47 +197,5 @@ class TransformTest(unittest.TestCase):
update = transform.update(Location, set__poly={"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}) update = transform.update(Location, set__poly={"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]})
self.assertEqual(update, {'$set': {'poly': {"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}}}) self.assertEqual(update, {'$set': {'poly': {"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}}})
def test_type(self):
class Doc(Document):
df = DynamicField()
Doc(df=True).save()
Doc(df=7).save()
Doc(df="df").save()
self.assertEqual(Doc.objects(df__type=1).count(), 0) # double
self.assertEqual(Doc.objects(df__type=8).count(), 1) # bool
self.assertEqual(Doc.objects(df__type=2).count(), 1) # str
self.assertEqual(Doc.objects(df__type=16).count(), 1) # int
def test_last_field_name_like_operator(self):
class EmbeddedItem(EmbeddedDocument):
type = StringField()
name = StringField()
class Doc(Document):
item = EmbeddedDocumentField(EmbeddedItem)
Doc.drop_collection()
doc = Doc(item=EmbeddedItem(type="axe", name="Heroic axe"))
doc.save()
self.assertEqual(1, Doc.objects(item__type__="axe").count())
self.assertEqual(1, Doc.objects(item__name__="Heroic axe").count())
Doc.objects(id=doc.id).update(set__item__type__='sword')
self.assertEqual(1, Doc.objects(item__type__="sword").count())
self.assertEqual(0, Doc.objects(item__type__="axe").count())
def test_understandable_error_raised(self):
class Event(Document):
title = StringField()
location = GeoPointField()
box = [(35.0, -125.0), (40.0, -100.0)]
# I *meant* to execute location__within_box=box
events = Event.objects(location__within=box)
self.assertRaises(InvalidQueryError, lambda: events.count())
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()
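For reference, the transform helper under test turns keyword syntax into raw update documents, e.g. for the PointField Location above:

    from mongoengine.queryset import transform

    update = transform.update(Location, set__loc=[1, 2])
    assert update == {'$set': {'loc': {'type': 'Point',
                                       'coordinates': [1, 2]}}}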


@@ -1,12 +1,14 @@
import datetime import sys
import re sys.path[0:0] = [""]
import unittest import unittest
from bson import ObjectId from bson import ObjectId
from datetime import datetime
from mongoengine import * from mongoengine import *
from mongoengine.errors import InvalidQueryError
from mongoengine.queryset import Q from mongoengine.queryset import Q
from mongoengine.errors import InvalidQueryError
__all__ = ("QTest",) __all__ = ("QTest",)
@@ -130,12 +132,12 @@ class QTest(unittest.TestCase):
TestDoc(x=10).save() TestDoc(x=10).save()
TestDoc(y=True).save() TestDoc(y=True).save()
self.assertEqual(query, { self.assertEqual(query,
'$and': [ {'$and': [
{'$or': [{'x': {'$gt': 0}}, {'x': {'$exists': False}}]}, {'$or': [{'x': {'$gt': 0}}, {'x': {'$exists': False}}]},
{'$or': [{'x': {'$lt': 100}}, {'y': True}]} {'$or': [{'x': {'$lt': 100}}, {'y': True}]}
] ]})
})
self.assertEqual(2, TestDoc.objects(q1 & q2).count()) self.assertEqual(2, TestDoc.objects(q1 & q2).count())
def test_or_and_or_combination(self): def test_or_and_or_combination(self):
@@ -155,14 +157,15 @@ class QTest(unittest.TestCase):
q2 = (Q(x__lt=100) & (Q(y=False) | Q(y__exists=False))) q2 = (Q(x__lt=100) & (Q(y=False) | Q(y__exists=False)))
query = (q1 | q2).to_query(TestDoc) query = (q1 | q2).to_query(TestDoc)
self.assertEqual(query, { self.assertEqual(query,
'$or': [ {'$or': [
{'$and': [{'x': {'$gt': 0}}, {'$and': [{'x': {'$gt': 0}},
{'$or': [{'y': True}, {'y': {'$exists': False}}]}]}, {'$or': [{'y': True}, {'y': {'$exists': False}}]}]},
{'$and': [{'x': {'$lt': 100}}, {'$and': [{'x': {'$lt': 100}},
{'$or': [{'y': False}, {'y': {'$exists': False}}]}]} {'$or': [{'y': False}, {'y': {'$exists': False}}]}]}
] ]}
}) )
self.assertEqual(2, TestDoc.objects(q1 | q2).count()) self.assertEqual(2, TestDoc.objects(q1 | q2).count())
def test_multiple_occurence_in_field(self): def test_multiple_occurence_in_field(self):
@@ -212,19 +215,19 @@ class QTest(unittest.TestCase):
BlogPost.drop_collection()
-post1 = BlogPost(title='Test 1', publish_date=datetime.datetime(2010, 1, 8), published=False)
+post1 = BlogPost(title='Test 1', publish_date=datetime(2010, 1, 8), published=False)
post1.save()
-post2 = BlogPost(title='Test 2', publish_date=datetime.datetime(2010, 1, 15), published=True)
+post2 = BlogPost(title='Test 2', publish_date=datetime(2010, 1, 15), published=True)
post2.save()
post3 = BlogPost(title='Test 3', published=True)
post3.save()
-post4 = BlogPost(title='Test 4', publish_date=datetime.datetime(2010, 1, 8))
+post4 = BlogPost(title='Test 4', publish_date=datetime(2010, 1, 8))
post4.save()
-post5 = BlogPost(title='Test 1', publish_date=datetime.datetime(2010, 1, 15))
+post5 = BlogPost(title='Test 1', publish_date=datetime(2010, 1, 15))
post5.save()
post6 = BlogPost(title='Test 1', published=False)
@@ -247,7 +250,7 @@ class QTest(unittest.TestCase):
self.assertTrue(all(obj.id in posts for obj in published_posts))
# Check Q object combination
-date = datetime.datetime(2010, 1, 10)
+date = datetime(2010, 1, 10)
q = BlogPost.objects(Q(publish_date__lte=date) | Q(published=True))
posts = [post.id for post in q]
@@ -270,10 +273,8 @@ class QTest(unittest.TestCase):
# Test invalid query objs
def wrong_query_objs():
    self.Person.objects('user1')
def wrong_query_objs_filter():
    self.Person.objects('user1')
self.assertRaises(InvalidQueryError, wrong_query_objs)
self.assertRaises(InvalidQueryError, wrong_query_objs_filter)
@@ -283,6 +284,7 @@ class QTest(unittest.TestCase):
person = self.Person(name='Guido van Rossum')
person.save()
+import re
obj = self.Person.objects(Q(name=re.compile('^Gui'))).first()
self.assertEqual(obj, person)
obj = self.Person.objects(Q(name=re.compile('^gui'))).first()
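An aside on the compiled-regex queries above: the pattern is handed to the server as-is, so standard re flags control case sensitivity. A sketch, assuming the same Person document:

import re
# '^gui' alone will not match 'Guido van Rossum';
# adding re.IGNORECASE makes the second query return the person.
Person.objects(Q(name=re.compile('^gui'))).first()                 # None
Person.objects(Q(name=re.compile('^gui', re.IGNORECASE))).first()  # matches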
@@ -1,34 +1,21 @@
import sys
-import datetime
-from pymongo.errors import OperationFailure
sys.path[0:0] = [""]
try:
    import unittest2 as unittest
except ImportError:
    import unittest
-from nose.plugins.skip import SkipTest
+import datetime
import pymongo
from bson.tz_util import utc
-from mongoengine import (
-    connect, register_connection,
-    Document, DateTimeField
-)
-from mongoengine.python_support import IS_PYMONGO_3
+from mongoengine import *
import mongoengine.connection
from mongoengine.connection import get_db, get_connection, ConnectionError
-def get_tz_awareness(connection):
-    if not IS_PYMONGO_3:
-        return connection.tz_aware
-    else:
-        return connection.codec_options.tz_aware
class ConnectionTest(unittest.TestCase):
def tearDown(self):
@@ -52,99 +39,15 @@ class ConnectionTest(unittest.TestCase):
conn = get_connection('testdb')
self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))
def test_connect_in_mocking(self):
"""Ensure that the connect() method works properly in mocking.
"""
try:
import mongomock
except ImportError:
raise SkipTest('you need mongomock installed to run this testcase')
connect('mongoenginetest', host='mongomock://localhost')
conn = get_connection()
self.assertTrue(isinstance(conn, mongomock.MongoClient))
connect('mongoenginetest2', host='mongomock://localhost', alias='testdb2')
conn = get_connection('testdb2')
self.assertTrue(isinstance(conn, mongomock.MongoClient))
connect('mongoenginetest3', host='mongodb://localhost', is_mock=True, alias='testdb3')
conn = get_connection('testdb3')
self.assertTrue(isinstance(conn, mongomock.MongoClient))
connect('mongoenginetest4', is_mock=True, alias='testdb4')
conn = get_connection('testdb4')
self.assertTrue(isinstance(conn, mongomock.MongoClient))
connect(host='mongodb://localhost:27017/mongoenginetest5', is_mock=True, alias='testdb5')
conn = get_connection('testdb5')
self.assertTrue(isinstance(conn, mongomock.MongoClient))
connect(host='mongomock://localhost:27017/mongoenginetest6', alias='testdb6')
conn = get_connection('testdb6')
self.assertTrue(isinstance(conn, mongomock.MongoClient))
connect(host='mongomock://localhost:27017/mongoenginetest7', is_mock=True, alias='testdb7')
conn = get_connection('testdb7')
self.assertTrue(isinstance(conn, mongomock.MongoClient))
def test_connect_with_host_list(self):
"""Ensure that the connect() method works when host is a list
Uses mongomock to test w/o needing multiple mongod/mongos processes
"""
try:
import mongomock
except ImportError:
raise SkipTest('you need mongomock installed to run this testcase')
connect(host=['mongomock://localhost'])
conn = get_connection()
self.assertTrue(isinstance(conn, mongomock.MongoClient))
connect(host=['mongodb://localhost'], is_mock=True, alias='testdb2')
conn = get_connection('testdb2')
self.assertTrue(isinstance(conn, mongomock.MongoClient))
connect(host=['localhost'], is_mock=True, alias='testdb3')
conn = get_connection('testdb3')
self.assertTrue(isinstance(conn, mongomock.MongoClient))
connect(host=['mongomock://localhost:27017', 'mongomock://localhost:27018'], alias='testdb4')
conn = get_connection('testdb4')
self.assertTrue(isinstance(conn, mongomock.MongoClient))
connect(host=['mongodb://localhost:27017', 'mongodb://localhost:27018'], is_mock=True, alias='testdb5')
conn = get_connection('testdb5')
self.assertTrue(isinstance(conn, mongomock.MongoClient))
connect(host=['localhost:27017', 'localhost:27018'], is_mock=True, alias='testdb6')
conn = get_connection('testdb6')
self.assertTrue(isinstance(conn, mongomock.MongoClient))
def test_disconnect(self):
"""Ensure that the disconnect() method works properly
"""
conn1 = connect('mongoenginetest')
mongoengine.connection.disconnect()
conn2 = connect('mongoenginetest')
self.assertTrue(conn1 is not conn2)
def test_sharing_connections(self):
"""Ensure that connections are shared when the connection settings are exactly the same
"""
-connect('mongoenginetests', alias='testdb1')
+connect('mongoenginetest', alias='testdb1')
expected_connection = get_connection('testdb1')
-connect('mongoenginetests', alias='testdb2')
+connect('mongoenginetest', alias='testdb2')
actual_connection = get_connection('testdb2')
# Handle PyMongo 3+ Async Connection
if IS_PYMONGO_3:
# Ensure we are connected, throws ServerSelectionTimeoutError otherwise.
# Purposely not catching exception to fail test if thrown.
expected_connection.server_info()
self.assertEqual(expected_connection, actual_connection)
def test_connect_uri(self):
@@ -158,8 +61,7 @@ class ConnectionTest(unittest.TestCase):
c.admin.authenticate("admin", "password") c.admin.authenticate("admin", "password")
c.mongoenginetest.add_user("username", "password") c.mongoenginetest.add_user("username", "password")
if not IS_PYMONGO_3: self.assertRaises(ConnectionError, connect, "testdb_uri_bad", host='mongodb://test:password@localhost')
self.assertRaises(ConnectionError, connect, "testdb_uri_bad", host='mongodb://test:password@localhost')
connect("testdb_uri", host='mongodb://username:password@localhost/mongoenginetest') connect("testdb_uri", host='mongodb://username:password@localhost/mongoenginetest')
@@ -174,7 +76,8 @@ class ConnectionTest(unittest.TestCase):
c.mongoenginetest.system.users.remove({})
def test_connect_uri_without_db(self):
-"""Ensure connect() method works properly with uri's without database_name
+"""Ensure that the connect() method works properly with uri's
+without database_name
"""
c = connect(db='mongoenginetest', alias='admin')
c.admin.system.users.remove({})
@@ -184,8 +87,7 @@ class ConnectionTest(unittest.TestCase):
c.admin.authenticate("admin", "password") c.admin.authenticate("admin", "password")
c.mongoenginetest.add_user("username", "password") c.mongoenginetest.add_user("username", "password")
if not IS_PYMONGO_3: self.assertRaises(ConnectionError, connect, "testdb_uri_bad", host='mongodb://test:password@localhost')
self.assertRaises(ConnectionError, connect, "testdb_uri_bad", host='mongodb://test:password@localhost')
connect("mongoenginetest", host='mongodb://localhost/') connect("mongoenginetest", host='mongodb://localhost/')
@@ -199,42 +101,6 @@ class ConnectionTest(unittest.TestCase):
c.admin.system.users.remove({})
c.mongoenginetest.system.users.remove({})
def test_connect_uri_with_authsource(self):
"""Ensure that the connect() method works well with
the option `authSource` in URI.
This feature was introduced in MongoDB 2.4 and removed in 2.6
"""
# Create users
c = connect('mongoenginetest')
c.admin.system.users.remove({})
c.admin.add_user('username2', 'password')
# Authentication fails without "authSource"
if IS_PYMONGO_3:
test_conn = connect('mongoenginetest', alias='test1',
host='mongodb://username2:password@localhost/mongoenginetest')
self.assertRaises(OperationFailure, test_conn.server_info)
else:
self.assertRaises(
ConnectionError, connect, 'mongoenginetest', alias='test1',
host='mongodb://username2:password@localhost/mongoenginetest'
)
self.assertRaises(ConnectionError, get_db, 'test1')
# Authentication succeeds with "authSource"
connect(
'mongoenginetest', alias='test2',
host=('mongodb://username2:password@localhost/'
'mongoenginetest?authSource=admin')
)
# This will fail starting from MongoDB 2.6+
db = get_db('test2')
self.assertTrue(isinstance(db, pymongo.database.Database))
self.assertEqual(db.name, 'mongoenginetest')
# Clear all users
c.admin.system.users.remove({})
def test_register_connection(self):
"""Ensure that connections with different aliases may be registered.
"""
@@ -262,11 +128,11 @@ class ConnectionTest(unittest.TestCase):
connect('mongoenginetest', alias='t1', tz_aware=True)
conn = get_connection('t1')
-self.assertTrue(get_tz_awareness(conn))
+self.assertTrue(conn.tz_aware)
connect('mongoenginetest2', alias='t2')
conn = get_connection('t2')
-self.assertFalse(get_tz_awareness(conn))
+self.assertFalse(conn.tz_aware)
def test_datetime(self):
connect('mongoenginetest', tz_aware=True)
@@ -290,17 +156,8 @@ class ConnectionTest(unittest.TestCase):
self.assertEqual(len(mongo_connections.items()), 2)
self.assertTrue('t1' in mongo_connections.keys())
self.assertTrue('t2' in mongo_connections.keys())
-if not IS_PYMONGO_3:
-    self.assertEqual(mongo_connections['t1'].host, 'localhost')
-    self.assertEqual(mongo_connections['t2'].host, '127.0.0.1')
-else:
-    # Handle PyMongo 3+ Async Connection
-    # Ensure we are connected, throws ServerSelectionTimeoutError otherwise.
-    # Purposely not catching exception to fail test if thrown.
-    mongo_connections['t1'].server_info()
-    mongo_connections['t2'].server_info()
-    self.assertEqual(mongo_connections['t1'].address[0], 'localhost')
-    self.assertEqual(mongo_connections['t2'].address[0], '127.0.0.1')
+self.assertEqual(mongo_connections['t1'].host, 'localhost')
+self.assertEqual(mongo_connections['t2'].host, '127.0.0.1')
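For reference, the tz_aware flag exercised in these tests changes what the driver hands back on reads. A minimal sketch (the Event document here is illustrative, and a local mongod is assumed):

from datetime import datetime
from bson.tz_util import utc
from mongoengine import Document, DateTimeField, connect

connect('tzdemo', tz_aware=True)  # 'tzdemo' is an illustrative db name

class Event(Document):
    happened = DateTimeField()

Event(happened=datetime(2014, 9, 3, tzinfo=utc)).save()
# With tz_aware=True the driver attaches tzinfo on the way out,
# so the reloaded value is an aware datetime in UTC.
assert Event.objects.first().happened.tzinfo is not None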
if __name__ == '__main__': if __name__ == '__main__':
@@ -1,14 +1,11 @@
import unittest
from mongoengine.base.datastructures import StrictDict, SemiStrictDict
class TestStrictDict(unittest.TestCase):
def strict_dict_class(self, *args, **kwargs):
    return StrictDict.create(*args, **kwargs)
def setUp(self):
    self.dtype = self.strict_dict_class(("a", "b", "c"))
def test_init(self):
d = self.dtype(a=1, b=1, c=1)
self.assertEqual((d.a, d.b, d.c), (1, 1, 1))
@@ -41,9 +38,8 @@ class TestStrictDict(unittest.TestCase):
def test_setattr_raises_on_nonexisting_attr(self):
d = self.dtype()
def _f():
-    d.x = 1
+    d.x=1
self.assertRaises(AttributeError, _f)
def test_setattr_getattr_special(self):
@@ -12,13 +12,9 @@ from mongoengine.context_managers import query_counter
class FieldTest(unittest.TestCase):
-@classmethod
-def setUpClass(cls):
-    cls.db = connect(db='mongoenginetest')
-@classmethod
-def tearDownClass(cls):
-    cls.db.drop_database('mongoenginetest')
+def setUp(self):
+    connect(db='mongoenginetest')
+    self.db = get_db()
def test_list_item_dereference(self):
"""Ensure that DBRef items in ListFields are dereferenced.
@@ -308,7 +304,6 @@ class FieldTest(unittest.TestCase):
User.drop_collection()
Post.drop_collection()
-SimpleList.drop_collection()
u1 = User.objects.create(name='u1')
u2 = User.objects.create(name='u2')
@@ -323,10 +318,6 @@ class FieldTest(unittest.TestCase):
def test_circular_reference(self):
"""Ensure you can handle circular references
"""
-class Relation(EmbeddedDocument):
-    name = StringField()
-    person = ReferenceField('Person')
class Person(Document):
name = StringField()
relations = ListField(EmbeddedDocumentField('Relation'))
@@ -334,6 +325,10 @@ class FieldTest(unittest.TestCase):
def __repr__(self):
return "<Person: %s>" % self.name
+class Relation(EmbeddedDocument):
+    name = StringField()
+    person = ReferenceField('Person')
Person.drop_collection()
mother = Person(name="Mother")
daughter = Person(name="Daughter")
@@ -952,8 +947,6 @@ class FieldTest(unittest.TestCase):
class Asset(Document):
name = StringField(max_length=250, required=True)
-path = StringField()
-title = StringField()
parent = GenericReferenceField(default=None)
parents = ListField(GenericReferenceField())
children = ListField(GenericReferenceField())
@@ -1031,43 +1024,6 @@ class FieldTest(unittest.TestCase):
self.assertEqual(type(foo.bar), Bar)
self.assertEqual(type(foo.baz), Baz)
def test_document_reload_reference_integrity(self):
"""
Ensure reloading a document with multiple similar id
in different collections doesn't mix them.
"""
class Topic(Document):
id = IntField(primary_key=True)
class User(Document):
id = IntField(primary_key=True)
name = StringField()
class Message(Document):
id = IntField(primary_key=True)
topic = ReferenceField(Topic)
author = ReferenceField(User)
Topic.drop_collection()
User.drop_collection()
Message.drop_collection()
# All objects share the same id, but each in a different collection
topic = Topic(id=1).save()
user = User(id=1, name='user-name').save()
Message(id=1, topic=topic, author=user).save()
concurrent_change_user = User.objects.get(id=1)
concurrent_change_user.name = 'new-name'
concurrent_change_user.save()
self.assertNotEqual(user.name, 'new-name')
msg = Message.objects.get(id=1)
msg.reload()
self.assertEqual(msg.topic, topic)
self.assertEqual(msg.author, user)
self.assertEqual(msg.author.name, 'new-name')
def test_list_lookup_not_checked_in_map(self):
"""Ensure we dereference list data correctly
"""
@@ -1264,15 +1220,14 @@ class FieldTest(unittest.TestCase):
self.assertEqual(page.tags[0], page.posts[0].tags[0])
def test_select_related_follows_embedded_referencefields(self):
-class Song(Document):
-    title = StringField()
+class Playlist(Document):
+    items = ListField(EmbeddedDocumentField("PlaylistItem"))
class PlaylistItem(EmbeddedDocument):
song = ReferenceField("Song")
-class Playlist(Document):
-    items = ListField(EmbeddedDocumentField("PlaylistItem"))
+class Song(Document):
+    title = StringField()
Playlist.drop_collection()
Song.drop_collection()
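The class reordering above works in either order because mongoengine resolves string field references lazily. A sketch of the same shape, with the names from the test:

from mongoengine import (Document, EmbeddedDocument, EmbeddedDocumentField,
                         ListField, ReferenceField, StringField)

# "PlaylistItem" and "Song" are plain strings here; they are looked up
# in the document registry on first use, not at class-definition time.
class Playlist(Document):
    items = ListField(EmbeddedDocumentField("PlaylistItem"))

class PlaylistItem(EmbeddedDocument):
    song = ReferenceField("Song")

class Song(Document):
    title = StringField()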
tests/test_django.py (new file, 308 lines)
@@ -0,0 +1,308 @@
import sys
sys.path[0:0] = [""]
import unittest
from nose.plugins.skip import SkipTest
from mongoengine import *
from mongoengine.django.shortcuts import get_document_or_404
import django
from django.http import Http404
from django.template import Context, Template
from django.conf import settings
from django.core.paginator import Paginator
settings.configure(
USE_TZ=True,
INSTALLED_APPS=('django.contrib.auth', 'mongoengine.django.mongo_auth'),
AUTH_USER_MODEL=('mongo_auth.MongoUser'),
AUTHENTICATION_BACKENDS = ('mongoengine.django.auth.MongoEngineBackend',)
)
# For Django >= 1.7
if hasattr(django, 'setup'):
django.setup()
try:
from django.contrib.auth import authenticate, get_user_model
from mongoengine.django.auth import User
from mongoengine.django.mongo_auth.models import (
MongoUser,
MongoUserManager,
get_user_document,
)
DJ15 = True
except Exception:
DJ15 = False
from django.contrib.sessions.tests import SessionTestsMixin
from mongoengine.django.sessions import SessionStore, MongoSession
from mongoengine.django.tests import MongoTestCase
from datetime import tzinfo, timedelta
ZERO = timedelta(0)
class FixedOffset(tzinfo):
"""Fixed offset in minutes east from UTC."""
def __init__(self, offset, name):
self.__offset = timedelta(minutes=offset)
self.__name = name
def utcoffset(self, dt):
return self.__offset
def tzname(self, dt):
return self.__name
def dst(self, dt):
return ZERO
def activate_timezone(tz):
"""Activate Django timezone support if it is available.
"""
try:
from django.utils import timezone
timezone.deactivate()
timezone.activate(tz)
except ImportError:
pass
class QuerySetTest(unittest.TestCase):
def setUp(self):
connect(db='mongoenginetest')
class Person(Document):
name = StringField()
age = IntField()
self.Person = Person
def test_order_by_in_django_template(self):
"""Ensure that QuerySets are properly ordered in Django template.
"""
self.Person.drop_collection()
self.Person(name="A", age=20).save()
self.Person(name="D", age=10).save()
self.Person(name="B", age=40).save()
self.Person(name="C", age=30).save()
t = Template("{% for o in ol %}{{ o.name }}-{{ o.age }}:{% endfor %}")
d = {"ol": self.Person.objects.order_by('-name')}
self.assertEqual(t.render(Context(d)), u'D-10:C-30:B-40:A-20:')
d = {"ol": self.Person.objects.order_by('+name')}
self.assertEqual(t.render(Context(d)), u'A-20:B-40:C-30:D-10:')
d = {"ol": self.Person.objects.order_by('-age')}
self.assertEqual(t.render(Context(d)), u'B-40:C-30:A-20:D-10:')
d = {"ol": self.Person.objects.order_by('+age')}
self.assertEqual(t.render(Context(d)), u'D-10:A-20:C-30:B-40:')
self.Person.drop_collection()
def test_q_object_filter_in_template(self):
self.Person.drop_collection()
self.Person(name="A", age=20).save()
self.Person(name="D", age=10).save()
self.Person(name="B", age=40).save()
self.Person(name="C", age=30).save()
t = Template("{% for o in ol %}{{ o.name }}-{{ o.age }}:{% endfor %}")
d = {"ol": self.Person.objects.filter(Q(age=10) | Q(name="C"))}
self.assertEqual(t.render(Context(d)), 'D-10:C-30:')
# Check double rendering doesn't throw an error
self.assertEqual(t.render(Context(d)), 'D-10:C-30:')
def test_get_document_or_404(self):
p = self.Person(name="G404")
p.save()
self.assertRaises(Http404, get_document_or_404, self.Person, pk='1234')
self.assertEqual(p, get_document_or_404(self.Person, pk=p.pk))
def test_pagination(self):
"""Ensure that Pagination works as expected
"""
class Page(Document):
name = StringField()
Page.drop_collection()
for i in xrange(1, 11):
Page(name=str(i)).save()
paginator = Paginator(Page.objects.all(), 2)
t = Template("{% for i in page.object_list %}{{ i.name }}:{% endfor %}")
for p in paginator.page_range:
d = {"page": paginator.page(p)}
end = p * 2
start = end - 1
self.assertEqual(t.render(Context(d)), u'%d:%d:' % (start, end))
def test_nested_queryset_template_iterator(self):
# Try iterating the same queryset twice, nested, in a Django template.
names = ['A', 'B', 'C', 'D']
class CustomUser(Document):
name = StringField()
def __unicode__(self):
return self.name
CustomUser.drop_collection()
for name in names:
CustomUser(name=name).save()
users = CustomUser.objects.all().order_by('name')
template = Template("{% for user in users %}{{ user.name }}{% ifequal forloop.counter 2 %} {% for inner_user in users %}{{ inner_user.name }}{% endfor %} {% endifequal %}{% endfor %}")
rendered = template.render(Context({'users': users}))
self.assertEqual(rendered, 'AB ABCD CD')
def test_filter(self):
"""Ensure that a queryset and filters work as expected
"""
class Note(Document):
text = StringField()
Note.drop_collection()
for i in xrange(1, 101):
Note(name="Note: %s" % i).save()
# Check the count
self.assertEqual(Note.objects.count(), 100)
# Get the first 10 and confirm
notes = Note.objects[:10]
self.assertEqual(notes.count(), 10)
# Test Django's template filters
# self.assertEqual(length(notes), 10)
t = Template("{{ notes.count }}")
c = Context({"notes": notes})
self.assertEqual(t.render(c), "10")
# Test with skip
notes = Note.objects.skip(90)
self.assertEqual(notes.count(), 10)
# Test Django's template filters
self.assertEqual(notes.count(), 10)
t = Template("{{ notes.count }}")
c = Context({"notes": notes})
self.assertEqual(t.render(c), "10")
# Test with limit
notes = Note.objects.limit(10)
self.assertEqual(notes.count(), 10)
# Test Django's template filters
self.assertEqual(notes.count(), 10)
t = Template("{{ notes.count }}")
c = Context({"notes": notes})
self.assertEqual(t.render(c), "10")
# Test with skip and limit
notes = Note.objects.skip(10).limit(10)
# Test Django's template filters
self.assertEqual(notes.count(), 10)
t = Template("{{ notes.count }}")
c = Context({"notes": notes})
self.assertEqual(t.render(c), "10")
class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase):
backend = SessionStore
def setUp(self):
connect(db='mongoenginetest')
MongoSession.drop_collection()
super(MongoDBSessionTest, self).setUp()
def assertIn(self, first, second, msg=None):
self.assertTrue(first in second, msg)
def assertNotIn(self, first, second, msg=None):
self.assertFalse(first in second, msg)
def test_first_save(self):
session = SessionStore()
session['test'] = True
session.save()
self.assertTrue('test' in session)
def test_session_expiration_tz(self):
activate_timezone(FixedOffset(60, 'UTC+1'))
# create and save new session
session = SessionStore()
session.set_expiry(600) # expire in 600 seconds
session['test_expire'] = True
session.save()
# reload session with key
key = session.session_key
session = SessionStore(key)
self.assertTrue('test_expire' in session, 'Session has expired before it is expected')
class MongoAuthTest(unittest.TestCase):
user_data = {
'username': 'user',
'email': 'user@example.com',
'password': 'test',
}
def setUp(self):
if not DJ15:
raise SkipTest('mongo_auth requires Django 1.5')
connect(db='mongoenginetest')
User.drop_collection()
super(MongoAuthTest, self).setUp()
def test_get_user_model(self):
self.assertEqual(get_user_model(), MongoUser)
def test_get_user_document(self):
self.assertEqual(get_user_document(), User)
def test_user_manager(self):
manager = get_user_model()._default_manager
self.assertTrue(isinstance(manager, MongoUserManager))
def test_user_manager_exception(self):
manager = get_user_model()._default_manager
self.assertRaises(MongoUser.DoesNotExist, manager.get,
username='not found')
def test_create_user(self):
manager = get_user_model()._default_manager
user = manager.create_user(**self.user_data)
self.assertTrue(isinstance(user, User))
db_user = User.objects.get(username='user')
self.assertEqual(user.id, db_user.id)
def test_authenticate(self):
get_user_model()._default_manager.create_user(**self.user_data)
user = authenticate(username='user', password='fail')
self.assertEqual(None, user)
user = authenticate(username='user', password='test')
db_user = User.objects.get(username='user')
self.assertEqual(user.id, db_user.id)
class MongoTestCaseTest(MongoTestCase):
def test_mongo_test_case(self):
self.db.dummy_collection.insert({'collection': 'will be dropped'})
if __name__ == '__main__':
unittest.main()
tests/test_jinja.py (new file, 47 lines)
@@ -0,0 +1,47 @@
import sys
sys.path[0:0] = [""]
import unittest
from mongoengine import *
import jinja2
class TemplateFilterTest(unittest.TestCase):
def setUp(self):
connect(db='mongoenginetest')
def test_jinja2(self):
env = jinja2.Environment()
class TestData(Document):
title = StringField()
description = StringField()
TestData.drop_collection()
examples = [('A', '1'),
('B', '2'),
('C', '3')]
for title, description in examples:
TestData(title=title, description=description).save()
tmpl = """
{%- for record in content -%}
{%- if loop.first -%}{ {%- endif -%}
"{{ record.title }}": "{{ record.description }}"
{%- if loop.last -%} }{%- else -%},{% endif -%}
{%- endfor -%}
"""
ctx = {'content': TestData.objects}
template = env.from_string(tmpl)
rendered = template.render(**ctx)
self.assertEqual('{"A": "1","B": "2","C": "3"}', rendered)
if __name__ == '__main__':
unittest.main()
@@ -1,33 +1,17 @@
import sys
sys.path[0:0] = [""]
import unittest
-from pymongo import ReadPreference
+import pymongo
+from pymongo import ReadPreference, ReplicaSetConnection
-from mongoengine.python_support import IS_PYMONGO_3
-if IS_PYMONGO_3:
-    from pymongo import MongoClient
-    CONN_CLASS = MongoClient
-    READ_PREF = ReadPreference.SECONDARY
-else:
-    from pymongo import ReplicaSetConnection
-    CONN_CLASS = ReplicaSetConnection
-    READ_PREF = ReadPreference.SECONDARY_ONLY
import mongoengine
from mongoengine import *
-from mongoengine.connection import ConnectionError
+from mongoengine.connection import get_db, get_connection, ConnectionError
class ConnectionTest(unittest.TestCase):
-def setUp(self):
-    mongoengine.connection._connection_settings = {}
-    mongoengine.connection._connections = {}
-    mongoengine.connection._dbs = {}
def tearDown(self):
mongoengine.connection._connection_settings = {}
mongoengine.connection._connections = {}
@@ -38,17 +22,14 @@ class ConnectionTest(unittest.TestCase):
""" """
try: try:
conn = connect(db='mongoenginetest', conn = connect(db='mongoenginetest', host="mongodb://localhost/mongoenginetest?replicaSet=rs", read_preference=ReadPreference.SECONDARY_ONLY)
host="mongodb://localhost/mongoenginetest?replicaSet=rs",
read_preference=READ_PREF)
except ConnectionError, e: except ConnectionError, e:
return return
if not isinstance(conn, CONN_CLASS): if not isinstance(conn, ReplicaSetConnection):
# really???
return return
self.assertEqual(conn.read_preference, READ_PREF) self.assertEqual(conn.read_preference, ReadPreference.SECONDARY_ONLY)
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()
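For context on the READ_PREF and CONN_CLASS shim deleted in this hunk, a hedged sketch of the PyMongo 2 versus 3 split it bridged:

import pymongo
from pymongo import ReadPreference

if pymongo.version_tuple[0] >= 3:
    # PyMongo 3 folded replica-set support into MongoClient and
    # dropped the *_ONLY read-preference constants.
    from pymongo import MongoClient as CONN_CLASS
    READ_PREF = ReadPreference.SECONDARY
else:
    from pymongo import ReplicaSetConnection as CONN_CLASS
    READ_PREF = ReadPreference.SECONDARY_ONLY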
@@ -25,8 +25,6 @@ class SignalTests(unittest.TestCase):
connect(db='mongoenginetest')
class Author(Document):
-# Make the id deterministic for easier testing
-id = SequenceField(primary_key=True)
name = StringField()
def __unicode__(self):
@@ -35,7 +33,7 @@ class SignalTests(unittest.TestCase):
@classmethod
def pre_init(cls, sender, document, *args, **kwargs):
signal_output.append('pre_init signal, %s' % cls.__name__)
-signal_output.append(kwargs['values'])
+signal_output.append(str(kwargs['values']))
@classmethod
def post_init(cls, sender, document, **kwargs):
@@ -45,55 +43,48 @@ class SignalTests(unittest.TestCase):
@classmethod
def pre_save(cls, sender, document, **kwargs):
signal_output.append('pre_save signal, %s' % document)
-signal_output.append(kwargs)
@classmethod
def pre_save_post_validation(cls, sender, document, **kwargs):
signal_output.append('pre_save_post_validation signal, %s' % document)
-if kwargs.pop('created', False):
-    signal_output.append('Is created')
-else:
-    signal_output.append('Is updated')
-signal_output.append(kwargs)
+if 'created' in kwargs:
+    if kwargs['created']:
+        signal_output.append('Is created')
+    else:
+        signal_output.append('Is updated')
@classmethod
def post_save(cls, sender, document, **kwargs):
dirty_keys = document._delta()[0].keys() + document._delta()[1].keys()
signal_output.append('post_save signal, %s' % document)
signal_output.append('post_save dirty keys, %s' % dirty_keys)
-if kwargs.pop('created', False):
-    signal_output.append('Is created')
-else:
-    signal_output.append('Is updated')
-signal_output.append(kwargs)
+if 'created' in kwargs:
+    if kwargs['created']:
+        signal_output.append('Is created')
+    else:
+        signal_output.append('Is updated')
@classmethod
def pre_delete(cls, sender, document, **kwargs):
signal_output.append('pre_delete signal, %s' % document)
-signal_output.append(kwargs)
@classmethod
def post_delete(cls, sender, document, **kwargs):
signal_output.append('post_delete signal, %s' % document)
-signal_output.append(kwargs)
@classmethod
def pre_bulk_insert(cls, sender, documents, **kwargs):
signal_output.append('pre_bulk_insert signal, %s' % documents)
-signal_output.append(kwargs)
@classmethod
def post_bulk_insert(cls, sender, documents, **kwargs):
signal_output.append('post_bulk_insert signal, %s' % documents)
-if kwargs.pop('loaded', False):
+if kwargs.get('loaded', False):
signal_output.append('Is loaded')
else:
signal_output.append('Not loaded')
-signal_output.append(kwargs)
self.Author = Author
Author.drop_collection()
-Author.id.set_next_value(0)
class Another(Document):
@@ -105,12 +96,10 @@ class SignalTests(unittest.TestCase):
@classmethod
def pre_delete(cls, sender, document, **kwargs):
signal_output.append('pre_delete signal, %s' % document)
-signal_output.append(kwargs)
@classmethod
def post_delete(cls, sender, document, **kwargs):
signal_output.append('post_delete signal, %s' % document)
-signal_output.append(kwargs)
self.Another = Another
Another.drop_collection()
@@ -129,41 +118,6 @@ class SignalTests(unittest.TestCase):
self.ExplicitId = ExplicitId
ExplicitId.drop_collection()
class Post(Document):
title = StringField()
content = StringField()
active = BooleanField(default=False)
def __unicode__(self):
return self.title
@classmethod
def pre_bulk_insert(cls, sender, documents, **kwargs):
signal_output.append('pre_bulk_insert signal, %s' %
[(doc, {'active': documents[n].active})
for n, doc in enumerate(documents)])
# make changes here, this is just an example -
# it could be anything that needs pre-validation or look-ups before bulk inserting
for document in documents:
if not document.active:
document.active = True
signal_output.append(kwargs)
@classmethod
def post_bulk_insert(cls, sender, documents, **kwargs):
signal_output.append('post_bulk_insert signal, %s' %
[(doc, {'active': documents[n].active})
for n, doc in enumerate(documents)])
if kwargs.pop('loaded', False):
signal_output.append('Is loaded')
else:
signal_output.append('Not loaded')
signal_output.append(kwargs)
self.Post = Post
Post.drop_collection()
# Save up the number of connected signals so that we can check at the
# end that all the signals we register get properly unregistered
self.pre_signals = (
@@ -193,9 +147,6 @@ class SignalTests(unittest.TestCase):
signals.post_save.connect(ExplicitId.post_save, sender=ExplicitId)
-signals.pre_bulk_insert.connect(Post.pre_bulk_insert, sender=Post)
-signals.post_bulk_insert.connect(Post.post_bulk_insert, sender=Post)
def tearDown(self):
signals.pre_init.disconnect(self.Author.pre_init)
signals.post_init.disconnect(self.Author.post_init)
@@ -212,9 +163,6 @@ class SignalTests(unittest.TestCase):
signals.post_save.disconnect(self.ExplicitId.post_save)
-signals.pre_bulk_insert.disconnect(self.Post.pre_bulk_insert)
-signals.post_bulk_insert.disconnect(self.Post.post_bulk_insert)
# Check that all our signals got disconnected properly.
post_signals = (
len(signals.pre_init.receivers),
@@ -251,121 +199,66 @@ class SignalTests(unittest.TestCase):
a.save()
self.get_signal_output(lambda: None) # eliminate signal output
a1 = self.Author.objects(name='Bill Shakespeare')[0]
self.assertEqual(self.get_signal_output(create_author), [
"pre_init signal, Author",
-{'name': 'Bill Shakespeare'},
+"{'name': 'Bill Shakespeare'}",
"post_init signal, Bill Shakespeare, document._created = True",
])
a1 = self.Author(name='Bill Shakespeare')
self.assertEqual(self.get_signal_output(a1.save), [
"pre_save signal, Bill Shakespeare",
-{},
"pre_save_post_validation signal, Bill Shakespeare",
"Is created",
-{},
"post_save signal, Bill Shakespeare",
"post_save dirty keys, ['name']",
-"Is created",
-{}
+"Is created"
])
a1.reload()
a1.name = 'William Shakespeare'
self.assertEqual(self.get_signal_output(a1.save), [
"pre_save signal, William Shakespeare",
-{},
"pre_save_post_validation signal, William Shakespeare",
"Is updated",
-{},
"post_save signal, William Shakespeare",
"post_save dirty keys, ['name']",
-"Is updated",
-{}
+"Is updated"
])
self.assertEqual(self.get_signal_output(a1.delete), [
'pre_delete signal, William Shakespeare',
-{},
'post_delete signal, William Shakespeare',
-{}
])
-self.assertEqual(self.get_signal_output(load_existing_author), [
-"pre_init signal, Author",
-{'id': 2, 'name': 'Bill Shakespeare'},
-"post_init signal, Bill Shakespeare, document._created = False"
-])
+signal_output = self.get_signal_output(load_existing_author)
+# test signal_output lines separately, because of random ObjectID after object load
+self.assertEqual(signal_output[0],
+"pre_init signal, Author",
+)
+self.assertEqual(signal_output[2],
+"post_init signal, Bill Shakespeare, document._created = False",
+)
-self.assertEqual(self.get_signal_output(bulk_create_author_with_load), [
-'pre_init signal, Author',
-{'name': 'Bill Shakespeare'},
-'post_init signal, Bill Shakespeare, document._created = True',
-'pre_bulk_insert signal, [<Author: Bill Shakespeare>]',
-{},
-'pre_init signal, Author',
-{'id': 3, 'name': 'Bill Shakespeare'},
-'post_init signal, Bill Shakespeare, document._created = False',
-'post_bulk_insert signal, [<Author: Bill Shakespeare>]',
-'Is loaded',
-{}
-])
+signal_output = self.get_signal_output(bulk_create_author_with_load)
+# The output of this signal is not entirely deterministic. The reloaded
+# object will have an object ID. Hence, we only check part of the output
+self.assertEqual(signal_output[3], "pre_bulk_insert signal, [<Author: Bill Shakespeare>]"
+)
+self.assertEqual(signal_output[-2:],
+["post_bulk_insert signal, [<Author: Bill Shakespeare>]",
+"Is loaded",])
self.assertEqual(self.get_signal_output(bulk_create_author_without_load), [
"pre_init signal, Author",
-{'name': 'Bill Shakespeare'},
+"{'name': 'Bill Shakespeare'}",
"post_init signal, Bill Shakespeare, document._created = True",
"pre_bulk_insert signal, [<Author: Bill Shakespeare>]",
-{},
"post_bulk_insert signal, [<Author: Bill Shakespeare>]",
"Not loaded",
-{}
])
def test_signal_kwargs(self):
""" Make sure signal_kwargs is passed to signals calls. """
def live_and_let_die():
a = self.Author(name='Bill Shakespeare')
a.save(signal_kwargs={'live': True, 'die': False})
a.delete(signal_kwargs={'live': False, 'die': True})
self.assertEqual(self.get_signal_output(live_and_let_die), [
"pre_init signal, Author",
{'name': 'Bill Shakespeare'},
"post_init signal, Bill Shakespeare, document._created = True",
"pre_save signal, Bill Shakespeare",
{'die': False, 'live': True},
"pre_save_post_validation signal, Bill Shakespeare",
"Is created",
{'die': False, 'live': True},
"post_save signal, Bill Shakespeare",
"post_save dirty keys, ['name']",
"Is created",
{'die': False, 'live': True},
'pre_delete signal, Bill Shakespeare',
{'die': True, 'live': False},
'post_delete signal, Bill Shakespeare',
{'die': True, 'live': False}
])
def bulk_create_author():
a1 = self.Author(name='Bill Shakespeare')
self.Author.objects.insert([a1], signal_kwargs={'key': True})
self.assertEqual(self.get_signal_output(bulk_create_author), [
'pre_init signal, Author',
{'name': 'Bill Shakespeare'},
'post_init signal, Bill Shakespeare, document._created = True',
'pre_bulk_insert signal, [<Author: Bill Shakespeare>]',
{'key': True},
'pre_init signal, Author',
{'id': 2, 'name': 'Bill Shakespeare'},
'post_init signal, Bill Shakespeare, document._created = False',
'post_bulk_insert signal, [<Author: Bill Shakespeare>]',
'Is loaded',
{'key': True}
]) ])
def test_queryset_delete_signals(self):
@@ -374,9 +267,7 @@ class SignalTests(unittest.TestCase):
self.Another(name='Bill Shakespeare').save()
self.assertEqual(self.get_signal_output(self.Another.objects.delete), [
'pre_delete signal, Bill Shakespeare',
-{},
'post_delete signal, Bill Shakespeare',
-{}
])
def test_signals_with_explicit_doc_ids(self):
@@ -388,50 +279,5 @@ class SignalTests(unittest.TestCase):
# second time, it must be an update
self.assertEqual(self.get_signal_output(ei.save), ['Is updated'])
def test_signals_with_switch_collection(self):
ei = self.ExplicitId(id=123)
ei.switch_collection("explicit__1")
self.assertEqual(self.get_signal_output(ei.save), ['Is created'])
ei.switch_collection("explicit__1")
self.assertEqual(self.get_signal_output(ei.save), ['Is updated'])
ei.switch_collection("explicit__1", keep_created=False)
self.assertEqual(self.get_signal_output(ei.save), ['Is created'])
ei.switch_collection("explicit__1", keep_created=False)
self.assertEqual(self.get_signal_output(ei.save), ['Is created'])
def test_signals_with_switch_db(self):
connect('mongoenginetest')
register_connection('testdb-1', 'mongoenginetest2')
ei = self.ExplicitId(id=123)
ei.switch_db("testdb-1")
self.assertEqual(self.get_signal_output(ei.save), ['Is created'])
ei.switch_db("testdb-1")
self.assertEqual(self.get_signal_output(ei.save), ['Is updated'])
ei.switch_db("testdb-1", keep_created=False)
self.assertEqual(self.get_signal_output(ei.save), ['Is created'])
ei.switch_db("testdb-1", keep_created=False)
self.assertEqual(self.get_signal_output(ei.save), ['Is created'])
def test_signals_bulk_insert(self):
def bulk_set_active_post():
posts = [
self.Post(title='Post 1'),
self.Post(title='Post 2'),
self.Post(title='Post 3')
]
self.Post.objects.insert(posts)
results = self.get_signal_output(bulk_set_active_post)
self.assertEqual(results, [
"pre_bulk_insert signal, [(<Post: Post 1>, {'active': False}), (<Post: Post 2>, {'active': False}), (<Post: Post 3>, {'active': False})]",
{},
"post_bulk_insert signal, [(<Post: Post 1>, {'active': True}), (<Post: Post 2>, {'active': True}), (<Post: Post 3>, {'active': True})]",
'Is loaded',
{}
])
if __name__ == '__main__':
    unittest.main()
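As a reminder of the API these handlers plug into, a minimal self-contained sketch of connecting a mongoengine signal (the document and handler names are illustrative):

from mongoengine import Document, StringField, signals

class Author(Document):
    name = StringField()

def on_post_save(sender, document, **kwargs):
    # mongoengine passes created=True on first insert, False on update
    state = 'created' if kwargs.get('created') else 'updated'
    print('%s was %s' % (document.name, state))

signals.post_save.connect(on_post_save, sender=Author)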
tox.ini (deleted, 22 lines)
@@ -1,22 +0,0 @@
[tox]
envlist = {py26,py27,py33,py34,py35,pypy,pypy3}-{mg27,mg28},flake8
[testenv]
commands =
python setup.py nosetests {posargs}
deps =
nose
mg27: PyMongo<2.8
mg28: PyMongo>=2.8,<3.0
mg30: PyMongo>=3.0
mgdev: https://github.com/mongodb/mongo-python-driver/tarball/master
setenv =
PYTHON_EGG_CACHE = {envdir}/python-eggs
passenv = windir
[testenv:flake8]
deps =
flake8
flake8-import-order
commands =
flake8
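For the record, each cell of the envlist above pairs a Python with a PyMongo pin, so one cell runs as, for example, `tox -e py27-mg28`, while `tox -e flake8` runs only the linter. (Standard tox behaviour; the labels come straight from the envlist.)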