Compare commits

1 commit

Author: Ross Lawley
SHA1: 3da37fbf6e
Message: Updated for pymongo
Date: 2012-09-10 18:24:37 +00:00

102 changed files with 6219 additions and 35594 deletions

.gitignore

@@ -1,20 +1,9 @@
.*
!.gitignore
*~
*.py[co]
.*.sw[po]
*.pyc
.*.swp
*.egg
docs/.build
docs/_build
build/
dist/
mongoengine.egg-info/
env/
.settings
.project
.pydevproject
tests/test_bugfix.py
htmlcov/
venv
venv3
scratchpad
env/


@@ -1,27 +0,0 @@
#!/bin/bash
sudo apt-get remove mongodb-org-server
sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10
if [ "$MONGODB" = "2.4" ]; then
echo "deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen" | sudo tee /etc/apt/sources.list.d/mongodb.list
sudo apt-get update
sudo apt-get install mongodb-10gen=2.4.14
sudo service mongodb start
elif [ "$MONGODB" = "2.6" ]; then
echo "deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen" | sudo tee /etc/apt/sources.list.d/mongodb.list
sudo apt-get update
sudo apt-get install mongodb-org-server=2.6.12
# service should be started automatically
elif [ "$MONGODB" = "3.0" ]; then
echo "deb http://repo.mongodb.org/apt/ubuntu trusty/mongodb-org/3.0 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb.list
sudo apt-get update
sudo apt-get install mongodb-org-server=3.0.14
# service should be started automatically
else
echo "Invalid MongoDB version, expected 2.4, 2.6, or 3.0."
exit 1
fi;
mkdir db
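# run mongod in the background, using the ./db directory just created as the
# data directory and sending its stdout to db/logs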
1>db/logs mongod --dbpath=db &


@@ -1,22 +0,0 @@
pylint:
disable:
# We use this a lot (e.g. via document._meta)
- protected-access
options:
additional-builtins:
# add xrange and long as valid built-ins. In Python 3, xrange is
# translated into range and long is translated into int via 2to3 (see
# "use_2to3" in setup.py). This should be removed when we drop Python
# 2 support (which probably won't happen any time soon).
- xrange
- long
pyflakes:
disable:
# undefined variables are already covered by pylint (and exclude
# xrange & long)
- F821
ignore-paths:
- benchmark.py


@@ -1,107 +0,0 @@
# For full coverage, we'd have to test all supported Python, MongoDB, and
# PyMongo combinations. However, that would result in an overly long build
# with a very large number of jobs, hence we only test a subset of all the
# combinations:
# * MongoDB v2.4 & v3.0 are only tested against Python v2.7 & v3.5.
# * MongoDB v2.4 is tested against PyMongo v2.7 & v3.x.
# * MongoDB v3.0 is tested against PyMongo v3.x.
# * MongoDB v2.6 is currently the "main" version tested against Python v2.7,
# v3.5, PyPy & PyPy3, and PyMongo v2.7, v2.8 & v3.x.
#
# Reminder: Update README.rst if you change MongoDB versions we test.
language: python
python:
- 2.7
- 3.5
- 3.6
- pypy
env:
- MONGODB=2.6 PYMONGO=2.7
- MONGODB=2.6 PYMONGO=2.8
- MONGODB=2.6 PYMONGO=3.0
matrix:
# Finish the build as soon as one job fails
fast_finish: true
include:
- python: 2.7
env: MONGODB=2.4 PYMONGO=2.7
- python: 2.7
env: MONGODB=2.4 PYMONGO=3.0
- python: 2.7
env: MONGODB=3.0 PYMONGO=3.0
- python: 3.5
env: MONGODB=2.4 PYMONGO=2.7
- python: 3.5
env: MONGODB=2.4 PYMONGO=3.0
- python: 3.5
env: MONGODB=3.0 PYMONGO=3.0
- python: 3.6
env: MONGODB=2.4 PYMONGO=3.0
- python: 3.6
env: MONGODB=3.0 PYMONGO=3.0
before_install:
- bash .install_mongodb_on_travis.sh
- sleep 15 # https://docs.travis-ci.com/user/database-setup/#MongoDB-does-not-immediately-accept-connections
- mongo --eval 'db.version();'
install:
- sudo apt-get install python-dev python3-dev libopenjpeg-dev zlib1g-dev libjpeg-turbo8-dev
libtiff4-dev libjpeg8-dev libfreetype6-dev liblcms2-dev libwebp-dev tcl8.5-dev tk8.5-dev
python-tk
- travis_retry pip install --upgrade pip
- travis_retry pip install coveralls
- travis_retry pip install flake8 flake8-import-order
- travis_retry pip install tox>=1.9
- travis_retry pip install "virtualenv<14.0.0" # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32)
- travis_retry tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -e test
# Cache dependencies installed via pip
cache: pip
# Run flake8 for py27
before_script:
- if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then flake8 .; else echo "flake8 only runs on py27"; fi
script:
- tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- --with-coverage
# For now only submit coveralls for Python v2.7. Python v3.x currently shows
# 0% coverage. That's caused by 'use_2to3', which builds the py3-compatible
# code in a separate dir and runs tests on that.
after_success:
- if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then coveralls --verbose; fi
notifications:
irc: irc.freenode.org#mongoengine
# Only run builds on the master branch and GitHub releases (tagged as vX.Y.Z)
branches:
only:
- master
- /^v.*$/
# Whenever a new release is created via GitHub, publish it on PyPI.
deploy:
provider: pypi
user: the_drow
password:
secure: QMyatmWBnC6ZN3XLW2+fTBDU4LQcp1m/LjR2/0uamyeUzWKdlOoh/Wx5elOgLwt/8N9ppdPeG83ose1jOz69l5G0MUMjv8n/RIcMFSpCT59tGYqn3kh55b0cIZXFT9ar+5cxlif6a5rS72IHm5li7QQyxexJIII6Uxp0kpvUmek=
# create a source distribution and a pure python wheel for faster installs
distributions: "sdist bdist_wheel"
# only deploy on tagged commits (aka GitHub releases) and only for the
# parent repo's builds running Python 2.7 along with PyMongo v3.0 (we run
# Travis against many different Python and PyMongo versions and we don't
# want the deploy to occur multiple times).
on:
tags: true
repo: MongoEngine/mongoengine
condition: "$PYMONGO = 3.0"
python: 2.7

AUTHORS

@@ -1,247 +1,5 @@
The PRIMARY AUTHORS are (and/or have been):
Ross Lawley <ross.lawley@gmail.com>
Harry Marr <harry@hmarr.com>
Matt Dennewitz <mattdennewitz@gmail.com>
Deepak Thukral <iapain@yahoo.com>
Florian Schlachter <flori@n-schlachter.de>
Steve Challis <steve@stevechallis.com>
Wilson Júnior <wilsonpjunior@gmail.com>
Dan Crosta https://github.com/dcrosta
Laine Herron https://github.com/LaineHerron
CONTRIBUTORS
Derived from the git logs; inevitably incomplete, but all of these people and
others have submitted patches, reported bugs and generally helped make
MongoEngine that much better:
* blackbrrr
* Florian Schlachter
* Vincent Driessen
* Steve Challis
* flosch
* Deepak Thukral
* Colin Howe
* Wilson Júnior (https://github.com/wpjunior)
* Alistair Roche
* Dan Crosta
* Viktor Kerkez
* Stephan Jaekel
* Rached Ben Mustapha
* Greg Turner
* Daniel Hasselrot
* Mircea Pasoi
* Matt Chisholm
* James Punteney
* Timothée Peignier
* Stuart Rackham
* Serge Matveenko
* Matt Dennewitz
* Don Spaulding
* Ales Zoulek
* sshwsfc
* sib
* Samuel Clay
* Nick Vlku
* martin
* Flavio Amieiro
* Анхбаяр Лхагвадорж
* Zak Johnson
* Victor Farazdagi
* vandersonmota
* Theo Julienne
* sp
* Slavi Pantaleev
* Richard Henry
* Nicolas Perriault
* Nick Vlku Jr
* Michael Henson
* Leo Honkanen
* kuno
* Josh Ourisman
* Jaime
* Igor Ivanov
* Gregg Lind
* Gareth Lloyd
* Albert Choi
* John Arnfield
* grubberr
* Paul Aliagas
* Paul Cunnane
* Julien Rebetez
* Marc Tamlyn
* Karim Allah
* Adam Parrish
* jpfarias
* jonrscott
* Alice Zoë Bevan-McGregor (https://github.com/amcgregor/)
* Stephen Young
* tkloc
* aid
* yamaneko1212
* dave mankoff
* Alexander G. Morano
* jwilder
* Joe Shaw
* Adam Flynn
* Ankhbayar
* Jan Schrewe
* David Koblas
* Crittercism
* Alvin Liang
* andrewmlevy
* Chris Faulkner
* Ashwin Purohit
* Shalabh Aggarwal
* Chris Williams
* Robert Kajic
* Jacob Peddicord
* Nils Hasenbanck
* mostlystatic
* Greg Banks
* swashbuckler
* Adam Reeve
* Anthony Nemitz
* deignacio
* Shaun Duncan
* Meir Kriheli
* Andrey Fedoseev
* aparajita
* Tristan Escalada
* Alexander Koshelev
* Jaime Irurzun
* Alexandre González
* Thomas Steinacher
* Tommi Komulainen
* Peter Landry
* biszkoptwielki
* Anton Kolechkin
* Sergey Nikitin
* psychogenic
* Stefan Wójcik (https://github.com/wojcikstefan)
* dimonb
* Garry Polley
* James Slagle
* Adrian Scott
* Peter Teichman
* Jakub Kot
* Jorge Bastida
* Aleksandr Sorokoumov
* Yohan Graterol
* bool-dev
* Russ Weeks
* Paul Swartz
* Sundar Raman
* Benoit Louy
* Loic Raucy (https://github.com/lraucy)
* hellysmile
* Jaepil Jeong
* Daniil Sharou
* Pete Campton
* Martyn Smith
* Marcelo Anton
* Aleksey Porfirov (https://github.com/lexqt)
* Nicolas Trippar
* Manuel Hermann
* Gustavo Gawryszewski
* Max Countryman
* caitifbrito
* lcya86 刘春洋
* Martin Alderete (https://github.com/malderete)
* Nick Joyce
* Jared Forsyth
* Kenneth Falck
* Lukasz Balcerzak
* Nicolas Cortot
* Alex (https://github.com/kelsta)
* Jin Zhang
* Daniel Axtens
* Leo-Naeka
* Ryan Witt (https://github.com/ryanwitt)
* Jiequan (https://github.com/Jiequan)
* hensom (https://github.com/hensom)
* zhy0216 (https://github.com/zhy0216)
* istinspring (https://github.com/istinspring)
* Massimo Santini (https://github.com/mapio)
* Nigel McNie (https://github.com/nigelmcnie)
* ygbourhis (https://github.com/ygbourhis)
* Bob Dickinson (https://github.com/BobDickinson)
* Michael Bartnett (https://github.com/michaelbartnett)
* Alon Horev (https://github.com/alonho)
* Kelvin Hammond (https://github.com/kelvinhammond)
* Jatin Chopra (https://github.com/jatin)
* Paul Uithol (https://github.com/PaulUithol)
* Thom Knowles (https://github.com/fleat)
* Paul (https://github.com/squamous)
* Olivier Cortès (https://github.com/Karmak23)
* crazyzubr (https://github.com/crazyzubr)
* FrankSomething (https://github.com/FrankSomething)
* Alexandr Morozov (https://github.com/LK4D4)
* mishudark (https://github.com/mishudark)
* Joe Friedl (https://github.com/grampajoe)
* Daniel Ward (https://github.com/danielward)
* Aniket Deshpande (https://github.com/anicake)
* rfkrocktk (https://github.com/rfkrocktk)
* Gustavo Andrés Angulo (https://github.com/woakas)
* Dmytro Popovych (https://github.com/drudim)
* Tom (https://github.com/tomprimozic)
* j0hnsmith (https://github.com/j0hnsmith)
* Damien Churchill (https://github.com/damoxc)
* Jonathan Simon Prates (https://github.com/jonathansp)
* Thiago Papageorgiou (https://github.com/tmpapageorgiou)
* Omer Katz (https://github.com/thedrow)
* Falcon Dai (https://github.com/falcondai)
* Polyrabbit (https://github.com/polyrabbit)
* Sagiv Malihi (https://github.com/sagivmalihi)
* Dmitry Konishchev (https://github.com/KonishchevDmitry)
* Martyn Smith (https://github.com/martynsmith)
* Andrei Zbikowski (https://github.com/b1naryth1ef)
* Ronald van Rij (https://github.com/ronaldvanrij)
* François Schmidts (https://github.com/jaesivsm)
* Eric Plumb (https://github.com/professorplumb)
* Damien Churchill (https://github.com/damoxc)
* Aleksandr Sorokoumov (https://github.com/Gerrrr)
* Clay McClure (https://github.com/claymation)
* Bruno Rocha (https://github.com/rochacbruno)
* Norberto Leite (https://github.com/nleite)
* Bob Cribbs (https://github.com/bocribbz)
* Jay Shirley (https://github.com/jshirley)
* David Bordeynik (https://github.com/DavidBord)
* Axel Haustant (https://github.com/noirbizarre)
* David Czarnecki (https://github.com/czarneckid)
* Vyacheslav Murashkin (https://github.com/a4tunado)
* André Ericson (https://github.com/aericson)
* Mikhail Moshnogorsky (https://github.com/mikhailmoshnogorsky)
* Diego Berrocal (https://github.com/cestdiego)
* Matthew Ellison (https://github.com/seglberg)
* Jimmy Shen (https://github.com/jimmyshen)
* J. Fernando Sánchez (https://github.com/balkian)
* Michael Chase (https://github.com/rxsegrxup)
* Eremeev Danil (https://github.com/elephanter)
* Catstyle Lee (https://github.com/Catstyle)
* Kiryl Yermakou (https://github.com/rma4ok)
* Matthieu Rigal (https://github.com/MRigal)
* Charanpal Dhanjal (https://github.com/charanpald)
* Emmanuel Leblond (https://github.com/touilleMan)
* Breeze.Kay (https://github.com/9nix00)
* Vicki Donchenko (https://github.com/kivistein)
* Emile Caron (https://github.com/emilecaron)
* Amit Lichtenberg (https://github.com/amitlicht)
* Gang Li (https://github.com/iici-gli)
* Lars Butler (https://github.com/larsbutler)
* George Macon (https://github.com/gmacon)
* Ashley Whetter (https://github.com/AWhetter)
* Paul-Armand Verhaegen (https://github.com/paularmand)
* Steven Rossiter (https://github.com/BeardedSteve)
* Luo Peng (https://github.com/RussellLuo)
* Bryan Bennett (https://github.com/bbenne10)
* Gilb's Gilb's (https://github.com/gilbsgilbs)
* Joshua Nedrud (https://github.com/Neurostack)
* Shu Shen (https://github.com/shushen)
* xiaost7 (https://github.com/xiaost7)
* Victor Varvaryuk
* Stanislav Kaledin (https://github.com/sallyruthstruik)
* Dmitry Yantsen (https://github.com/mrTable)
* Renjianxin (https://github.com/Davidrjx)
* Erdenezul Batmunkh (https://github.com/erdenezul)


@@ -1,82 +0,0 @@
Contributing to MongoEngine
===========================
MongoEngine has a large `community
<https://raw.github.com/MongoEngine/mongoengine/master/AUTHORS>`_ and
contributions are always encouraged. Contributions can be as simple as
minor tweaks to the documentation. Please read these guidelines before
sending a pull request.
Bugfixes and New Features
-------------------------
Before starting to write code, look for existing `tickets
<https://github.com/MongoEngine/mongoengine/issues?state=open>`_ or `create one
<https://github.com/MongoEngine/mongoengine/issues>`_ for your specific
issue or feature request. That way you avoid working on something
that might not be of interest or that has already been addressed. If in doubt,
post to the `user group <http://groups.google.com/group/mongoengine-users>`_.
Supported Interpreters
----------------------
MongoEngine supports CPython 2.7 and newer. Language
features not supported by all interpreters cannot be used.
Please also ensure that your code is properly converted by
`2to3 <http://docs.python.org/library/2to3.html>`_ for Python 3 support.
Style Guide
-----------
MongoEngine aims to follow `PEP8 <http://www.python.org/dev/peps/pep-0008/>`_
including 4 space indents. When possible we try to stick to 79 character line
limits. However, screens have grown and an ORM has a strong focus on
readability, so when it helps we accept a maximum line length of 119
characters, in a similar way as `django does
<https://docs.djangoproject.com/en/dev/internals/contributing/writing-code/coding-style/#python-style>`_.
Testing
-------
All tests are run on `Travis <http://travis-ci.org/MongoEngine/mongoengine>`_
and any pull requests are automatically tested. Any pull requests without
tests will take longer to be integrated and might be refused.
You may also submit a simple failing test as a pull request if you don't know
how to fix it; that makes it easier for other people to work on it, and it may
get fixed faster.
General Guidelines
------------------
- Avoid backward breaking changes if at all possible.
- If you *have* to introduce a breaking change, make it very clear in your
pull request's description. Also, describe how users of this package
should adapt to the breaking change in docs/upgrade.rst.
- Write inline documentation for new classes and methods.
- Write tests and make sure they pass (make sure you have a mongod
running on the default port, then execute ``python setup.py nosetests``
from the command line to run the test suite).
- Ensure tests pass on all supported Python, PyMongo, and MongoDB versions.
You can test various Python and PyMongo versions locally by executing
``tox``. For different MongoDB versions, you can rely on our automated
Travis tests.
- Add enhancements or problematic bug fixes to docs/changelog.rst.
- Add yourself to AUTHORS :)
Documentation
-------------
To contribute to the `API documentation
<http://docs.mongoengine.org/en/latest/apireference.html>`_
just make your changes to the inline documentation of the appropriate
`source code <https://github.com/MongoEngine/mongoengine>`_ or `rst file
<https://github.com/MongoEngine/mongoengine/tree/master/docs>`_ in a
branch and submit a `pull request <https://help.github.com/articles/using-pull-requests>`_.
You might also use the GitHub `Edit <https://github.com/blog/844-forking-with-the-edit-button>`_
button.
If you want to test your documentation changes locally, you need to install
the ``sphinx`` and ``sphinx_rtd_theme`` packages. Once these are installed,
go to the ``docs`` directory, run ``make html`` and inspect the updated docs
by running ``open _build/html/index.html``.


@@ -1,5 +1,5 @@
Copyright (c) 2009 See AUTHORS
Copyright (c) 2009-2010 Harry Marr
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
@@ -8,10 +8,10 @@ copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND


@@ -2,74 +2,36 @@
MongoEngine
===========
:Info: MongoEngine is an ORM-like layer on top of PyMongo.
:Repository: https://github.com/MongoEngine/mongoengine
:Author: Harry Marr (http://github.com/hmarr)
:Maintainer: Stefan Wójcik (http://github.com/wojcikstefan)
.. image:: https://travis-ci.org/MongoEngine/mongoengine.svg?branch=master
:target: https://travis-ci.org/MongoEngine/mongoengine
.. image:: https://coveralls.io/repos/github/MongoEngine/mongoengine/badge.svg?branch=master
:target: https://coveralls.io/github/MongoEngine/mongoengine?branch=master
.. image:: https://landscape.io/github/MongoEngine/mongoengine/master/landscape.svg?style=flat
:target: https://landscape.io/github/MongoEngine/mongoengine/master
:alt: Code Health
About
=====
MongoEngine is a Python Object-Document Mapper for working with MongoDB.
Documentation is available at https://mongoengine-odm.readthedocs.io - there
is currently a `tutorial <https://mongoengine-odm.readthedocs.io/tutorial.html>`_,
a `user guide <https://mongoengine-odm.readthedocs.io/guide/index.html>`_, and
an `API reference <https://mongoengine-odm.readthedocs.io/apireference.html>`_.
Supported MongoDB Versions
==========================
MongoEngine is currently tested against MongoDB v2.4, v2.6, and v3.0. Future
versions should be supported as well, but aren't actively tested at the moment.
Make sure to open an issue or submit a pull request if you experience any
problems with MongoDB v3.2+.
MongoEngine is a Python Object-Document Mapper for working with MongoDB.
Documentation available at http://hmarr.com/mongoengine/ - there is currently
a `tutorial <http://hmarr.com/mongoengine/tutorial.html>`_, a `user guide
<http://hmarr.com/mongoengine/userguide.html>`_ and an `API reference
<http://hmarr.com/mongoengine/apireference.html>`_.
Installation
============
We recommend the use of `virtualenv <https://virtualenv.pypa.io/>`_ and of
`pip <https://pip.pypa.io/>`_. You can then use ``pip install -U mongoengine``.
You may also have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_
and thus you can use ``easy_install -U mongoengine``. Otherwise, you can download the
source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and run ``python
If you have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_
you can use ``easy_install -U mongoengine``. Otherwise, you can download the
source from `GitHub <http://github.com/hmarr/mongoengine>`_ and run ``python
setup.py install``.
Dependencies
============
All of the dependencies can easily be installed via `pip <https://pip.pypa.io/>`_.
At the very least, you'll need these two packages to use MongoEngine:
- pymongo>=2.7.1
- six>=1.10.0
If you utilize a ``DateTimeField``, you might also use a more flexible date parser:
- dateutil>=2.1.0
If you need to use an ``ImageField`` or ``ImageGridFsProxy``:
- Pillow>=2.0.0
- pymongo 1.1+
- sphinx (optional - for documentation generation)
Examples
========
Some simple examples of what MongoEngine code looks like:
.. code:: python
from mongoengine import *
connect('mydb')
Some simple examples of what MongoEngine code looks like::
class BlogPost(Document):
title = StringField(required=True, max_length=200)
posted = DateTimeField(default=datetime.datetime.utcnow)
posted = DateTimeField(default=datetime.datetime.now)
tags = ListField(StringField(max_length=50))
meta = {'allow_inheritance': True}
class TextPost(BlogPost):
content = StringField(required=True)
@@ -96,55 +58,40 @@ Some simple examples of what MongoEngine code looks like:
... print 'Link:', post.url
... print
...
=== Using MongoEngine ===
See the tutorial
# Count all blog posts and their subtypes
>>> BlogPost.objects.count()
=== MongoEngine Docs ===
Link: hmarr.com/mongoengine
>>> len(BlogPost.objects)
2
>>> TextPost.objects.count()
>>> len(HtmlPost.objects)
1
>>> LinkPost.objects.count()
>>> len(LinkPost.objects)
1
# Count tagged posts
>>> BlogPost.objects(tags='mongoengine').count()
# Find tagged posts
>>> len(BlogPost.objects(tags='mongoengine'))
2
>>> BlogPost.objects(tags='mongodb').count()
>>> len(BlogPost.objects(tags='mongodb'))
1
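Putting the pieces above together, a minimal end-to-end sketch (assuming a
local ``mongod`` on the default port):

.. code-block:: python

    import datetime
    from mongoengine import (Document, StringField, DateTimeField,
                             ListField, connect)

    connect('mydb')  # defaults to localhost:27017

    class BlogPost(Document):
        title = StringField(required=True, max_length=200)
        posted = DateTimeField(default=datetime.datetime.utcnow)
        tags = ListField(StringField(max_length=50))
        meta = {'allow_inheritance': True}

    class TextPost(BlogPost):
        content = StringField(required=True)

    TextPost(title='Hello', content='First post!',
             tags=['mongoengine']).save()
    assert BlogPost.objects(tags='mongoengine').count() == 1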
Tests
=====
To run the test suite, ensure you are running a local instance of MongoDB on
the standard port and have ``nose`` installed. Then, run ``python setup.py nosetests``.
To run the test suite on every supported Python and PyMongo version, you can
use ``tox``. You'll need to make sure you have each supported Python version
installed in your environment and then:
.. code-block:: shell
# Install tox
$ pip install tox
# Run the test suites
$ tox
If you wish to run a subset of tests, use the nosetests convention:
.. code-block:: shell
# Run all the tests in a particular test file
$ python setup.py nosetests --tests tests/fields/fields.py
# Run only particular test class in that file
$ python setup.py nosetests --tests tests/fields/fields.py:FieldTest
# Use the -s option if you want to print some debug statements or use pdb
$ python setup.py nosetests --tests tests/fields/fields.py:FieldTest -s
the standard port, and run ``python setup.py test``.
Community
=========
- `MongoEngine Users mailing list
<http://groups.google.com/group/mongoengine-users>`_
- `MongoEngine Developers mailing list
<http://groups.google.com/group/mongoengine-dev>`_
- `#mongoengine IRC channel <irc://irc.freenode.net/mongoengine>`_
Contributing
============
We welcome contributions! See the `Contribution guidelines <https://github.com/MongoEngine/mongoengine/blob/master/CONTRIBUTING.rst>`_
The source is available on `GitHub <http://github.com/hmarr/mongoengine>`_ - to
contribute to the project, fork it on GitHub and send a pull request, all
contributions and suggestions are welcome!


@@ -1,207 +0,0 @@
#!/usr/bin/env python
"""
Simple benchmark comparing PyMongo and MongoEngine.
Sample run on a mid 2015 MacBook Pro (commit b282511):
Benchmarking...
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - Pymongo
2.58979988098
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - Pymongo write_concern={"w": 0}
1.26657605171
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine
8.4351580143
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries without continual assign - MongoEngine
7.20191693306
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True
6.31104588509
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True
6.07083487511
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False
5.97704291344
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False
5.9111430645
"""
import timeit
def main():
print("Benchmarking...")
setup = """
from pymongo import MongoClient
connection = MongoClient()
connection.drop_database('timeit_test')
"""
stmt = """
from pymongo import MongoClient
connection = MongoClient()
db = connection.timeit_test
noddy = db.noddy
for i in range(10000):
example = {'fields': {}}
for j in range(20):
example['fields']['key' + str(j)] = 'value ' + str(j)
noddy.save(example)
myNoddys = noddy.find()
[n for n in myNoddys] # iterate
"""
print("-" * 100)
print("""Creating 10000 dictionaries - Pymongo""")
t = timeit.Timer(stmt=stmt, setup=setup)
print(t.timeit(1))
stmt = """
from pymongo import MongoClient
from pymongo.write_concern import WriteConcern
connection = MongoClient()
db = connection.get_database('timeit_test', write_concern=WriteConcern(w=0))
noddy = db.noddy
for i in range(10000):
example = {'fields': {}}
for j in range(20):
example['fields']["key"+str(j)] = "value "+str(j)
noddy.save(example)
myNoddys = noddy.find()
[n for n in myNoddys] # iterate
"""
print("-" * 100)
print("""Creating 10000 dictionaries - Pymongo write_concern={"w": 0}""")
t = timeit.Timer(stmt=stmt, setup=setup)
print(t.timeit(1))
setup = """
from pymongo import MongoClient
connection = MongoClient()
connection.drop_database('timeit_test')
connection.close()
from mongoengine import Document, DictField, connect
connect('timeit_test')
class Noddy(Document):
fields = DictField()
"""
stmt = """
for i in range(10000):
noddy = Noddy()
for j in range(20):
noddy.fields["key"+str(j)] = "value "+str(j)
noddy.save()
myNoddys = Noddy.objects()
[n for n in myNoddys] # iterate
"""
print("-" * 100)
print("""Creating 10000 dictionaries - MongoEngine""")
t = timeit.Timer(stmt=stmt, setup=setup)
print(t.timeit(1))
stmt = """
for i in range(10000):
noddy = Noddy()
fields = {}
for j in range(20):
fields["key"+str(j)] = "value "+str(j)
noddy.fields = fields
noddy.save()
myNoddys = Noddy.objects()
[n for n in myNoddys] # iterate
"""
print("-" * 100)
print("""Creating 10000 dictionaries without continual assign - MongoEngine""")
t = timeit.Timer(stmt=stmt, setup=setup)
print(t.timeit(1))
stmt = """
for i in range(10000):
noddy = Noddy()
for j in range(20):
noddy.fields["key"+str(j)] = "value "+str(j)
noddy.save(write_concern={"w": 0}, cascade=True)
myNoddys = Noddy.objects()
[n for n in myNoddys] # iterate
"""
print("-" * 100)
print("""Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True""")
t = timeit.Timer(stmt=stmt, setup=setup)
print(t.timeit(1))
stmt = """
for i in range(10000):
noddy = Noddy()
for j in range(20):
noddy.fields["key"+str(j)] = "value "+str(j)
noddy.save(write_concern={"w": 0}, validate=False, cascade=True)
myNoddys = Noddy.objects()
[n for n in myNoddys] # iterate
"""
print("-" * 100)
print("""Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True""")
t = timeit.Timer(stmt=stmt, setup=setup)
print(t.timeit(1))
stmt = """
for i in range(10000):
noddy = Noddy()
for j in range(20):
noddy.fields["key"+str(j)] = "value "+str(j)
noddy.save(validate=False, write_concern={"w": 0})
myNoddys = Noddy.objects()
[n for n in myNoddys] # iterate
"""
print("-" * 100)
print("""Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False""")
t = timeit.Timer(stmt=stmt, setup=setup)
print(t.timeit(1))
stmt = """
for i in range(10000):
noddy = Noddy()
for j in range(20):
noddy.fields["key"+str(j)] = "value "+str(j)
noddy.save(force_insert=True, write_concern={"w": 0}, validate=False)
myNoddys = Noddy.objects()
[n for n in myNoddys] # iterate
"""
print("-" * 100)
print("""Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False""")
t = timeit.Timer(stmt=stmt, setup=setup)
print(t.timeit(1))
if __name__ == "__main__":
main()

docs/_themes/nature/static/nature.css_t

@@ -0,0 +1,229 @@
/**
* Sphinx stylesheet -- default theme
* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/
@import url("basic.css");
/* -- page layout ----------------------------------------------------------- */
body {
font-family: Arial, sans-serif;
font-size: 100%;
background-color: #111;
color: #555;
margin: 0;
padding: 0;
}
div.documentwrapper {
float: left;
width: 100%;
}
div.bodywrapper {
margin: 0 0 0 230px;
}
hr{
border: 1px solid #B1B4B6;
}
div.document {
background-color: #eee;
}
div.body {
background-color: #ffffff;
color: #3E4349;
padding: 0 30px 30px 30px;
font-size: 0.8em;
}
div.footer {
color: #555;
width: 100%;
padding: 13px 0;
text-align: center;
font-size: 75%;
}
div.footer a {
color: #444;
text-decoration: underline;
}
div.related {
background-color: #6BA81E;
line-height: 32px;
color: #fff;
text-shadow: 0px 1px 0 #444;
font-size: 0.80em;
}
div.related a {
color: #E2F3CC;
}
div.sphinxsidebar {
font-size: 0.75em;
line-height: 1.5em;
}
div.sphinxsidebarwrapper{
padding: 20px 0;
}
div.sphinxsidebar h3,
div.sphinxsidebar h4 {
font-family: Arial, sans-serif;
color: #222;
font-size: 1.2em;
font-weight: normal;
margin: 0;
padding: 5px 10px;
background-color: #ddd;
text-shadow: 1px 1px 0 white
}
div.sphinxsidebar h4{
font-size: 1.1em;
}
div.sphinxsidebar h3 a {
color: #444;
}
div.sphinxsidebar p {
color: #888;
padding: 5px 20px;
}
div.sphinxsidebar p.topless {
}
div.sphinxsidebar ul {
margin: 10px 20px;
padding: 0;
color: #000;
}
div.sphinxsidebar a {
color: #444;
}
div.sphinxsidebar input {
border: 1px solid #ccc;
font-family: sans-serif;
font-size: 1em;
}
div.sphinxsidebar input[type=text]{
margin-left: 20px;
}
/* -- body styles ----------------------------------------------------------- */
a {
color: #005B81;
text-decoration: none;
}
a:hover {
color: #E32E00;
text-decoration: underline;
}
div.body h1,
div.body h2,
div.body h3,
div.body h4,
div.body h5,
div.body h6 {
font-family: Arial, sans-serif;
background-color: #BED4EB;
font-weight: normal;
color: #212224;
margin: 30px 0px 10px 0px;
padding: 5px 0 5px 10px;
text-shadow: 0px 1px 0 white
}
div.body h1 { border-top: 20px solid white; margin-top: 0; font-size: 200%; }
div.body h2 { font-size: 150%; background-color: #C8D5E3; }
div.body h3 { font-size: 120%; background-color: #D8DEE3; }
div.body h4 { font-size: 110%; background-color: #D8DEE3; }
div.body h5 { font-size: 100%; background-color: #D8DEE3; }
div.body h6 { font-size: 100%; background-color: #D8DEE3; }
a.headerlink {
color: #c60f0f;
font-size: 0.8em;
padding: 0 4px 0 4px;
text-decoration: none;
}
a.headerlink:hover {
background-color: #c60f0f;
color: white;
}
div.body p, div.body dd, div.body li {
line-height: 1.5em;
}
div.admonition p.admonition-title + p {
display: inline;
}
div.highlight{
background-color: white;
}
div.note {
background-color: #eee;
border: 1px solid #ccc;
}
div.seealso {
background-color: #ffc;
border: 1px solid #ff6;
}
div.topic {
background-color: #eee;
}
div.warning {
background-color: #ffe4e4;
border: 1px solid #f66;
}
p.admonition-title {
display: inline;
}
p.admonition-title:after {
content: ":";
}
pre {
padding: 10px;
background-color: White;
color: #222;
line-height: 1.2em;
border: 1px solid #C6C9CB;
font-size: 1.2em;
margin: 1.5em 0 1.5em 0;
-webkit-box-shadow: 1px 1px 1px #d8d8d8;
-moz-box-shadow: 1px 1px 1px #d8d8d8;
}
tt {
background-color: #ecf0f3;
color: #222;
padding: 1px 2px;
font-size: 1.2em;
font-family: monospace;
}

docs/_themes/nature/static/pygments.css

@@ -0,0 +1,54 @@
.c { color: #999988; font-style: italic } /* Comment */
.k { font-weight: bold } /* Keyword */
.o { font-weight: bold } /* Operator */
.cm { color: #999988; font-style: italic } /* Comment.Multiline */
.cp { color: #999999; font-weight: bold } /* Comment.preproc */
.c1 { color: #999988; font-style: italic } /* Comment.Single */
.gd { color: #000000; background-color: #ffdddd } /* Generic.Deleted */
.ge { font-style: italic } /* Generic.Emph */
.gr { color: #aa0000 } /* Generic.Error */
.gh { color: #999999 } /* Generic.Heading */
.gi { color: #000000; background-color: #ddffdd } /* Generic.Inserted */
.go { color: #111 } /* Generic.Output */
.gp { color: #555555 } /* Generic.Prompt */
.gs { font-weight: bold } /* Generic.Strong */
.gu { color: #aaaaaa } /* Generic.Subheading */
.gt { color: #aa0000 } /* Generic.Traceback */
.kc { font-weight: bold } /* Keyword.Constant */
.kd { font-weight: bold } /* Keyword.Declaration */
.kp { font-weight: bold } /* Keyword.Pseudo */
.kr { font-weight: bold } /* Keyword.Reserved */
.kt { color: #445588; font-weight: bold } /* Keyword.Type */
.m { color: #009999 } /* Literal.Number */
.s { color: #bb8844 } /* Literal.String */
.na { color: #008080 } /* Name.Attribute */
.nb { color: #999999 } /* Name.Builtin */
.nc { color: #445588; font-weight: bold } /* Name.Class */
.no { color: #ff99ff } /* Name.Constant */
.ni { color: #800080 } /* Name.Entity */
.ne { color: #990000; font-weight: bold } /* Name.Exception */
.nf { color: #990000; font-weight: bold } /* Name.Function */
.nn { color: #555555 } /* Name.Namespace */
.nt { color: #000080 } /* Name.Tag */
.nv { color: purple } /* Name.Variable */
.ow { font-weight: bold } /* Operator.Word */
.mf { color: #009999 } /* Literal.Number.Float */
.mh { color: #009999 } /* Literal.Number.Hex */
.mi { color: #009999 } /* Literal.Number.Integer */
.mo { color: #009999 } /* Literal.Number.Oct */
.sb { color: #bb8844 } /* Literal.String.Backtick */
.sc { color: #bb8844 } /* Literal.String.Char */
.sd { color: #bb8844 } /* Literal.String.Doc */
.s2 { color: #bb8844 } /* Literal.String.Double */
.se { color: #bb8844 } /* Literal.String.Escape */
.sh { color: #bb8844 } /* Literal.String.Heredoc */
.si { color: #bb8844 } /* Literal.String.Interpol */
.sx { color: #bb8844 } /* Literal.String.Other */
.sr { color: #808000 } /* Literal.String.Regex */
.s1 { color: #bb8844 } /* Literal.String.Single */
.ss { color: #bb8844 } /* Literal.String.Symbol */
.bp { color: #999999 } /* Name.Builtin.Pseudo */
.vc { color: #ff99ff } /* Name.Variable.Class */
.vg { color: #ff99ff } /* Name.Variable.Global */
.vi { color: #ff99ff } /* Name.Variable.Instance */
.il { color: #009999 } /* Literal.Number.Integer.Long */

docs/_themes/nature/theme.conf

@@ -0,0 +1,4 @@
[theme]
inherit = basic
stylesheet = nature.css
pygments_style = tango


@@ -6,7 +6,6 @@ Connecting
==========
.. autofunction:: mongoengine.connect
.. autofunction:: mongoengine.register_connection
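A brief usage sketch of the two functions above (the database and alias names
here are illustrative placeholders):

.. code-block:: python

    from mongoengine import connect, register_connection

    # Default connection, used by all documents unless overridden
    connect('mydb')

    # An extra aliased connection, usable e.g. with switch_db
    register_connection('analytics', name='analytics_db')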
Documents
=========
@@ -16,113 +15,56 @@ Documents
.. attribute:: objects
A :class:`~mongoengine.queryset.QuerySet` object that is created lazily
on access.
.. autoclass:: mongoengine.EmbeddedDocument
:members:
.. autoclass:: mongoengine.DynamicDocument
:members:
.. autoclass:: mongoengine.DynamicEmbeddedDocument
:members:
.. autoclass:: mongoengine.document.MapReduceDocument
:members:
.. autoclass:: mongoengine.ValidationError
:members:
.. autoclass:: mongoengine.FieldDoesNotExist
Context Managers
================
.. autoclass:: mongoengine.context_managers.switch_db
.. autoclass:: mongoengine.context_managers.switch_collection
.. autoclass:: mongoengine.context_managers.no_dereference
.. autoclass:: mongoengine.context_managers.query_counter
Querying
========
.. automodule:: mongoengine.queryset
:synopsis: Queryset level operations
.. autoclass:: mongoengine.queryset.QuerySet
:members:
.. autoclass:: mongoengine.queryset.QuerySet
:members:
:inherited-members:
.. automethod:: QuerySet.__call__
.. autoclass:: mongoengine.queryset.QuerySetNoCache
:members:
.. automethod:: mongoengine.queryset.QuerySetNoCache.__call__
.. autofunction:: mongoengine.queryset.queryset_manager
.. automethod:: mongoengine.queryset.QuerySet.__call__
Fields
======
.. autoclass:: mongoengine.base.fields.BaseField
.. autoclass:: mongoengine.fields.StringField
.. autoclass:: mongoengine.fields.URLField
.. autoclass:: mongoengine.fields.EmailField
.. autoclass:: mongoengine.fields.IntField
.. autoclass:: mongoengine.fields.LongField
.. autoclass:: mongoengine.fields.FloatField
.. autoclass:: mongoengine.fields.DecimalField
.. autoclass:: mongoengine.fields.BooleanField
.. autoclass:: mongoengine.fields.DateTimeField
.. autoclass:: mongoengine.fields.ComplexDateTimeField
.. autoclass:: mongoengine.fields.EmbeddedDocumentField
.. autoclass:: mongoengine.fields.GenericEmbeddedDocumentField
.. autoclass:: mongoengine.fields.DynamicField
.. autoclass:: mongoengine.fields.ListField
.. autoclass:: mongoengine.fields.EmbeddedDocumentListField
.. autoclass:: mongoengine.fields.SortedListField
.. autoclass:: mongoengine.fields.DictField
.. autoclass:: mongoengine.fields.MapField
.. autoclass:: mongoengine.fields.ReferenceField
.. autoclass:: mongoengine.fields.GenericReferenceField
.. autoclass:: mongoengine.fields.CachedReferenceField
.. autoclass:: mongoengine.fields.BinaryField
.. autoclass:: mongoengine.fields.FileField
.. autoclass:: mongoengine.fields.ImageField
.. autoclass:: mongoengine.fields.SequenceField
.. autoclass:: mongoengine.fields.ObjectIdField
.. autoclass:: mongoengine.fields.UUIDField
.. autoclass:: mongoengine.fields.GeoPointField
.. autoclass:: mongoengine.fields.PointField
.. autoclass:: mongoengine.fields.LineStringField
.. autoclass:: mongoengine.fields.PolygonField
.. autoclass:: mongoengine.fields.MultiPointField
.. autoclass:: mongoengine.fields.MultiLineStringField
.. autoclass:: mongoengine.fields.MultiPolygonField
.. autoclass:: mongoengine.fields.GridFSError
.. autoclass:: mongoengine.fields.GridFSProxy
.. autoclass:: mongoengine.fields.ImageGridFsProxy
.. autoclass:: mongoengine.fields.ImproperlyConfigured
.. autoclass:: mongoengine.StringField
.. autoclass:: mongoengine.URLField
.. autoclass:: mongoengine.IntField
.. autoclass:: mongoengine.FloatField
.. autoclass:: mongoengine.DecimalField
.. autoclass:: mongoengine.BooleanField
.. autoclass:: mongoengine.DateTimeField
.. autoclass:: mongoengine.EmbeddedDocumentField
.. autoclass:: mongoengine.DictField
.. autoclass:: mongoengine.ListField
.. autoclass:: mongoengine.BinaryField
.. autoclass:: mongoengine.ObjectIdField
.. autoclass:: mongoengine.ReferenceField
.. autoclass:: mongoengine.GenericReferenceField
.. autoclass:: mongoengine.FileField
.. autoclass:: mongoengine.GeoPointField
Embedded Document Querying
==========================
.. versionadded:: 0.9
Additional queries for Embedded Documents are available when using the
:class:`~mongoengine.EmbeddedDocumentListField` to store a list of embedded
documents.
A list of embedded documents is returned as a special list with the
following methods:
.. autoclass:: mongoengine.base.datastructures.EmbeddedDocumentList
:members:
Misc
====
.. autofunction:: mongoengine.common._import_class
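To make the Embedded Document Querying section above concrete, a hedged
sketch (the class and field names are illustrative, not from the docs):

.. code-block:: python

    from mongoengine import (Document, EmbeddedDocument, StringField,
                             EmbeddedDocumentListField, connect)

    connect('mydb')  # assumes a local mongod

    class Comment(EmbeddedDocument):
        author = StringField()
        message = StringField()

    class Page(Document):
        comments = EmbeddedDocumentListField(Comment)

    page = Page(comments=[Comment(author='bob', message='hi')]).save()
    # The returned special list supports extra query methods, e.g.:
    bobs = page.comments.filter(author='bob')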


@@ -2,779 +2,6 @@
Changelog
=========
Changes in 0.15.0
=================
- Add LazyReferenceField and GenericLazyReferenceField to address #1230
Changes in 0.14.1
=================
- Removed SemiStrictDict and started using a regular dict for `BaseDocument._data` #1630
- Added support for the `$position` param in the `$push` operator #1566
- Fixed `DateTimeField` interpreting an empty string as today #1533
- Added a missing `__ne__` method to the `GridFSProxy` class #1632
- Fixed `BaseQuerySet._fields_to_db_fields` #1553
Changes in 0.14.0
=================
- BREAKING CHANGE: Removed the `coerce_types` param from `QuerySet.as_pymongo` #1549
- POTENTIAL BREAKING CHANGE: Made EmbeddedDocument not hashable by default #1528
- Improved code quality #1531, #1540, #1541, #1547
Changes in 0.13.0
=================
- POTENTIAL BREAKING CHANGE: Added Unicode support to the `EmailField`, see
docs/upgrade.rst for details.
Changes in 0.12.0
=================
- POTENTIAL BREAKING CHANGE: Fixed limit/skip/hint/batch_size chaining #1476
- POTENTIAL BREAKING CHANGE: Changed a public `QuerySet.clone_into` method to a private `QuerySet._clone_into` #1476
- Fixed the way `Document.objects.create` works with duplicate IDs #1485
- Fixed connecting to a replica set with PyMongo 2.x #1436
- Fixed using sets in field choices #1481
- Fixed deleting items from a `ListField` #1318
- Fixed an obscure error message when filtering by `field__in=non_iterable`. #1237
- Fixed behavior of a `dec` update operator #1450
- Added a `rename` update operator #1454
- Added validation for the `db_field` parameter #1448
- Fixed the error message displayed when querying an `EmbeddedDocumentField` by an invalid value #1440
- Fixed the error message displayed when validating unicode URLs #1486
- Raise an error when trying to save an abstract document #1449
Changes in 0.11.0
=================
- BREAKING CHANGE: Renamed `ConnectionError` to `MongoEngineConnectionError` since the former is a built-in exception name in Python v3.x. #1428
- BREAKING CHANGE: Dropped Python 2.6 support. #1428
- BREAKING CHANGE: `from mongoengine.base import ErrorClass` won't work anymore for any error from `mongoengine.errors` (e.g. `ValidationError`). Use `from mongoengine.errors import ErrorClass` instead. #1428
- BREAKING CHANGE: Accessing a broken reference will raise a `DoesNotExist` error. In the past it used to return `None`. #1334
- Fixed absent rounding for DecimalField when `force_string` is set. #1103
Changes in 0.10.8
=================
- Added support for QuerySet.batch_size (#1426)
- Fixed query set iteration within iteration #1427
- Fixed an issue where specifying a MongoDB URI host would override more information than it should #1421
- Added ability to filter the generic reference field by ObjectId and DBRef #1425
- Fixed delete cascade for models with a custom primary key field #1247
- Added ability to specify an authentication mechanism (e.g. X.509) #1333
- Added support for falsey primary keys (e.g. doc.pk = 0) #1354
- Fixed QuerySet#sum/average for fields w/ explicit db_field #1417
- Fixed filtering by embedded_doc=None #1422
- Added support for cursor.comment #1420
- Fixed doc.get_<field>_display #1419
- Fixed __repr__ method of the StrictDict #1424
- Added a deprecation warning for Python 2.6
Changes in 0.10.7
=================
- Dropped Python 3.2 support #1390
- Fixed the bug where dynamic doc has index inside a dict field #1278
- Fixed: ListField minus index assignment does not work #1128
- Fixed cascade delete mixing among collections #1224
- Add `signal_kwargs` argument to `Document.save`, `Document.delete` and `BaseQuerySet.insert` to be passed to signals calls #1206
- Raise `OperationError` when trying to do a `drop_collection` on document with no collection set.
- count on ListField of EmbeddedDocumentField fails. #1187
- Fixed long fields stored as int32 in Python 3. #1253
- MapField now handles unicode keys correctly. #1267
- ListField now handles negative indices correctly. #1270
- Fixed AttributeError when initializing EmbeddedDocument with positional args. #681
- Fixed no_cursor_timeout error with pymongo 3.0+ #1304
- Replaced map-reduce based QuerySet.sum/average with aggregation-based implementations #1336
- Fixed support for `__` to escape field names that match operators names in `update` #1351
- Fixed BaseDocument#_mark_as_changed #1369
- Added support for pickling QuerySet instances. #1397
- Fixed connecting to a list of hosts #1389
- Fixed a bug where accessing broken references wouldn't raise a DoesNotExist error #1334
- Fixed not being able to specify use_db_field=False on ListField(EmbeddedDocumentField) instances #1218
- Improvements to the dictionary fields docs #1383
Changes in 0.10.6
=================
- Add support for mocking MongoEngine based on mongomock. #1151
- Fixed not being able to run tests on Windows. #1153
- Allow creation of sparse compound indexes. #1114
- count on ListField of EmbeddedDocumentField fails. #1187
Changes in 0.10.5
=================
- Fix for reloading of strict with special fields. #1156
Changes in 0.10.4
=================
- SaveConditionError is now importable from the top level package. #1165
- upsert_one method added. #1157
Changes in 0.10.3
=================
- Fix `read_preference` (it had chaining issues with PyMongo 2.x and it didn't work at all with PyMongo 3.x) #1042
Changes in 0.10.2
=================
- Allow shard key to point to a field in an embedded document. #551
- Allow arbitrary metadata in fields. #1129
- ReferenceFields now support abstract document types. #837
Changes in 0.10.1
=================
- Fix infinite recursion with CASCADE delete rules under specific conditions. #1046
- Fix CachedReferenceField bug when loading cached docs as DBRef but failing to save them. #1047
- Fix ignored chained options #842
- Document save's save_condition error raises `SaveConditionError` exception #1070
- Fix Document.reload for DynamicDocument. #1050
- StrictDict & SemiStrictDict are shadowed at init time. #1105
- Fix ListField minus index assignment does not work. #1119
- Remove code that marks field as changed when the field has default but not existed in database #1126
- Remove test dependencies (nose and rednose) from install dependencies list. #1079
- Recursively build query when using elemMatch operator. #1130
- Fix instance back references for lists of embedded documents. #1131
Changes in 0.10.0
=================
- Django support was removed and will be available as a separate extension. #958
- Allow to load undeclared field with meta attribute 'strict': False #957
- Support for PyMongo 3+ #946
- Removed get_or_create() deprecated since 0.8.0. #300
- Improve Document._created status when switch collection and db #1020
- Queryset update doesn't go through field validation #453
- Added support for specifying authentication source as option `authSource` in URI. #967
- Fixed mark_as_changed to handle higher/lower level fields changed. #927
- ListField of embedded docs doesn't set the _instance attribute when iterating over it #914
- Support += and *= for ListField #595
- Use sets for populating dbrefs to dereference
- Fixed unpickled documents replacing the global field's list. #888
- Fixed storage of microseconds in ComplexDateTimeField and unused separator option. #910
- Don't send a "cls" option to ensureIndex (related to https://jira.mongodb.org/browse/SERVER-769)
- Fix for updating sorting in SortedListField. #978
- Added __ support to escape field name in fields lookup keywords that match operators names #949
- Fix for issue where FileField deletion did not free space in GridFS.
- no_dereference() not respected on embedded docs containing a reference. #517
- Document save raise an exception if save_condition fails #1005
- Fixes some internal _id handling issue. #961
- Updated URL and Email Field regex validators, added schemes argument to URLField validation. #652
- Capped collection multiple of 256. #1011
- Added `BaseQuerySet.aggregate_sum` and `BaseQuerySet.aggregate_average` methods.
- Fix for delete with write_concern {'w': 0}. #1008
- Allow dynamic lookup for more than two parts. #882
- Added support for min_distance on geo queries. #831
- Allow to add custom metadata to fields #705
Changes in 0.9.0
================
- Update FileField when creating a new file #714
- Added `EmbeddedDocumentListField` for Lists of Embedded Documents. #826
- ComplexDateTimeField should fall back to None when null=True #864
- Request Support for $min, $max Field update operators #863
- `BaseDict` does not follow `setdefault` #866
- Add support for $type operator #766
- Fix tests for pymongo 2.8+ #877
- No module named 'django.utils.importlib' (Django dev) #872
- Field choices now accept subclasses of Documents
- Ensure Indexes before Each Save #812
- Generate Unique Indices for Lists of EmbeddedDocuments #358
- Sparse fields #515
- write_concern not in params of Collection#remove #801
- Better BaseDocument equality check when not saved #798
- OperationError: Shard Keys are immutable. Tried to update id even though the document is not yet saved #771
- with_limit_and_skip for count should default like in pymongo #759
- Fix storing value of precision attribute in DecimalField #787
- Set attribute to None does not work (at least for fields with default values) #734
- Querying by a field defined in a subclass raises InvalidQueryError #744
- Add Support For MongoDB 2.6.X's maxTimeMS #778
- abstract shouldn't be inherited in EmbeddedDocument #789
- Allow specifying the '_cls' as a field for indexes #397
- Stop ensure_indexes running on secondaries unless connection is through mongos #746
- Not overriding default values when loading a subset of fields #399
- Saving document doesn't create new fields in existing collection #620
- Added `Queryset.aggregate` wrapper to aggregation framework #703
- Added support to show original model fields on to_json calls instead of db_field #697
- Added Queryset.search_text for text index searches #700
- Fixed tests for Django 1.7 #696
- Follow ReferenceFields in EmbeddedDocuments with select_related #690
- Added preliminary support for text indexes #680
- Added `elemMatch` operator as well - `match` is too obscure #653
- Added support for progressive JPEG #486 #548
- Allow strings to be used in index creation #675
- Fixed EmbeddedDoc weakref proxy issue #592
- Fixed nested reference field distinct error #583
- Fixed change tracking on nested MapFields #539
- Dynamic fields in embedded documents now visible to queryset.only() / qs.exclude() #425 #507
- Add authentication_source option to register_connection #178 #464 #573 #580 #590
- Implemented equality between Documents and DBRefs #597
- Fixed ReferenceField inside nested ListFields dereferencing problem #368
- Added the ability to reload specific document fields #100
- Added db_alias support and fixes for custom map/reduce output #586
- post_save signal now has access to delta information about field changes #594 #589
- Don't query with $orderby for qs.get() #600
- Fix id shard key save issue #636
- Fixes issue with recursive embedded document errors #557
- Fix clear_changed_fields() clearing unsaved documents bug #602
- Removing support for Django 1.4.x, pymongo 2.5.x, pymongo 2.6.x.
- Removing support for Python < 2.6.6
- Fixed $maxDistance location for geoJSON $near queries with MongoDB 2.6+ #664
- QuerySet.modify() and Document.modify() methods to provide find_and_modify() like behaviour #677 #773
- Added support for the using() method on a queryset #676
- PYPY support #673
- Connection pooling #674
- Avoid to open all documents from cursors in an if stmt #655
- Ability to clear the ordering #657
- Raise NotUniqueError in Document.update() on pymongo.errors.DuplicateKeyError #626
- Slots - memory improvements #625
- Fixed incorrectly split a query key when it ends with "_" #619
- Geo docs updates #613
- Workaround a dateutil bug #608
- Conditional save for atomic-style operations #511
- Allow dynamic dictionary-style field access #559
- Increase email field length to accommodate new TLDs #726
- index_cls is ignored when deciding to set _cls as index prefix #733
- Make 'db' argument to connection optional #737
- Allow atomic update for the entire `DictField` #742
- Added MultiPointField, MultiLineField, MultiPolygonField
- Fix multiple connections aliases being rewritten #748
- Fixed a few instances where reverse_delete_rule was written as reverse_delete_rules. #791
- Make `in_bulk()` respect `no_dereference()` #775
- Handle None from model __str__; Fixes #753 #754
- _get_changed_fields fix for embedded documents with id field. #925
Changes in 0.8.7
================
- Calling reload on deleted / nonexistent documents raises DoesNotExist (#538)
- Stop ensure_indexes running on secondaries (#555)
- Fix circular import issue with django auth (#531) (#545)
Changes in 0.8.6
================
- Fix django auth import (#531)
Changes in 0.8.5
================
- Fix multi level nested fields getting marked as changed (#523)
- Django 1.6 login fix (#522) (#527)
- Django 1.6 session fix (#509)
- EmbeddedDocument._instance is now set when setting the attribute (#506)
- Fixed EmbeddedDocument with ReferenceField equality issue (#502)
- Fixed GenericReferenceField serialization order (#499)
- Fixed count and none bug (#498)
- Fixed bug with .only() and DictField with digit keys (#496)
- Added user_permissions to Django User object (#491, #492)
- Fix updating Geo Location fields (#488)
- Fix handling invalid dict field value (#485)
- Added app_label to MongoUser (#484)
- Use defaults when host and port are passed as None (#483)
- Fixed distinct casting issue with ListField of EmbeddedDocuments (#470)
- Fixed Django 1.6 sessions (#454, #480)
Changes in 0.8.4
================
- Remove database name necessity in uri connection schema (#452)
- Fixed "$pull" semantics for nested ListFields (#447)
- Allow fields to be named the same as query operators (#445)
- Updated field filter logic - can now exclude subclass fields (#443)
- Fixed dereference issue with embedded listfield referencefields (#439)
- Fixed slice when using inheritance causing fields to be excluded (#437)
- Fixed ._get_db() attribute after a Document.switch_db() (#441)
- Dynamic Fields store and recompose Embedded Documents / Documents correctly (#449)
- Handle dynamic fieldnames that look like digits (#434)
- Added get_user_document and improve mongo_auth module (#423)
- Added str representation of GridFSProxy (#424)
- Update transform to handle docs erroneously passed to unset (#416)
- Fixed indexing - turn off _cls (#414)
- Fixed dereference threading issue in ComplexField.__get__ (#412)
- Fixed QuerySetNoCache.count() caching (#410)
- Don't follow references in _get_changed_fields (#422, #417)
- Allow args and kwargs to be passed through to_json (#420)
Changes in 0.8.3
================
- Fixed EmbeddedDocuments with `id` also storing `_id` (#402)
- Added get_proxy_object helper to filefields (#391)
- Added QuerySetNoCache and QuerySet.no_cache() for lower memory consumption (#365)
- Fixed sum and average mapreduce dot notation support (#375, #376, #393)
- Fixed as_pymongo to return the id (#386)
- Document.select_related() now respects `db_alias` (#377)
- Reload uses shard_key if applicable (#384)
- Dynamic fields are ordered based on creation and stored in _fields_ordered (#396)
**Potential breaking change:** http://docs.mongoengine.org/en/latest/upgrade.html#to-0-8-3
- Fixed pickling dynamic documents `_dynamic_fields` (#387)
- Fixed ListField setslice and delslice dirty tracking (#390)
- Added Django 1.5 PY3 support (#392)
- Added match ($elemMatch) support for EmbeddedDocuments (#379)
- Fixed weakref being valid after reload (#374)
- Fixed queryset.get() respecting no_dereference (#373)
- Added full_result kwarg to update (#380)
Changes in 0.8.2
================
- Added compare_indexes helper (#361)
- Fixed cascading saves which weren't turned off as planned (#291)
- Fixed Datastructures so instances are a Document or EmbeddedDocument (#363)
- Improved cascading saves write performance (#361)
- Fixed ambiguity and differing behaviour regarding field defaults (#349)
- ImageFields now include PIL error messages if invalid error (#353)
- Added lock when calling doc.Delete() for when signals have no sender (#350)
- Reload forces read preference to be PRIMARY (#355)
- Querysets are now less restrictive when querying duplicate fields (#332, #333)
- FileField now honouring db_alias (#341)
- Removed customised __set__ change tracking in ComplexBaseField (#344)
- Removed unused var in _get_changed_fields (#347)
- Added pre_save_post_validation signal (#345)
- DateTimeField now auto converts valid datetime isostrings into dates (#343)
- DateTimeField now uses dateutil for parsing if available (#343)
- Fixed Doc.objects(read_preference=X) not setting read preference (#352)
- Django session ttl index expiry fixed (#329)
- Fixed pickle.loads (#342)
- Documentation fixes
Changes in 0.8.1
================
- Fixed Python 2.6 django auth importlib issue (#326)
- Fixed pickle unsaved document regression (#327)
Changes in 0.8.0
================
- Fixed querying ReferenceField custom_id (#317)
- Fixed pickle issues with collections (#316)
- Added `get_next_value` preview for SequenceFields (#319)
- Added no_sub_classes context manager and queryset helper (#312)
- Querysets now utilise a local cache
- Changed __len__ behaviour in the queryset (#247, #311)
- Fixed querying string versions of ObjectIds issue with ReferenceField (#307)
- Added $setOnInsert support for upserts (#308)
- Upserts now possible with just query parameters (#309)
- Upserting is the only way to ensure docs are saved correctly (#306)
- Fixed register_delete_rule inheritance issue
- Fix cloning of sliced querysets (#303)
- Fixed update_one write concern (#302)
- Updated minimum requirement for pymongo to 2.5
- Add support for new geojson fields, indexes and queries (#299)
- If values can't be compared, mark as changed (#287)
- Ensure as_pymongo() and to_json honour only() and exclude() (#293)
- Document serialization uses field order to ensure a strict order is set (#296)
- DecimalField now stores as float not string (#289)
- UUIDField now stores as a binary by default (#292)
- Added Custom User Model for Django 1.5 (#285)
- Cascading saves now default to off (#291)
- ReferenceField now stores ObjectIds by default rather than DBRefs (#290)
- Added ImageField support for inline replacements (#86)
- Added SequenceField.set_next_value(value) helper (#159)
- Updated .only() behaviour - now, like exclude(), it is chainable (#202)
- Added with_limit_and_skip support to count() (#235)
- Objects queryset manager now inherited (#256)
- Updated connection to use MongoClient (#262, #274)
- Fixed db_alias and inherited Documents (#143)
- Documentation update for document errors (#124)
- Deprecated `get_or_create` (#35)
- Updated inheritable objects created by upsert now contain _cls (#118)
- Added support for creating documents with embedded documents in a single operation (#6)
- Added to_json and from_json to Document (#1)
- Added to_json and from_json to QuerySet (#131)
- Updated index creation now tied to Document class (#102)
- Added none() to queryset (#127)
- Updated SequenceFields to allow post processing of the calculated counter value (#141)
- Added clean method to documents for pre validation data cleaning (#60)
- Added support for setting read preference at a query level (#157)
- Added _instance to EmbeddedDocuments pointing to the parent (#139)
- Inheritance is off by default (#122)
- Remove _types and just use _cls for inheritance (#148)
- Only allow QNode instances to be passed as query objects (#199)
- Dynamic fields are now validated on save (#153) (#154)
- Added support for multiple slices and made slicing chainable. (#170) (#190) (#191)
- Fixed GridFSProxy __getattr__ behaviour (#196)
- Fix Django timezone support (#151)
- Simplified Q objects, removed QueryTreeTransformerVisitor (#98) (#171)
- FileFields now copyable (#198)
- Querysets now return clones and are no longer edited in place (#56)
- Added support for $maxDistance (#179)
- Uses getlasterror to test whether a save created or updated a document (#163)
- Fixed inheritance and unique index creation (#140)
- Fixed reverse delete rule with inheritance (#197)
- Fixed validation for GenericReferences which haven't been dereferenced
- Added switch_db context manager (#106)
- Added switch_db method to document instances (#106)
- Added no_dereference context manager (#82) (#61)
- Added switch_collection context manager (#220)
- Added switch_collection method to document instances (#220)
- Added support for compound primary keys (#149) (#121)
- Fixed overriding objects with custom manager (#58)
- Added no_dereference method for querysets (#82) (#61)
- Undefined data should not override instance methods (#49)
- Added Django Group and Permission (#142)
- Added Doc class and pk to Validation messages (#69)
- Fixed Documents deleted via a queryset don't call any signals (#105)
- Added the "get_decoded" method to the MongoSession class (#216)
- Fixed invalid choices error bubbling (#214)
- Updated Save so it calls $set and $unset in a single operation (#211)
- Fixed inner queryset looping (#204)
Changes in 0.7.10
=================
- Fix UnicodeEncodeError for dbref (#278)
- Allow construction using positional parameters (#268)
- Updated EmailField length to support long domains (#243)
- Added 64-bit integer support (#251)
- Added Django sessions TTL support (#224)
- Fixed issue with numerical keys in MapField(EmbeddedDocumentField()) (#240)
- Fixed clearing _changed_fields for complex nested embedded documents (#237, #239, #242)
- Added "id" back to _data dictionary (#255)
- Only mark a field as changed if the value has changed (#258)
- Explicitly check for Document instances when dereferencing (#261)
- Fixed order_by chaining issue (#265)
- Added dereference support for tuples (#250)
- Resolve field name to db field name when using distinct (#260, #264, #269)
- Added kwargs to doc.save to help interop with django (#223, #270)
- Fixed cloning querysets in PY3
- Int fields no longer unset in save when changed to 0 (#272)
- Fixed ReferenceField query chaining bug fixed (#254)
Changes in 0.7.9
================
- Better fix handling for old style _types
- Embedded SequenceFields follow collection naming convention
Changes in 0.7.8
================
- Fix sequence fields in embedded documents (#166)
- Fix query chaining with .order_by() (#176)
- Added optional encoding and collection config for Django sessions (#180, #181, #183)
- Fixed EmailField so can add extra validation (#173, #174, #187)
- Fixed bulk inserts so they can now handle custom pks (#192)
- Added as_pymongo method to return raw or cast results from pymongo (#193)
Changes in 0.7.7
================
- Fix handling for old style _types
Changes in 0.7.6
================
- Unicode fix for repr (#133)
- Allow updates with match operators (#144)
- Updated URLField - the regex can now be overridden (#136)
- Allow Django AuthenticationBackends to work with Django user (hmarr/mongoengine#573)
- Fixed reload issue with ReferenceField where dbref=False (#138)
Changes in 0.7.5
================
- ReferenceFields with dbref=False use ObjectId instead of strings (#134)
See ticket for upgrade notes (#134)
Changes in 0.7.4
================
- Fixed index inheritance issues - firmed up testcases (#123) (#125)
Changes in 0.7.3
================
- Reverted EmbeddedDocuments meta handling - now can turn off inheritance (#119)
Changes in 0.7.2
================
- Update index spec generation so it's not destructive (#113)
Changes in 0.7.1
================
- Fixed index spec inheritance (#111)
Changes in 0.7.0
================
- Updated queryset.delete so you can use with skip / limit (#107)
- Updated index creation allows kwargs to be passed through refs (#104)
- Fixed Q object merge edge case (#109)
- Fixed reloading on sharded documents (hmarr/mongoengine#569)
- Added NotUniqueError for duplicate keys (#62)
- Added custom collection / sequence naming for SequenceFields (#92)
- Fixed UnboundLocalError in composite index with pk field (#88)
- Updated ReferenceFields to optionally store ObjectId strings; this will become the default in 0.8 (#89)
- Added FutureWarning - save will default to `cascade=False` in 0.8
- Added example of indexing embedded document fields (#75)
- Fixed ImageField resizing when forcing size (#80)
- Add flexibility for fields handling bad data (#78)
- Embedded Documents no longer handle meta definitions
- Use weakref proxies in base lists / dicts (#74)
- Improved queryset filtering (hmarr/mongoengine#554)
- Fixed Dynamic Documents and Embedded Documents (hmarr/mongoengine#561)
- Fixed abstract classes and shard keys (#64)
- Fixed Python 2.5 support
- Added Python 3 support (thanks to Laine Heron)
Changes in 0.6.20
=================
- Added support for distinct and db_alias (#59)
- Improved support for chained querysets when constraining the same fields (hmarr/mongoengine#554)
- Fixed BinaryField lookup re (#48)
Changes in 0.6.19
=================
- Added Binary support to UUID (#47)
- Fixed MapField lookup for fields without declared lookups (#46)
- Fixed BinaryField python value issue (#48)
- Fixed SequenceField non numeric value lookup (#41)
- Fixed queryset manager issue (#52)
- Fixed FileField comparison (hmarr/mongoengine#547)
Changes in 0.6.18
=================
- Fixed recursion loading bug in _get_changed_fields
Changes in 0.6.17
=================
- Fixed issue with custom queryset manager expecting explicit variable names
Changes in 0.6.16
=================
- Fixed issue where db_alias wasn't inherited
Changes in 0.6.15
=================
- Updated validation error messages
- Added support for null / zero / false values in item_frequencies
- Fixed cascade save edge case
- Fixed geo index creation through reference fields
- Added support for args / kwargs when using @queryset_manager
- Fixed dereferencing lists with custom ids
Changes in 0.6.14
=================
- Fixed error dict with nested validation
- Fixed Int/Float fields and not equals None
- Exclude tests from installation
- Allow tuples for index meta
- Fixed use of str in instance checks
- Fixed unicode support in transform update
- Added support for add_to_set and each
Changes in 0.6.13
=================
- Fixed EmbeddedDocument db_field validation issue
- Fixed StringField unicode issue
- Fixes __repr__ modifying the cursor
Changes in 0.6.12
=================
- Fixes scalar lookups for primary_key
- Fixes error with _delta handling DBRefs
Changes in 0.6.11
=================
- Fixed inconsistency handling None values field attrs
- Fixed map_field embedded db_field issue
- Fixed .save() _delta issue with DbRefs
- Fixed Django TestCase
- Added cmp to Embedded Document
- Added PULL reverse_delete_rule
- Fixed CASCADE delete bug
- Fixed db_field data load error
- Fixed recursive save with FileField
Changes in 0.6.10
=================
- Fixed basedict / baselist to return super(..)
- Promoted BaseDynamicField to DynamicField
Changes in 0.6.9
================
- Fixed sparse indexes on inherited docs
- Removed FileField auto deletion, needs more work maybe 0.7
Changes in 0.6.8
================
- Fixed FileField losing reference when no default set
- Removed possible race condition from FileField (grid_file)
- Added assignment to save, can now do: `b = MyDoc(**kwargs).save()`
- Added support for pull operations on nested EmbeddedDocuments
- Added support for choices with GenericReferenceFields
- Added support for choices with GenericEmbeddedDocumentFields
- Fixed Django 1.4 sessions first save data loss
- FileField now automatically deletes files on .delete()
- Fix for GenericReference to_mongo method
- Fixed connection regression
- Updated Django User document, now allows inheritance
Changes in 0.6.7
================
- Fixed indexing on '_id' or 'pk' or 'id'
- Invalid data from the DB now raises an InvalidDocumentError
- Cleaned up the Validation Error - docs and code
- Added meta `auto_create_index` so you can disable index creation
- Added write concern options to inserts
- Fixed typo in meta for index options
- Bug fix: read preference now passed correctly
- Added support for File like objects for GridFS
- Fix for #473 - Dereferencing abstracts
Changes in 0.6.6
================
- Django 1.4 fixed (finally)
- Added tests for Django
Changes in 0.6.5
================
- More Django updates
Changes in 0.6.4
================
- Refactored connection / fixed replicasetconnection
- Bug fix for unknown connection alias error message
- Sessions support Django 1.3 and Django 1.4
- Minor fix for ReferenceField
Changes in 0.6.3
================
- Updated sessions for Django 1.4
- Bug fix for updates where listfields contain embedded documents
- Bug fix for collection naming and mixins
Changes in 0.6.2
================
- Updated documentation for ReplicaSet connections
- Hacked around the _types issue with SERVER-5247 - querying other arrays may also cause problems.
Changes in 0.6.1
================
- Fix for replicaSet connections
Changes in 0.6
==============
- Added FutureWarning to inherited classes not declaring 'allow_inheritance' as the default will change in 0.7
- Added support for covered indexes when inheritance is off
- No longer always upsert on save for items with a '_id'
- Error raised if update doesn't have an operation
- DeReferencing is now thread safe
- Errors raised if trying to perform a join in a query
- Updates can now take __raw__ queries
- Added custom 2D index declarations
- Added replicaSet connection support
- Updated deprecated imports from pymongo (safe for pymongo 2.2)
- Added uri support for connections
- Added scalar for efficiently returning partial data values (aliased to values_list)
- Fixed limit skip bug
- Improved Inheritance / Mixin
- Added sharding support
- Added pymongo 2.1 support
- Fixed abstract documents so they can now declare indexes
- Added db_alias support to individual documents
- Fixed GridFS documents can now be pickled
- Now raises an InvalidDocumentError when declaring multiple fields with the same db_field
- Added InvalidQueryError when calling with_id with a filter
- Added support for DBRefs in distinct()
- Fixed issue saving False booleans
- Fixed issue with dynamic documents deltas
- Added Reverse Delete Rule support to ListFields - MapFields aren't supported
- Added customisable cascade kwarg options
- Fixed handling of None values for non-required fields
- Removed Document._get_subclasses() - no longer required
- Fixed bug requiring subclasses when not actually needed
- Fixed deletion of dynamic data
- Added support for the $elementMatch operator
- Added reverse option to SortedListFields
- Fixed dereferencing - multi directional list dereferencing
- Fixed issue creating indexes with recursive embedded documents
- Fixed recursive lookup in _unique_with_indexes
- Fixed passing ComplexField defaults to constructor for ReferenceFields
- Fixed validation of DictField Int keys
- Added optional cascade saving
- Fixed dereferencing - max_depth now taken into account
- Fixed document mutation saving issue
- Fixed positional operator when replacing embedded documents
- Added Non-Django Style choices back (you can have either)
- Fixed __repr__ of a sliced queryset
- Added recursive validation error of documents / complex fields
- Fixed breaking during queryset iteration
- Added pre and post bulk-insert signals
- Added ImageField - requires PIL
- Fixed Reference Fields can be None in get_or_create / queries
- Fixed accessing pk on an embedded document
- Fixed calling a queryset after drop_collection now recreates the collection
- Add field name to validation exception messages
- Added UUID field
- Improved efficiency of .get()
- Updated ComplexFields so if required they won't accept empty lists / dicts
- Added spec file for rpm-based distributions
- Fixed ListField so it doesn't accept strings
- Added DynamicDocument and EmbeddedDynamicDocument classes for expando schemas
Changes in v0.5.2
=================
- A robust circular reference bugfix
Changes in v0.5.1
=================
- Fixed simple circular reference bug
Changes in v0.5
===============
- Added InvalidDocumentError - so Document core methods can't be overwritten
- Added GenericEmbeddedDocument - so you can embed any type of embeddable document
- Added within_polygon support - for those with mongodb 1.9
- Updated sum / average to use map_reduce as db.eval doesn't work in sharded environments
- Added where() - a filter allowing users to specify query expressions as Javascript
- Added SequenceField - for creating sequential counters
- Added update() convenience method to a document
- Added cascading saves - so changes to Referenced documents are saved on .save()
- Added select_related() support
- Added support for the positional operator
- Updated geo index checking to be recursive and check in embedded documents
- Updated default collection naming convention
- Added Document Mixin support
- Fixed queryset __repr__ mid-iteration
- Added hint() support, so can tell Mongo the proper index to use for the query
- Fixed issue with inconsistent setting of _cls breaking inherited referencing
- Added help_text and verbose_name to fields to help with some form libs
- Updated item_frequencies to handle embedded document lookups
- Added delta tracking now only sets / unsets explicitly changed fields
- Fixed saving so sets updated values rather than overwrites
- Added ComplexDateTimeField - Handles datetimes correctly with microseconds
- Added ComplexBaseField - for improved flexibility and performance
- Added get_FIELD_display() method for easy choice field displaying
- Added queryset.slave_okay(enabled) method
- Updated queryset.timeout(enabled) and queryset.snapshot(enabled) to be chainable
- Added insert method for bulk inserts
- Added blinker signal support
- Added query_counter context manager for tests
- Added map_reduce method item_frequencies and set as default (as db.eval doesn't work in sharded environments)
- Added inline_map_reduce option to map_reduce
- Updated connection exception so it provides more info on the cause.
- Added searching multiple levels deep in ``DictField``
- Added ``DictField`` entries containing strings to use matching operators
- Added ``MapField``, similar to ``DictField``
- Added Abstract Base Classes
- Added Custom Objects Managers
- Added sliced subfields updating
- Added ``NotRegistered`` exception if dereferencing ``Document`` not in the registry
- Added a write concern for ``save``, ``update``, ``update_one`` and ``get_or_create``
- Added slicing / subarray fetching controls
- Fixed various unique index and other index issues
- Fixed threaded connection issues
- Added spherical geospatial query operators
- Updated queryset to handle latest version of pymongo
map_reduce now requires an output.
- Added ``Document`` __hash__, __ne__ for pickling
- Added ``FileField`` optional size arg for read method
- Fixed ``FileField`` seek and tell methods for reading files
- Added ``QuerySet.clone`` to support copying querysets
- Fixed item_frequencies when using a name that's the same as a native js function
- Added reverse delete rules
- Fixed issue with unset operation
- Fixed Q-object bug
- Added ``QuerySet.all_fields`` resets previous .only() and .exclude()
- Added ``QuerySet.exclude``
- Added django style choices
- Fixed order and filter issue
- Added ``QuerySet.only`` subfield support
- Added creation_counter to ``BaseField`` allowing fields to be sorted in the
way the user has specified them
- Fixed various errors
- Added many tests
Changes in v0.4
===============
- Added ``GridFSStorage`` Django storage backend
@@ -805,7 +32,7 @@ Changes in v0.3
===============
- Added MapReduce support
- Added ``contains``, ``startswith`` and ``endswith`` query operators (and
case-insensitive versions that are prefixed with 'i')
- Deprecated fields' ``name`` parameter, replaced with ``db_field``
- Added ``QuerySet.only`` for only retrieving specific fields
- Added ``QuerySet.in_bulk()`` for bulk querying using ids
@@ -852,7 +79,7 @@ Changes in v0.2
===============
- Added ``Q`` class for building advanced queries
- Added ``QuerySet`` methods for atomic updates to documents
- Fields may now specify ``unique=True`` to enforce uniqueness across a
collection
- Added option for default document ordering
- Fixed bug in index definitions
@@ -860,7 +87,7 @@ Changes in v0.2
Changes in v0.1.3
=================
- Added Django authentication backend
- Added ``Document.meta`` support for indexes, which are ensured just before
querying takes place
- A few minor bugfixes
@@ -17,10 +17,6 @@ class Post(Document):
tags = ListField(StringField(max_length=30))
comments = ListField(EmbeddedDocumentField(Comment))
# bugfix
meta = {'allow_inheritance': True}
class TextPost(Post):
content = StringField()
@@ -49,8 +45,7 @@ print 'ALL POSTS'
print
for post in Post.objects:
print post.title
#print '=' * post.title.count()
print "=" * 20
print '=' * len(post.title)
if isinstance(post, TextPost):
print post.content
@@ -13,14 +13,10 @@
import sys, os
import sphinx_rtd_theme
import mongoengine
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
sys.path.append(os.path.abspath('..'))
# -- General configuration -----------------------------------------------------
@@ -42,12 +38,13 @@ master_doc = 'index'
# General information about the project.
project = u'MongoEngine'
copyright = u'2009, MongoEngine Authors'
copyright = u'2009-2010, Harry Marr'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
import mongoengine
# The short X.Y version.
version = mongoengine.get_version()
# The full version, including alpha/beta/rc tags.
@@ -95,17 +92,15 @@ pygments_style = 'sphinx'
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'sphinx_rtd_theme'
html_theme = 'nature'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
'canonical_url': 'http://docs.mongoengine.org/en/latest/'
}
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
html_theme_path = ['_themes']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
@@ -121,12 +116,12 @@ html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = "favicon.ico"
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
@@ -137,11 +132,7 @@ html_favicon = "favicon.ico"
html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
html_sidebars = {
'index': ['globaltoc.html', 'searchbox.html'],
'**': ['localtoc.html', 'relations.html', 'searchbox.html']
}
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
@@ -182,8 +173,8 @@ latex_paper_size = 'a4'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'MongoEngine.tex', 'MongoEngine Documentation',
'Ross Lawley', 'manual'),
('index', 'MongoEngine.tex', u'MongoEngine Documentation',
u'Harry Marr', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
@@ -202,5 +193,3 @@ latex_documents = [
# If false, no module index is generated.
#latex_use_modindex = True
autoclass_content = 'both'
@@ -1,19 +1,87 @@
=============================
Using MongoEngine with Django
=============================
Connecting
==========
In your **settings.py** file, ignore the standard database settings (unless you
also plan to use the ORM in your project), and instead call
:func:`~mongoengine.connect` somewhere in the settings module.
Authentication
==============
Django Support
==============
MongoEngine includes a Django authentication backend, which uses MongoDB. The
:class:`~mongoengine.django.auth.User` model is a MongoEngine
:class:`~mongoengine.Document`, but implements most of the methods and
attributes that the standard Django :class:`User` model does - so the two are
moderately compatible. Using this backend will allow you to store users in
MongoDB but still use much of the Django authentication infrastructure (such as
the :func:`login_required` decorator and the :func:`authenticate` function). To
enable the MongoEngine auth backend, add the following to your **settings.py**
file::
.. note:: Django support has been split from the main MongoEngine
repository. The *legacy* Django extension may be found bundled with the
0.9 release of MongoEngine.
AUTHENTICATION_BACKENDS = (
'mongoengine.django.auth.MongoEngineBackend',
)
The :mod:`~mongoengine.django.auth` module also contains a
:func:`~mongoengine.django.auth.get_user` helper function, that takes a user's
:attr:`id` and returns a :class:`~mongoengine.django.auth.User` object.
.. versionadded:: 0.1.3
Help Wanted!
------------
Sessions
========
Django allows the use of different backend stores for its sessions. MongoEngine
provides a MongoDB-based session backend for Django, which allows you to use
sessions in your Django application with just MongoDB. To enable the MongoEngine
session backend, ensure that your settings module has
``'django.contrib.sessions.middleware.SessionMiddleware'`` in the
``MIDDLEWARE_CLASSES`` field and ``'django.contrib.sessions'`` in your
``INSTALLED_APPS``. From there, all you need to do is add the following line
into your settings module::
The MongoEngine team is looking for help contributing and maintaining a new
Django extension for MongoEngine! If you have Django experience and would like
to help contribute to the project, please get in touch on the
`mailing list <http://groups.google.com/group/mongoengine-users>`_ or by
simply contributing on
`GitHub <https://github.com/MongoEngine/django-mongoengine>`_.
SESSION_ENGINE = 'mongoengine.django.sessions'
.. versionadded:: 0.2.1
Storage
=======
With MongoEngine's support for GridFS via the :class:`~mongoengine.FileField`,
it is useful to have a Django file storage backend that wraps this. The new
storage module is called :class:`~mongoengine.django.GridFSStorage`. Using it
is very similar to using the default FileSystemStorage.::
fs = mongoengine.django.GridFSStorage()
filename = fs.save('hello.txt', 'Hello, World!')
All of the `Django Storage API methods
<http://docs.djangoproject.com/en/dev/ref/files/storage/>`_ have been
implemented except :func:`path`. If the filename provided already exists, an
underscore and a number (before the file extension, if one exists) will be
appended to the filename until the generated filename doesn't exist. The
:func:`save` method will return the new filename.::
>>> fs.exists('hello.txt')
True
>>> fs.open('hello.txt').read()
'Hello, World!'
>>> fs.size('hello.txt')
13
>>> fs.url('hello.txt')
'http://your_media_url/hello.txt'
>>> fs.open('hello.txt').name
'hello.txt'
>>> fs.listdir()
([], [u'hello.txt'])
All files will be saved and retrieved in GridFS via the :class:`FileDocument`
document, allowing easy access to the files without the GridFSStorage
backend.::
>>> from mongoengine.django.storage import FileDocument
>>> FileDocument.objects()
[<FileDocument: FileDocument object>]
.. versionadded:: 0.4
@@ -3,137 +3,18 @@
=====================
Connecting to MongoDB
=====================
To connect to a running instance of :program:`mongod`, use the
:func:`~mongoengine.connect` function. The first argument is the name of the
database to connect to::
database to connect to. If the database does not exist, it will be created. If
the database requires authentication, :attr:`username` and :attr:`password`
arguments may be provided::
from mongoengine import connect
connect('project1')
connect('project1', username='webapp', password='pwd123')
By default, MongoEngine assumes that the :program:`mongod` instance is running
on **localhost** on port **27017**. If MongoDB is running elsewhere, you should
provide the :attr:`host` and :attr:`port` arguments to
on **localhost** on port **27017**. If MongoDB is running elsewhere, you may
provide :attr:`host` and :attr:`port` arguments to
:func:`~mongoengine.connect`::
connect('project1', host='192.168.1.35', port=12345)
If the database requires authentication, :attr:`username` and :attr:`password`
arguments should be provided::
connect('project1', username='webapp', password='pwd123')
URI style connections are also supported -- just supply the URI as
the :attr:`host` to
:func:`~mongoengine.connect`::
connect('project1', host='mongodb://localhost/database_name')
.. note:: Database, username and password from URI string override
corresponding parameters in :func:`~mongoengine.connect`: ::
connect(
db='test',
username='user',
password='12345',
host='mongodb://admin:qwerty@localhost/production'
)
will establish a connection to the ``production`` database using the
``admin`` username and ``qwerty`` password.
Replica Sets
============
MongoEngine supports connecting to replica sets::
from mongoengine import connect
# Regular connect
connect('dbname', replicaset='rs-name')
# MongoDB URI-style connect
connect(host='mongodb://localhost/dbname?replicaSet=rs-name')
Read preferences are supported through the connection or via individual
queries by passing the read_preference ::
Bar.objects().read_preference(ReadPreference.PRIMARY)
Bar.objects(read_preference=ReadPreference.PRIMARY)
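For completeness, a minimal sketch of a per-query read preference (the import comes from pymongo; ``SECONDARY_PREFERRED`` is an illustrative choice)::

from pymongo import ReadPreference

# Route just this query to a secondary where possible.
posts = Bar.objects().read_preference(ReadPreference.SECONDARY_PREFERRED)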
Multiple Databases
==================
To use multiple databases you can use :func:`~mongoengine.connect` and provide
an `alias` name for the connection - if no `alias` is provided then "default"
is used.
In the background this uses :func:`~mongoengine.register_connection` to
store the data and you can register all aliases up front if required.
Individual documents can also support multiple databases by providing a
`db_alias` in their meta data. This allows :class:`~pymongo.dbref.DBRef`
objects to point across databases and collections. Below is an example schema,
using 3 different databases to store data::
class User(Document):
name = StringField()
meta = {'db_alias': 'user-db'}
class Book(Document):
name = StringField()
meta = {'db_alias': 'book-db'}
class AuthorBooks(Document):
author = ReferenceField(User)
book = ReferenceField(Book)
meta = {'db_alias': 'users-books-db'}
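A minimal sketch of registering those aliases up front (the database names and the local connection are illustrative assumptions)::

from mongoengine import connect

# Each alias maps a name used in a document's 'db_alias' meta to a database.
connect('users', alias='user-db')
connect('books', alias='book-db')
connect('users_books', alias='users-books-db')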
Context Managers
================
Sometimes you may want to switch the database or collection to query against.
For example, archiving older data into a separate database for performance
reasons or writing functions that dynamically choose collections to write
a document to.
Switch Database
---------------
The :class:`~mongoengine.context_managers.switch_db` context manager allows
you to change the database alias for a given class allowing quick and easy
access to the same User document across databases::
from mongoengine.context_managers import switch_db
class User(Document):
name = StringField()
meta = {'db_alias': 'user-db'}
with switch_db(User, 'archive-user-db') as User:
User(name='Ross').save()  # Saves to the 'archive-user-db'
Switch Collection
-----------------
The :class:`~mongoengine.context_managers.switch_collection` context manager
allows you to change the collection for a given class allowing quick and easy
access to the same Group document across collections::
from mongoengine.context_managers import switch_collection
class Group(Document):
name = StringField()
Group(name='test').save() # Saves in the default db
with switch_collection(Group, 'group2000') as Group:
Group(name='hello Group 2000 collection!').save() # Saves in group2000 collection
.. note:: Make sure any aliases have been registered with
:func:`~mongoengine.register_connection` or :func:`~mongoengine.connect`
before using the context manager.
@@ -4,14 +4,14 @@ Defining documents
In MongoDB, a **document** is roughly equivalent to a **row** in an RDBMS. When
working with relational databases, rows are stored in **tables**, which have a
strict **schema** that the rows follow. MongoDB stores documents in
**collections** rather than tables --- the principal difference is that no schema
is enforced at a database level.
**collections** rather than tables - the principle difference is that no schema
is enforced at a database level.
Defining a document's schema
============================
MongoEngine allows you to define schemata for documents as this helps to reduce
coding errors, and allows for utility methods to be defined on fields which may
be present.
To define a schema for a document, create a class that inherits from
:class:`~mongoengine.Document`. Fields are specified by adding **field
@@ -19,43 +19,11 @@ objects** as class attributes to the document class::
from mongoengine import *
import datetime
class Page(Document):
title = StringField(max_length=200, required=True)
date_modified = DateTimeField(default=datetime.datetime.now)
As BSON (the binary format for storing data in MongoDB) is order dependent,
documents are serialized based on their field order.
Dynamic document schemas
========================
One of the benefits of MongoDB is dynamic schemas for a collection. Whilst data
should be planned and organised (after all, explicit is better than implicit!),
there are scenarios where having dynamic / expando style documents is desirable.
:class:`~mongoengine.DynamicDocument` documents work in the same way as
:class:`~mongoengine.Document` but any data / attributes set to them will also
be saved ::
from mongoengine import *
class Page(DynamicDocument):
title = StringField(max_length=200, required=True)
# Create a new page and add tags
>>> page = Page(title='Using MongoEngine')
>>> page.tags = ['mongodb', 'mongoengine']
>>> page.save()
>>> Page.objects(tags='mongoengine').count()
1
.. note::
There is one caveat on Dynamic Documents: fields cannot start with `_`
Dynamic fields are stored in creation order *after* any declared fields.
Fields
======
By default, fields are not required. To make a field mandatory, set the
@@ -63,62 +31,51 @@ By default, fields are not required. To make a field mandatory, set the
validation constraints available (such as :attr:`max_length` in the example
above). Fields may also take default values, which will be used if a value is
not provided. Default values may optionally be a callable, which will be called
to retrieve the value (such as in the above example). The field types available
are as follows:
* :class:`~mongoengine.fields.BinaryField`
* :class:`~mongoengine.fields.BooleanField`
* :class:`~mongoengine.fields.ComplexDateTimeField`
* :class:`~mongoengine.fields.DateTimeField`
* :class:`~mongoengine.fields.DecimalField`
* :class:`~mongoengine.fields.DictField`
* :class:`~mongoengine.fields.DynamicField`
* :class:`~mongoengine.fields.EmailField`
* :class:`~mongoengine.fields.EmbeddedDocumentField`
* :class:`~mongoengine.fields.EmbeddedDocumentListField`
* :class:`~mongoengine.fields.FileField`
* :class:`~mongoengine.fields.FloatField`
* :class:`~mongoengine.fields.GenericEmbeddedDocumentField`
* :class:`~mongoengine.fields.GenericReferenceField`
* :class:`~mongoengine.fields.GeoPointField`
* :class:`~mongoengine.fields.ImageField`
* :class:`~mongoengine.fields.IntField`
* :class:`~mongoengine.fields.ListField`
* :class:`~mongoengine.fields.MapField`
* :class:`~mongoengine.fields.ObjectIdField`
* :class:`~mongoengine.fields.ReferenceField`
* :class:`~mongoengine.fields.SequenceField`
* :class:`~mongoengine.fields.SortedListField`
* :class:`~mongoengine.fields.StringField`
* :class:`~mongoengine.fields.URLField`
* :class:`~mongoengine.fields.UUIDField`
* :class:`~mongoengine.fields.PointField`
* :class:`~mongoengine.fields.LineStringField`
* :class:`~mongoengine.fields.PolygonField`
* :class:`~mongoengine.fields.MultiPointField`
* :class:`~mongoengine.fields.MultiLineStringField`
* :class:`~mongoengine.fields.MultiPolygonField`
* :class:`~mongoengine.StringField`
* :class:`~mongoengine.URLField`
* :class:`~mongoengine.IntField`
* :class:`~mongoengine.FloatField`
* :class:`~mongoengine.DecimalField`
* :class:`~mongoengine.DateTimeField`
* :class:`~mongoengine.ListField`
* :class:`~mongoengine.DictField`
* :class:`~mongoengine.ObjectIdField`
* :class:`~mongoengine.EmbeddedDocumentField`
* :class:`~mongoengine.ReferenceField`
* :class:`~mongoengine.GenericReferenceField`
* :class:`~mongoengine.BooleanField`
* :class:`~mongoengine.FileField`
* :class:`~mongoengine.EmailField`
* :class:`~mongoengine.SortedListField`
* :class:`~mongoengine.BinaryField`
* :class:`~mongoengine.GeoPointField`
Field arguments
---------------
Each field type can be customized by keyword arguments. The following keyword
arguments can be set on all fields:
:attr:`db_field` (Default: None)
The MongoDB field name.
:attr:`name` (Default: None)
The mongoengine field name.
:attr:`required` (Default: False)
If set to True and the field is not set on the document instance, a
:class:`~mongoengine.ValidationError` will be raised when the document is
:class:`~mongoengine.base.ValidationError` will be raised when the document is
validated.
:attr:`default` (Default: None)
A value to use when no value is set for this field.
The definition of default parameters follows `the general rules on Python
<http://docs.python.org/reference/compound_stmts.html#function-definitions>`__,
which means that some care should be taken when dealing with default mutable objects
(like in :class:`~mongoengine.fields.ListField` or :class:`~mongoengine.fields.DictField`)::
(like in :class:`~mongoengine.ListField` or :class:`~mongoengine.DictField`)::
class ExampleFirst(Document):
# Default an empty list
@@ -132,8 +89,7 @@ arguments can be set on all fields:
# This can make an .append call to add values to the default (and all the following objects),
# instead to just an object
values = ListField(IntField(), default=[1,2,3])
.. note:: Unsetting a field with a default value will revert back to the default.
:attr:`unique` (Default: False)
When True, no documents in the collection will have the same value for this
@@ -144,47 +100,17 @@ arguments can be set on all fields:
field, will not have two documents in the collection with the same value.
:attr:`primary_key` (Default: False)
When True, use this field as a primary key for the collection. `DictField`
and `EmbeddedDocuments` both support being the primary key for a document.
.. note:: If set, this field is also accessible through the `pk` field.
:attr:`choices` (Default: None)
An iterable (e.g. list, tuple or set) of choices to which the value of this
field should be limited.
Can be either be a nested tuples of value (stored in mongo) and a
human readable key ::
SIZE = (('S', 'Small'),
('M', 'Medium'),
('L', 'Large'),
('XL', 'Extra Large'),
('XXL', 'Extra Extra Large'))
class Shirt(Document):
size = StringField(max_length=3, choices=SIZE)
Or a flat iterable just containing values ::
SIZE = ('S', 'M', 'L', 'XL', 'XXL')
class Shirt(Document):
size = StringField(max_length=3, choices=SIZE)
:attr:`**kwargs` (Optional)
You can supply additional metadata as arbitrary additional keyword
arguments. You can not override existing attributes, however. Common
choices include `help_text` and `verbose_name`, commonly used by form and
widget libraries.
An iterable of choices to which the value of this field should be limited.
List fields
-----------
MongoDB allows storing lists of items. To add a list of items to a
:class:`~mongoengine.Document`, use the :class:`~mongoengine.fields.ListField` field
type. :class:`~mongoengine.fields.ListField` takes another field object as its first
MongoDB allows the storage of lists of items. To add a list of items to a
:class:`~mongoengine.Document`, use the :class:`~mongoengine.ListField` field
type. :class:`~mongoengine.ListField` takes another field object as its first
argument, which specifies which type elements may be stored within the list::
class Page(Document):
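    # (hypothetical completion - the diff truncates the original example)
    tags = ListField(StringField(max_length=50))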
@@ -195,30 +121,30 @@ Embedded documents
MongoDB has the ability to embed documents within other documents. Schemata may
be defined for these embedded documents, just as they may be for regular
documents. To create an embedded document, just define a document as usual, but
inherit from :class:`~mongoengine.EmbeddedDocument` rather than
:class:`~mongoengine.Document`::
class Comment(EmbeddedDocument):
content = StringField()
To embed the document within another document, use the
:class:`~mongoengine.fields.EmbeddedDocumentField` field type, providing the embedded
:class:`~mongoengine.EmbeddedDocumentField` field type, providing the embedded
document class as the first argument::
class Page(Document):
comments = ListField(EmbeddedDocumentField(Comment))
comment1 = Comment(content='Good work!')
comment2 = Comment(content='Nice article!')
comment1 = Comment('Good work!')
comment2 = Comment('Nice article!')
page = Page(comments=[comment1, comment2])
Dictionary Fields
-----------------
Often, an embedded document may be used instead of a dictionary - generally
embedded documents are recommended, as dictionaries don't support validation
or custom field types. However, sometimes you will not know the structure of what you want to
store; in this situation a :class:`~mongoengine.fields.DictField` is appropriate::
Often, an embedded document may be used instead of a dictionary -- generally
this is recommended as dictionaries don't support validation or custom field
types. However, sometimes you will not know the structure of what you want to
store; in this situation a :class:`~mongoengine.DictField` is appropriate::
class SurveyResponse(Document):
date = DateTimeField()
user = ReferenceField(User)
@@ -226,19 +152,16 @@ store; in this situation a :class:`~mongoengine.fields.DictField` is appropriate
survey_response = SurveyResponse(date=datetime.now(), user=request.user)
response_form = ResponseForm(request.POST)
survey_response.answers = response_form.cleaned_data()
survey_response.save()
Dictionaries can store complex data, other dictionaries, lists, references to
other objects, so are the most flexible field type available.
Reference fields
----------------
References may be stored to other documents in the database using the
:class:`~mongoengine.fields.ReferenceField`. Pass in another document class as the
:class:`~mongoengine.ReferenceField`. Pass in another document class as the
first argument to the constructor, then simply assign document objects to the
field::
class User(Document):
name = StringField()
@@ -256,9 +179,9 @@ field::
The :class:`User` object is automatically turned into a reference behind the
scenes, and dereferenced when the :class:`Page` object is retrieved.
To add a :class:`~mongoengine.fields.ReferenceField` that references the document
To add a :class:`~mongoengine.ReferenceField` that references the document
being defined, use the string ``'self'`` in place of the document class as the
argument to :class:`~mongoengine.fields.ReferenceField`'s constructor. To reference a
argument to :class:`~mongoengine.ReferenceField`'s constructor. To reference a
document that has not yet been defined, use the name of the undefined document
as the constructor's argument::
@@ -270,110 +193,19 @@ as the constructor's argument::
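# (hypothetical reconstruction of the truncated example above)
class Employee(Document):
    name = StringField()
    boss = ReferenceField('self')                 # 'self' references the class being defined
    profile_page = ReferenceField('ProfilePage')  # forward reference by name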
class ProfilePage(Document):
content = StringField()
.. _one-to-many-with-listfields:
One to Many with ListFields
'''''''''''''''''''''''''''
If you are implementing a one to many relationship via a list of references,
then the references are stored as DBRefs and to query you need to pass an
instance of the object to the query::
class User(Document):
name = StringField()
class Page(Document):
content = StringField()
authors = ListField(ReferenceField(User))
bob = User(name="Bob Jones").save()
john = User(name="John Smith").save()
Page(content="Test Page", authors=[bob, john]).save()
Page(content="Another Page", authors=[john]).save()
# Find all pages Bob authored
Page.objects(authors__in=[bob])
# Find all pages that both Bob and John have authored
Page.objects(authors__all=[bob, john])
# Remove Bob from the authors for a page.
Page.objects(id='...').update_one(pull__authors=bob)
# Add John to the authors for a page.
Page.objects(id='...').update_one(push__authors=john)
Dealing with deletion of referred documents
'''''''''''''''''''''''''''''''''''''''''''
By default, MongoDB doesn't check the integrity of your data, so deleting
documents that other documents still hold references to will lead to consistency
issues. Mongoengine's :class:`ReferenceField` adds some functionality to
safeguard against these kinds of database integrity problems, providing each
reference with a delete rule specification. A delete rule is specified by
supplying the :attr:`reverse_delete_rule` attributes on the
:class:`ReferenceField` definition, like this::
class ProfilePage(Document):
...
employee = ReferenceField('Employee', reverse_delete_rule=mongoengine.CASCADE)
The declaration in this example means that when an :class:`Employee` object is
removed, the :class:`ProfilePage` that references that employee is removed as
well. If a whole batch of employees is removed, all profile pages that are
linked are removed as well.
Its value can take any of the following constants:
:const:`mongoengine.DO_NOTHING`
This is the default and won't do anything. Deletes are fast, but may cause
database inconsistency or dangling references.
:const:`mongoengine.DENY`
Deletion is denied if there still exist references to the object being
deleted.
:const:`mongoengine.NULLIFY`
Any object's fields still referring to the object being deleted are removed
(using MongoDB's "unset" operation), effectively nullifying the relationship.
:const:`mongoengine.CASCADE`
Any object containing fields that are referring to the object being deleted
are deleted first.
:const:`mongoengine.PULL`
Removes the reference to the object (using MongoDB's "pull" operation)
from any object's fields of
:class:`~mongoengine.fields.ListField` (:class:`~mongoengine.fields.ReferenceField`).
.. warning::
A safety note on setting up these delete rules! Since the delete rules are
not recorded on the database level by MongoDB itself, but instead at runtime,
in-memory, by the MongoEngine module, it is of the utmost importance
that the module that declares the relationship is loaded **BEFORE** the
delete is invoked.
If, for example, the :class:`Employee` object lives in the
:mod:`payroll` app, and the :class:`ProfilePage` in the :mod:`people`
app, it is extremely important that the :mod:`people` app is loaded
before any employee is removed, because otherwise, MongoEngine could
never know this relationship exists.
In Django, be sure to put all apps that have such delete rule declarations in
their :file:`models.py` in the :const:`INSTALLED_APPS` tuple.
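As a short illustration of the delete rules above (the class names and fields are invented for this sketch)::

import mongoengine
from mongoengine import Document, ListField, ReferenceField, StringField

class Author(Document):
    name = StringField()

class Book(Document):
    # PULL: deleting an Author removes them from each book's author list.
    authors = ListField(ReferenceField(Author, reverse_delete_rule=mongoengine.PULL))

class Review(Document):
    # CASCADE: deleting a Book deletes its reviews as well.
    book = ReferenceField(Book, reverse_delete_rule=mongoengine.CASCADE)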
Generic reference fields
''''''''''''''''''''''''
A second kind of reference field also exists,
:class:`~mongoengine.fields.GenericReferenceField`. This allows you to reference any
kind of :class:`~mongoengine.Document`, and hence doesn't take a
:class:`~mongoengine.GenericReferenceField`. This allows you to reference any
:class:`~mongoengine.Document` subclass as a constructor argument::
class Link(Document):
url = StringField()
class Post(Document):
title = StringField()
class Bookmark(Document):
bookmark_object = GenericReferenceField()
@@ -387,19 +219,18 @@ kind of :class:`~mongoengine.Document`, and hence doesn't take a
Bookmark(bookmark_object=post).save()
.. note::
Using :class:`~mongoengine.fields.GenericReferenceField`\ s is slightly less
efficient than the standard :class:`~mongoengine.fields.ReferenceField`\ s, so if
you will only be referencing one document type, prefer the standard
:class:`~mongoengine.fields.ReferenceField`.
Using :class:`~mongoengine.GenericReferenceField`\ s is slightly less
efficient than the standard :class:`~mongoengine.ReferenceField`\ s, so if
you will only be referencing one document type, prefer the standard
:class:`~mongoengine.ReferenceField`.
Uniqueness constraints
----------------------
MongoEngine allows you to specify that a field should be unique across a
collection by providing ``unique=True`` to a :class:`~mongoengine.fields.Field`\ 's
collection by providing ``unique=True`` to a :class:`~mongoengine.Field`\ 's
constructor. If you try to save a document that has the same value for a unique
field as a document that is already in the database, a
:class:`~mongoengine.NotUniqueError` will be raised. You may also specify
:class:`~mongoengine.OperationError` will be raised. You may also specify
multi-field uniqueness constraints by using :attr:`unique_with`, which may be
either a single field name, or a list or tuple of field names::
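# (hypothetical example - the diff truncates the original)
class User(Document):
    username = StringField(unique=True)
    first_name = StringField()
    # unique_with: no two users may share the same first and last name.
    last_name = StringField(unique_with='first_name')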
@@ -410,14 +241,14 @@ either a single field name, or a list or tuple of field names::
Skipping Document validation on save
------------------------------------
You can also skip the whole document validation process by setting
``validate=False`` when calling the :meth:`~mongoengine.document.Document.save`
method::
class Recipient(Document):
name = StringField()
email = EmailField()
recipient = Recipient(name='admin', email='root@localhost')
recipient.save() # will raise a ValidationError while
recipient.save(validate=False) # won't
@@ -426,7 +257,7 @@ Document collections
====================
Document classes that inherit **directly** from :class:`~mongoengine.Document`
will have their own **collection** in the database. The name of the collection
is by default the name of the class, converted to lowercase (so in the example
above, the collection would be called `page`). If you need to change the name
of the collection (e.g. to use MongoEngine with an existing database), then
create a class dictionary attribute called :attr:`meta` on your document, and
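set the :attr:`collection` key inside it. A minimal sketch (the collection name is illustrative)::

class Page(Document):
    title = StringField()
    meta = {'collection': 'cmsPage'}  # stored in 'cmsPage' rather than 'page'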
@@ -443,201 +274,35 @@ A :class:`~mongoengine.Document` may use a **Capped Collection** by specifying
:attr:`max_documents` and :attr:`max_size` in the :attr:`meta` dictionary.
:attr:`max_documents` is the maximum number of documents that is allowed to be
stored in the collection, and :attr:`max_size` is the maximum size of the
collection in bytes. :attr:`max_size` is rounded up to the next multiple of 256
by MongoDB internally and by mongoengine beforehand; use a multiple of 256 to
avoid confusion. If :attr:`max_size` is not specified and
:attr:`max_documents` is, :attr:`max_size` defaults to 10485760 bytes (10MB).
collection in bytes. If :attr:`max_size` is not specified and
:attr:`max_documents` is, :attr:`max_size` defaults to 10000000 bytes (10MB).
The following example shows a :class:`Log` document that will be limited to
1000 entries and 2MB of disk space::
class Log(Document):
ip_address = StringField()
meta = {'max_documents': 1000, 'max_size': 2000000}
.. defining-indexes_
Indexes
=======
You can specify indexes on collections to make querying faster. This is done
by creating a list of index specifications called :attr:`indexes` in the
:attr:`~mongoengine.Document.meta` dictionary, where an index specification may
either be a single field name, a tuple containing multiple field names, or a
dictionary containing a full index definition.
A direction may be specified on fields by prefixing the field name with a
**+** (for ascending) or a **-** sign (for descending). Note that direction
only matters on multi-field indexes. Text indexes may be specified by prefixing
the field name with a **$**. Hashed indexes may be specified by prefixing
the field name with a **#**::
either be a single field name, or a tuple containing multiple field names. A
direction may be specified on fields by prefixing the field name with a **+**
or a **-** sign. Note that direction only matters on multi-field indexes. ::
class Page(Document):
category = IntField()
title = StringField()
rating = StringField()
created = DateTimeField()
meta = {
'indexes': [
'title',
'$title', # text index
'#title', # hashed index
('title', '-rating'),
('category', '_cls'),
{
'fields': ['created'],
'expireAfterSeconds': 3600
}
]
'indexes': ['title', ('title', '-rating')]
}
If a dictionary is passed then the following options are available:
:attr:`fields` (Default: None)
The fields to index. Specified in the same format as described above.
:attr:`cls` (Default: True)
If you have polymorphic models that inherit and have
:attr:`allow_inheritance` turned on, you can configure whether the index
should have the :attr:`_cls` field added automatically to the start of the
index.
:attr:`sparse` (Default: False)
Whether the index should be sparse.
:attr:`unique` (Default: False)
Whether the index should be unique.
:attr:`expireAfterSeconds` (Optional)
Allows you to automatically expire data from a collection by setting the
time in seconds to expire a field.
.. note::
Inheritance adds extra fields to indices; see :ref:`document-inheritance`.
Global index default options
----------------------------
There are a few top level defaults for all indexes that can be set::
class Page(Document):
title = StringField()
rating = StringField()
meta = {
'index_options': {},
'index_background': True,
'index_drop_dups': True,
'index_cls': False
}
:attr:`index_options` (Optional)
Set any default index options - see the `full options list <http://docs.mongodb.org/manual/reference/method/db.collection.ensureIndex/#db.collection.ensureIndex>`_
:attr:`index_background` (Optional)
Set the default for whether an index should be built in the background
:attr:`index_cls` (Optional)
A way to turn off a specific index for _cls.
:attr:`index_drop_dups` (Optional)
Set the default for whether an index should drop duplicates
.. note:: Since MongoDB 3.0, drop_dups is no longer supported. It raises a Warning
and has no effect
Compound Indexes and Indexing sub documents
-------------------------------------------
Compound indexes can be created by adding the Embedded field or dictionary
field name to the index definition.
Sometimes it's more efficient to index parts of Embedded / dictionary fields;
in this case, use 'dot' notation to identify the value to index, e.g. `rank.title`.
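A minimal sketch of the dot-notation form (the document and field names are illustrative)::

class Rank(EmbeddedDocument):
    title = StringField()

class Person(Document):
    rank = EmbeddedDocumentField(Rank)
    meta = {
        # Index only the embedded 'title' value rather than the whole field.
        'indexes': ['rank.title'],
    }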
.. _geospatial-indexes:
Geospatial indexes
------------------
The best geo index for mongodb is the new "2dsphere", which has an improved
spherical model and provides better performance and more options when querying.
The following fields will explicitly add a "2dsphere" index:
- :class:`~mongoengine.fields.PointField`
- :class:`~mongoengine.fields.LineStringField`
- :class:`~mongoengine.fields.PolygonField`
- :class:`~mongoengine.fields.MultiPointField`
- :class:`~mongoengine.fields.MultiLineStringField`
- :class:`~mongoengine.fields.MultiPolygonField`
As "2dsphere" indexes can be part of a compound index, you may not want the
automatic index but would prefer a compound index. In this example we turn off
auto indexing and explicitly declare a compound index on ``location`` and ``datetime``::
class Log(Document):
location = PointField(auto_index=False)
datetime = DateTimeField()
meta = {
'indexes': [[("location", "2dsphere"), ("datetime", 1)]]
}
Pre MongoDB 2.4 Geo
'''''''''''''''''''
.. note:: For MongoDB < 2.4 this is still current, however the new 2dsphere
index is a big improvement over the previous 2D model - so upgrading is
advised.
Geospatial indexes will be automatically created for all
:class:`~mongoengine.fields.GeoPointField`\ s
It is also possible to explicitly define geospatial indexes. This is
useful if you need to define a geospatial index on a subfield of a
:class:`~mongoengine.fields.DictField` or a custom field that contains a
point. To create a geospatial index you must prefix the field with the
***** sign. ::
class Place(Document):
location = DictField()
meta = {
'indexes': [
'*location.point',
],
}
Time To Live indexes
--------------------
A special index type that allows you to automatically expire data from a
collection after a given period. See the official
`ttl <http://docs.mongodb.org/manual/tutorial/expire-data/#expire-data-from-collections-by-setting-ttl>`_
documentation for more information. A common use case might be session data::
class Session(Document):
created = DateTimeField(default=datetime.now)
meta = {
'indexes': [
{'fields': ['created'], 'expireAfterSeconds': 3600}
]
}
.. warning:: TTL indexes happen on the MongoDB server and not in the application
code, therefore no signals will be fired on document deletion.
If you need signals to be fired on deletion, then you must handle the
deletion of Documents in your application code.
Comparing Indexes
-----------------
Use :func:`mongoengine.Document.compare_indexes` to compare actual indexes in
the database to those that your document definitions define. This is useful
for maintenance purposes and ensuring you have the correct indexes for your
schema.
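For example, a short sketch (the exact shape of the report may vary by
version)::

    class Person(Document):
        name = StringField(required=True)

        meta = {'indexes': ['name']}

    # Compares the indexes declared above with those that actually exist in
    # the database, reporting any that are missing or extra
    diff = Person.compare_indexes()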
Ordering
========
A default ordering can be specified for your
:class:`~mongoengine.queryset.QuerySet` using the :attr:`ordering` attribute of
:attr:`~mongoengine.Document.meta`. Ordering will be applied when the
:class:`~mongoengine.queryset.QuerySet` is created, and can be overridden by
subsequent calls to :meth:`~mongoengine.queryset.QuerySet.order_by`. ::

class BlogPost(Document):
    title = StringField()
    published_date = DateTimeField()

    meta = {
        'ordering': ['-published_date']
    }
blog_post_1 = BlogPost(title="Blog Post #1")
blog_post_1.published_date = datetime(2010, 1, 5, 0, 0 ,0)
blog_post_2 = BlogPost(title="Blog Post #2")
blog_post_2.published_date = datetime(2010, 1, 6, 0, 0 ,0)
blog_post_3 = BlogPost(title="Blog Post #3")
blog_post_3.published_date = datetime(2010, 1, 7, 0, 0, 0)

blog_post_1.save()
blog_post_2.save()
blog_post_3.save()
# get the "first" BlogPost using default ordering
# from BlogPost.meta.ordering
latest_post = BlogPost.objects.first()
assert latest_post.title == "Blog Post #3"
# override default ordering, order BlogPosts by "published_date"
first_post = BlogPost.objects.order_by("+published_date").first()
assert first_post.title == "Blog Post #1"
Shard keys
==========
If your collection is sharded, then you need to specify the shard key as a tuple,
using the :attr:`shard_key` attribute of :attr:`~mongoengine.Document.meta`.
This ensures that the shard key is sent with the query when calling the
:meth:`~mongoengine.document.Document.save` or
:meth:`~mongoengine.document.Document.update` method on an existing
:class:`~mongoengine.Document` instance::
class LogEntry(Document):
machine = StringField()
app = StringField()
timestamp = DateTimeField()
data = StringField()
meta = {
'shard_key': ('machine', 'timestamp',)
}
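A usage sketch (the machine name is assumed)::

    entry = LogEntry.objects(machine='host1').first()
    entry.data = 'new payload'
    entry.save()  # the query sent to the server includes machine & timestamp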
.. _document-inheritance:
Document inheritance
====================
To create a specialised type of a :class:`~mongoengine.Document` you have
defined, you may subclass it and add any extra fields or methods you may need.
As this new class is not a direct subclass of
:class:`~mongoengine.Document`, it will not be stored in its own collection; it
will use the same collection as its superclass uses. This allows for more
convenient and efficient retrieval of related documents -- all you need do is
set :attr:`allow_inheritance` to True in the :attr:`meta` data for a
document::
# Stored in a collection named 'page'
class Page(Document):
title = StringField(max_length=200, required=True)
meta = {'allow_inheritance': True}
# Also stored in the collection named 'page'
class DatedPage(Page):
date = DateTimeField()
.. note:: From 0.8 onwards :attr:`allow_inheritance` defaults
to False, meaning you must set it to True to use inheritance.
Working with existing data
--------------------------
As MongoEngine no longer defaults to needing :attr:`_cls`, you can quickly and
easily get working with existing data. Just define the document to match
the expected schema in your database ::
# Will work with data in an existing collection named 'cmsPage'
class Page(Document):
title = StringField(max_length=200, required=True)
meta = {
'collection': 'cmsPage'
}
If you have wildly varying schemas then using a
:class:`~mongoengine.DynamicDocument` might be more appropriate, instead of
defining all possible field types.
If you use :class:`~mongoengine.Document` and the database contains data that
isn't defined then that data will be stored in the `document._data` dictionary.
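For instance, a brief sketch (the extra ``author_name`` field is assumed to
exist only in the data)::

    class Page(DynamicDocument):
        title = StringField(max_length=200, required=True)

        meta = {'collection': 'cmsPage'}

    page = Page(title='About')
    page.author_name = 'Ross'  # not declared on the class; stored as a dynamic field
    page.save()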
Abstract classes
================
If you want to add some extra functionality to a group of Document classes but
you don't need or want the overhead of inheritance you can use the
:attr:`abstract` attribute of :attr:`~mongoengine.Document.meta`.
This won't turn on :ref:`document-inheritance` but will allow you to keep your
code DRY::
class BaseDocument(Document):
meta = {
'abstract': True,
}
def check_permissions(self):
...
class User(BaseDocument):
...
Now the User class will have access to the inherited `check_permissions` method
and won't store any of the extra `_cls` information.
===================
Document instances
===================
To create a new document object, create an instance of the relevant document
class, providing values for its fields as constructor keyword arguments.
You may provide values for any of the fields on the document::
>>> page = Page(title="Test Page")
>>> page.title
'Test Page'
You may also assign values to the document's fields using standard object
attribute syntax::
>>> page.title = "Example Page"
>>> page.title
'Example Page'
Saving and deleting documents
=============================
MongoEngine tracks changes to documents to provide efficient saving. To save
the document to the database, call the :meth:`~mongoengine.Document.save` method.
If the document does not exist in the database, it will be created. If it does
already exist, then any changes will be updated atomically. For example::
>>> page = Page(title="Test Page")
>>> page.save() # Performs an insert
>>> page.title = "My Page"
>>> page.save() # Performs an atomic set on the title field.
.. note::
Changes to documents are tracked and on the whole perform ``set`` operations.
* ``list_field.push(0)`` --- *sets* the resulting list
* ``del(list_field)`` --- *unsets* whole list
With lists it's preferable to use ``Doc.update(push__list_field=0)``, as
this stops the whole list being rewritten and so avoids race conditions.
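For example, a short sketch (assuming a ``Page`` document with a ``tags``
list field)::

    # Performs an atomic $push instead of rewriting the whole list
    Page.objects(id=page.id).update_one(push__tags='mongodb')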
.. seealso::
:ref:`guide-atomic-updates`
Pre save data validation and cleaning
-------------------------------------
MongoEngine allows you to create custom cleaning rules for your documents when
calling :meth:`~mongoengine.Document.save`. By providing a custom
:meth:`~mongoengine.Document.clean` method you can do any pre validation / data
cleaning.
This might be useful if you want to ensure a default value based on other
document values for example::
class Essay(Document):
status = StringField(choices=('Published', 'Draft'), required=True)
pub_date = DateTimeField()
def clean(self):
"""Ensures that only published essays have a `pub_date` and
automatically sets the pub_date if published and not set"""
if self.status == 'Draft' and self.pub_date is not None:
msg = 'Draft entries should not have a publication date.'
raise ValidationError(msg)
# Set the pub_date for published items if not set.
if self.status == 'Published' and self.pub_date is None:
self.pub_date = datetime.now()
.. note::
Cleaning is only called if validation is turned on and when calling
:meth:`~mongoengine.Document.save`.
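A usage sketch of the cleaning in action::

    essay = Essay(status='Draft', pub_date=datetime.now())
    essay.save()  # raises ValidationError: drafts must not have a pub_date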
Cascading Saves
---------------
If your document contains :class:`~mongoengine.fields.ReferenceField` or
:class:`~mongoengine.fields.GenericReferenceField` objects, then by default the
:meth:`~mongoengine.Document.save` method will not save any changes to
those objects. If you want all references to be saved as well (note that each
save is a separate query), pass :attr:`cascade` as True to the save method and
any saves will be cascaded.
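A brief sketch (the ``BlogPost`` document and its ``author`` reference are
assumed)::

    post = BlogPost.objects.first()
    post.author.name = 'New name'
    post.save(cascade=True)  # also saves the modified author document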
Deleting documents
------------------
To delete a document, call the :meth:`~mongoengine.Document.delete` method.
Note that this will only work if the document exists in the database and has a
valid :attr:`id`.
Document IDs
============
Each document in the database has a unique id. This may be accessed through the
:attr:`id` attribute on :class:`~mongoengine.Document` objects. Usually, the id
will be generated automatically by the database server when the object is saved.

If you define your own primary key field (e.g. by passing ``primary_key=True``
to a field), you may still use :attr:`id` to access the primary key if you want::
>>> bob.id == bob.email == 'bob@example.com'
True
You can also access the document's "primary key" using the :attr:`pk` field;
it's an alias to :attr:`id`::
>>> page = Page(title="Another Test Page")
>>> page.save()
>>> page.id == page.pk
True
.. note::
If you define your own primary key field, the field implicitly becomes
required, so a :class:`~mongoengine.ValidationError` will be thrown if
you don't provide it.
======
GridFS
======
Writing
-------
GridFS support comes in the form of the :class:`~mongoengine.fields.FileField` field
object. This field acts as a file-like object and provides a couple of
different ways of inserting and retrieving data. Arbitrary metadata such as
content type can also be stored alongside the files. In the following example,
a document is created to store details about animals, including a photo::
class Animal(Document):
    genus = StringField()
    family = StringField()
    photo = FileField()
marmot = Animal(genus='Marmota', family='Sciuridae')
marmot_photo = open('marmot.jpg', 'rb')  # Retrieve a photo from disk
marmot.photo = marmot_photo # Store photo in the document
marmot.photo.content_type = 'image/jpeg' # Store metadata
marmot.save()
Another way of writing to a :class:`~mongoengine.fields.FileField` is to use the
:func:`put` method. This allows for metadata to be stored in the same call as
the file::
marmot.photo.put(marmot_photo, content_type='image/jpeg')
marmot.save()
Retrieval
---------
So using the :class:`~mongoengine.fields.FileField` is just like using any other
field. The file can also be retrieved just as easily::
marmot = Animal.objects(genus='Marmota').first()
photo = marmot.photo.read()
content_type = marmot.photo.content_type
Streaming
---------
Streaming data into a :class:`~mongoengine.fields.FileField` is achieved in a
slightly different manner. First, a new file must be created by calling the
:func:`new_file` method. Data can then be written using :func:`write`::
marmot.photo.new_file()
marmot.photo.write('some_image_data')
marmot.photo.write('some_more_image_data')
marmot.photo.close()
marmot.save()
Deletion
--------
Deleting stored files is achieved with the :func:`delete` method::
marmot.photo.delete()
.. warning::
The FileField in a Document actually only stores the ID of a file in a
separate GridFS collection. This means that deleting a document
with a defined FileField does not actually delete the file. You must be
Replacing files
---------------
Files can be replaced with the :func:`replace` method. This works just like
the :func:`put` method so even metadata can (and should) be replaced::
another_marmot = open('another_marmot.png', 'rb')
marmot.photo.replace(another_marmot, content_type='image/png')
==========
User Guide
==========

.. toctree::
   :maxdepth: 2

   document-instances
   querying
   gridfs
   signals
   text-indexes
   mongomock
======================
Installing MongoEngine
======================
To use MongoEngine, you will need to download `MongoDB <http://mongodb.com/>`_
and ensure it is running in an accessible location. You will also need
`PyMongo <http://api.mongodb.org/python>`_ to use MongoEngine, but if you
install MongoEngine using setuptools, then the dependencies will be handled for
you.
MongoEngine is available on PyPI, so you can use :program:`pip`:
.. code-block:: console
$ pip install mongoengine
Alternatively, if you don't have setuptools installed, `download it from PyPI
<http://pypi.python.org/pypi/mongoengine/>`_ and run
.. code-block:: console
$ python setup.py install
To use the bleeding-edge version of MongoEngine, you can get the source from
`GitHub <http://github.com/mongoengine/mongoengine/>`_ and install it as above:
.. code-block:: console
$ git clone git://github.com/mongoengine/mongoengine
$ cd mongoengine
$ python setup.py install
==============================
Use mongomock for testing
==============================
`mongomock <https://github.com/vmalloc/mongomock/>`_ is a package to do just
what the name implies, mocking a mongo database.
To use it with MongoEngine, simply specify ``mongomock`` as the host when
connecting:
.. code-block:: python
connect('mongoenginetest', host='mongomock://localhost')
conn = get_connection()
or with an alias:
.. code-block:: python
connect('mongoenginetest', host='mongomock://localhost', alias='testdb')
conn = get_connection('testdb')
=====================
Querying the database
=====================

:class:`~mongoengine.Document` classes have an :attr:`objects` attribute, which
is used for accessing the objects in the database associated with the class.
The :attr:`objects` attribute is actually a
:class:`~mongoengine.queryset.QuerySetManager`, which creates and returns a new
:class:`~mongoengine.queryset.QuerySet` object on access. The
:class:`~mongoengine.queryset.QuerySet` object may be iterated over to
fetch documents from the database::
# Prints out the names of all the users in the database
@@ -14,16 +14,16 @@ fetch documents from the database::
print(user.name)
.. note::
As of MongoEngine 0.8 the querysets utilise a local cache. So iterating
it multiple times will only cause a single query. If this is not the
desired behaviour you can call :class:`~mongoengine.QuerySet.no_cache`
(version **0.8.3+**) to return a non-caching queryset.
Filtering queries
=================
The query may be filtered by calling the
:class:`~mongoengine.queryset.QuerySet` object with field lookup keyword
arguments. The keys in the keyword arguments correspond to fields on the
:class:`~mongoengine.Document` you are querying::
Fields on embedded documents may also be referred to using field lookup
syntax by using a double-underscore in place of the dot in object attribute
access syntax::
# been written by a user whose 'country' field is set to 'uk'
uk_pages = Page.objects(author__country='uk')
.. note::
(version **0.9.1+**) If your field name matches a MongoDB operator name (for
example ``type``, ``lte``, ``lt``...) and it is placed at the end of the lookup
keyword, MongoEngine automatically prepends a ``$`` to it. To avoid this, append
``__`` to your lookup keyword. For example, if your field name is ``type`` and
you want to query by this field, you must use ``.objects(user__type__="admin")``
instead of ``.objects(user__type="admin")``.
Query operators
===============
Operators other than equality may also be used in queries --- just attach the
operator name to a key with a double-underscore::
# Only find users whose age is 18 or less
Available operators are as follows:

* ``ne`` -- not equal to
* ``lt`` -- less than
* ``lte`` -- less than or equal to
* ``gt`` -- greater than
* ``gte`` -- greater than or equal to
* ``in`` -- value is in list (a list of values should be provided)
* ``nin`` -- value is not in list (a list of values should be provided)
* ``mod`` -- ``value % x == y``, where ``x`` and ``y`` are two provided values
* ``all`` -- every item in list of values provided is in array
* ``size`` -- the size of the array matches the given value
* ``exists`` -- value for field exists
String queries
--------------
The following operators are available as shortcuts to querying with regular
expressions:
* ``exact`` -- string field exactly matches value
* ``iexact`` -- string field exactly matches value (case insensitive)
* ``contains`` -- string field contains value
* ``icontains`` -- string field contains value (case insensitive)
* ``startswith`` -- string field starts with value
* ``istartswith`` -- string field starts with value (case insensitive)
* ``endswith`` -- string field ends with value
* ``iendswith`` -- string field ends with value (case insensitive)
* ``match`` -- performs an $elemMatch so you can match an entire document within an array
.. versionadded:: 0.3
Geo queries
-----------
There are a few special operators for performing geographical queries.
The following were added in MongoEngine 0.8 for
:class:`~mongoengine.fields.PointField`,
:class:`~mongoengine.fields.LineStringField` and
:class:`~mongoengine.fields.PolygonField`:
* ``geo_within`` -- check if a geometry is within a polygon. For ease of use
it accepts either a geojson geometry or just the polygon coordinates eg::
loc.objects(point__geo_within=[[[40, 5], [40, 6], [41, 6], [40, 5]]])
loc.objects(point__geo_within={"type": "Polygon",
"coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]})
* ``geo_within_box`` -- simplified geo_within searching with a box eg::
loc.objects(point__geo_within_box=[(-125.0, 35.0), (-100.0, 40.0)])
loc.objects(point__geo_within_box=[<bottom left coordinates>, <upper right coordinates>])
* ``geo_within_polygon`` -- simplified geo_within searching within a simple polygon eg::
loc.objects(point__geo_within_polygon=[[40, 5], [40, 6], [41, 6], [40, 5]])
loc.objects(point__geo_within_polygon=[ [ <x1> , <y1> ] ,
[ <x2> , <y2> ] ,
[ <x3> , <y3> ] ])
* ``geo_within_center`` -- simplified geo_within the flat circle radius of a point eg::
loc.objects(point__geo_within_center=[(-125.0, 35.0), 1])
loc.objects(point__geo_within_center=[ [ <x>, <y> ] , <radius> ])
* ``geo_within_sphere`` -- simplified geo_within the spherical circle radius of a point eg::
loc.objects(point__geo_within_sphere=[(-125.0, 35.0), 1])
loc.objects(point__geo_within_sphere=[ [ <x>, <y> ] , <radius> ])
* ``geo_intersects`` -- selects all locations that intersect with a geometry eg::
# Inferred from provided points lists:
loc.objects(poly__geo_intersects=[40, 6])
loc.objects(poly__geo_intersects=[[40, 5], [40, 6]])
loc.objects(poly__geo_intersects=[[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]])
# With geoJson style objects
loc.objects(poly__geo_intersects={"type": "Point", "coordinates": [40, 6]})
loc.objects(poly__geo_intersects={"type": "LineString",
"coordinates": [[40, 5], [40, 6]]})
loc.objects(poly__geo_intersects={"type": "Polygon",
"coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]})
* ``near`` -- find all the locations near a given point::
loc.objects(point__near=[40, 5])
loc.objects(point__near={"type": "Point", "coordinates": [40, 5]})
You can also set the maximum and/or the minimum distance in meters as well::
loc.objects(point__near=[40, 5], point__max_distance=1000)
loc.objects(point__near=[40, 5], point__min_distance=100)
The older 2D indexes are still supported with the
:class:`~mongoengine.fields.GeoPointField`:
* ``within_distance`` -- provide a list containing a point and a maximum
distance (e.g. [(41.342, -87.653), 5])
* ``within_spherical_distance`` -- same as above but using the spherical geo model
(e.g. [(41.342, -87.653), 5/earth_radius])
* ``near`` -- order the documents by how close they are to a given point
* ``near_sphere`` -- Same as above but using the spherical geo model
* ``within_box`` -- filter documents to those within a given bounding box (e.g.
[(35.0, -125.0), (40.0, -100.0)])
* ``within_polygon`` -- filter documents to those within a given polygon (e.g.
[(41.91,-87.69), (41.92,-87.68), (41.91,-87.65), (41.89,-87.65)]).
.. note:: Requires Mongo Server 2.0
* ``max_distance`` -- can be added to your location queries to set a maximum
distance.
* ``min_distance`` -- can be added to your location queries to set a minimum
distance.
Querying lists
--------------
On most fields, this syntax will look up documents where the field specified
matches the given value exactly, but when the field refers to a
:class:`~mongoengine.fields.ListField`, a single item may be provided, in which case
lists that contain that item will be matched::
class Page(Document):
tags = ListField(StringField())
# This will match all pages that have the word 'coding' as an item in the
# 'tags' list
Page.objects(tags='coding')
.. versionadded:: 0.4
Querying by position
====================
It is possible to query by position in a list by using a numerical value as a
query operator. So if you wanted to find all pages whose first tag was ``db``,
you could use the following query::
Page.objects(tags__0='db')
If you only want to fetch part of a list, e.g. to paginate it, then
the `slice` operator is required::
# comments - skip 5, limit 10
Page.objects.fields(slice__comments=[5, 10])
For updating documents, if you don't know the position in a list, you can use
the $ positional operator ::
Post.objects(comments__by="joe").update(**{'inc__comments__$__votes': 1})
However, this doesn't map well to the syntax so you can also use a capital S instead ::
Post.objects(comments__by="joe").update(inc__comments__S__votes=1)
.. note::
Due to :program:`Mongo`, currently the $ operator only applies to the
first matched item in the query.
Raw queries
-----------
It is possible to provide a raw :mod:`PyMongo` query as a query parameter, which will
be integrated directly into the query. This is done using the ``__raw__``
keyword argument::
Page.objects(__raw__={'tags': 'coding'})
.. versionadded:: 0.4
Limiting and skipping results
=============================
Just as with traditional ORMs, you may limit the number of results returned or
skip a number of results in your query.
:meth:`~mongoengine.queryset.QuerySet.limit` and
:meth:`~mongoengine.queryset.QuerySet.skip` methods are available on
:class:`~mongoengine.queryset.QuerySet` objects, but the `array-slicing` syntax
is preferred for achieving this::
# Only the first 5 people
users = User.objects[:5]
# All except for the first 5 people
users = User.objects[5:]
# 5 users, starting from the 11th user found
users = User.objects[10:15]
You may also index the query to retrieve a single result. If an item at that
Retrieving unique results
-------------------------
To retrieve a result that should be unique in the collection, use
:meth:`~mongoengine.queryset.QuerySet.get`. This will raise
:class:`~mongoengine.queryset.DoesNotExist` if no document matches the query,
and :class:`~mongoengine.queryset.MultipleObjectsReturned` if more than one
document matched the query. These exceptions are merged into your document
definitions, e.g. `MyDoc.DoesNotExist`.
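For instance, a short sketch (assuming a ``User`` document with a ``name``
field)::

    try:
        user = User.objects.get(name='User A')
    except User.DoesNotExist:
        user = None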
A variation of this method, ``get_or_create()``, existed but was unsafe. It
could not be made safe because there are no transactions in MongoDB. Other
approaches should be investigated to ensure you don't accidentally duplicate
data when implementing something similar. It was therefore deprecated in 0.8
and removed in 0.10.
Default Document queries
========================
By default, the objects :attr:`~Document.objects` attribute on a
document returns a :class:`~mongoengine.queryset.QuerySet` that doesn't filter
the collection -- it returns all objects. This may be changed by defining a
method on a document that modifies a queryset. The method should accept two
custom manager methods as you like::
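# A sketch of the custom manager exercised by the asserts below (the class
# definition is assumed; live_posts filters to published posts):
class BlogPost(Document):
    title = StringField()
    published = BooleanField()

    @queryset_manager
    def live_posts(doc_cls, queryset):
        return queryset.filter(published=True)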
BlogPost(title='test1', published=False).save()
BlogPost(title='test2', published=True).save()
assert len(BlogPost.objects) == 2
assert len(BlogPost.live_posts()) == 1
Custom QuerySets
================
@@ -316,19 +220,14 @@ Should you want to add custom methods for interacting with or filtering
documents, extending the :class:`~mongoengine.queryset.QuerySet` class may be
the way to go. To use a custom :class:`~mongoengine.queryset.QuerySet` class on
a document, set ``queryset_class`` to the custom class in a
:class:`~mongoengine.Document`'s ``meta`` dictionary::
class AwesomerQuerySet(QuerySet):
def get_awesome(self):
return self.filter(awesome=True)
class Page(Document):
meta = {'queryset_class': AwesomerQuerySet}
# To call:
Page.objects.get_awesome()
.. versionadded:: 0.4
Aggregation
===========

MongoDB provides some aggregation methods out of the box, but there are not as
many as you typically get with an RDBMS. MongoEngine provides a number of
helper methods, some of which are implemented using
Javascript code that is executed on the database server.
Counting results
----------------
Just as with limiting and skipping results, there is a method on a
:class:`~mongoengine.queryset.QuerySet` object --
:meth:`~mongoengine.queryset.QuerySet.count`::
num_users = User.objects.count()
You could technically use ``len(User.objects)`` to get the same result, but it
would be significantly slower than :meth:`~mongoengine.queryset.QuerySet.count`.
When you execute a server-side count query, you let MongoDB do the heavy
lifting and you receive a single integer over the wire. Meanwhile, ``len()``
retrieves all the results, places them in a local cache, and finally counts
them, which is much slower.
Further aggregation
-------------------
You may sum over the values of a specific field on documents using
:meth:`~mongoengine.queryset.QuerySet.sum`::

yearly_expense = Employee.objects.sum('salary')
.. note::
If the field isn't present on a document, that document will be excluded from
the sum.
As MongoDB provides native lists, MongoEngine provides a helper method to get a
dictionary of the frequencies of items in lists across an entire collection --
:meth:`~mongoengine.queryset.QuerySet.item_frequencies`. An example of its use
would be generating "tag-clouds"::

class Article(Document):
    tag = ListField(StringField())

# After adding some tagged articles...
tag_freqs = Article.objects.item_frequencies('tag', normalize=True)
from operator import itemgetter
top_tags = sorted(tag_freqs.items(), key=itemgetter(1), reverse=True)[:10]
Query efficiency and performance
================================
There are a couple of methods to improve efficiency when querying: reducing the
information returned by the query, and efficient dereferencing.
Retrieving a subset of fields
-----------------------------
Sometimes a subset of fields on a :class:`~mongoengine.Document` is required,
and for efficiency only these should be retrieved from the database. This issue
is especially important for MongoDB, as fields may often be extremely large
(e.g. a :class:`~mongoengine.fields.ListField` of
:class:`~mongoengine.EmbeddedDocument`\ s, which represent the comments on a
blog post). To select only a subset of fields, use
:meth:`~mongoengine.queryset.QuerySet.only`, specifying the fields you want to
retrieve. If a field that has not been downloaded is accessed, its default
value (or :attr:`None` if no default value is set) will be given::

>>> class Film(Document):
...     title = StringField()
...     year = IntField()
...     rating = IntField(default=3)
...
>>> Film(title='The Shawshank Redemption', year=1994, rating=9).save()
>>> f = Film.objects.only('title').first()
>>> f.year   # None
>>> f.rating # default value
3
.. note::
:meth:`~mongoengine.queryset.QuerySet.exclude` is the opposite of
:meth:`~mongoengine.queryset.QuerySet.only`; use it if you want to exclude a
field instead.

If you later need the missing fields, just call
:meth:`~mongoengine.Document.reload` on your document.
Getting related data
--------------------
When iterating the results of :class:`~mongoengine.fields.ListField` or
:class:`~mongoengine.fields.DictField` we automatically dereference any
:class:`~pymongo.dbref.DBRef` objects as efficiently as possible, reducing the
number of queries to Mongo.
There are times when that efficiency is not enough; documents that have
:class:`~mongoengine.fields.ReferenceField` objects or
:class:`~mongoengine.fields.GenericReferenceField` objects at the top level are
expensive, as the number of queries to MongoDB can quickly rise.
To limit the number of queries, use
:func:`~mongoengine.queryset.QuerySet.select_related`, which converts the
QuerySet to a list and dereferences as efficiently as possible. By default
:func:`~mongoengine.queryset.QuerySet.select_related` only dereferences
references to a depth of one level. If you have more complicated documents and
want to dereference more of the object at once, increasing :attr:`max_depth`
will dereference more levels of the document.
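For example, a brief sketch (assuming ``Post.author`` is a
:class:`~mongoengine.fields.ReferenceField`)::

    # Fetch posts and dereference references up to two levels deep
    posts = Post.objects.select_related(max_depth=2)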
Turning off dereferencing
-------------------------
Sometimes for performance reasons you don't want to automatically dereference
data. To turn off dereferencing of the results of a query use
:func:`~mongoengine.queryset.QuerySet.no_dereference` on the queryset like so::
post = Post.objects.no_dereference().first()
assert(isinstance(post.author, ObjectId))
You can also turn off all dereferencing for a fixed period by using the
:class:`~mongoengine.context_managers.no_dereference` context manager::
with no_dereference(Post) as Post:
post = Post.objects.first()
assert(isinstance(post.author, ObjectId))
# Outside the context manager dereferencing occurs.
assert(isinstance(post.author, User))
Advanced queries
================
Sometimes calling a :class:`~mongoengine.queryset.QuerySet` object with keyword
arguments can't fully express the query you want to use -- for example if you
need to combine a number of constraints using *and* and *or*. This is made
possible in MongoEngine through the :class:`~mongoengine.queryset.Q` class.
A :class:`~mongoengine.queryset.Q` object represents part of a query, and
can be initialised using the same keyword-argument syntax you use to query
documents. To build a complex query, you may combine
:class:`~mongoengine.queryset.Q` objects using the ``&`` (and) and ``|`` (or)
operators. To use a :class:`~mongoengine.queryset.Q` object, pass it in as the
first positional argument to :attr:`Document.objects` when you filter it by
calling it with keyword arguments::
from mongoengine.queryset.visitor import Q
# Get published posts
Post.objects(Q(published=True) | Q(publish_date__lte=datetime.now()))
# Get top posts
Post.objects((Q(featured=True) & Q(hits__gte=1000)) | Q(hits__gte=5000))
.. warning:: You have to use bitwise operators. You cannot use ``or``, ``and``
to combine queries as ``Q(a=a) or Q(b=b)`` is not the same as
``Q(a=a) | Q(b=b)``. As ``Q(a=a)`` equates to true ``Q(a=a) or Q(b=b)`` is
the same as ``Q(a=a)``.
.. _guide-atomic-updates:
Atomic updates
==============
Documents may be updated atomically by using the
:meth:`~mongoengine.queryset.QuerySet.update_one`,
:meth:`~mongoengine.queryset.QuerySet.update` and
:meth:`~mongoengine.queryset.QuerySet.modify` methods on a
:class:`~mongoengine.queryset.QuerySet` or
:meth:`~mongoengine.Document.modify` and
:meth:`~mongoengine.Document.save` (with :attr:`save_condition` argument) on a
:class:`~mongoengine.Document`.
There are several different "modifiers" that you may use with these methods:
* ``set`` -- set a particular value
* ``unset`` -- delete a particular value (since MongoDB v1.3)
* ``inc`` -- increment a value by a given amount
* ``dec`` -- decrement a value by a given amount
* ``push`` -- append a value to a list
* ``push_all`` -- append several values to a list
* ``pop`` -- remove the first or last element of a list `depending on the value`_
* ``pull`` -- remove a value from a list
* ``pull_all`` -- remove several values from a list
* ``add_to_set`` -- add value to a list only if it's not in the list already
.. _depending on the value: http://docs.mongodb.org/manual/reference/operator/update/pop/
The syntax for atomic updates is similar to the querying syntax, but the
modifier comes before the field, not after it::
>>> post = BlogPost(title='Test', page_views=0, tags=['database'])
>>> post.save()
>>> BlogPost.objects(id=post.id).update_one(inc__page_views=1)
>>> post.reload() # the document has been changed, so we need to reload it
>>> post.page_views
1
>>> BlogPost.objects(id=post.id).update_one(set__title='Example Post')
>>> post.reload()
>>> post.title
'Example Post'
>>> BlogPost.objects(id=post.id).update_one(push__tags='nosql')
>>> post.reload()
>>> post.tags
['database', 'nosql']
.. note::
If no modifier operator is specified, the default will be ``$set``, so the following statements are identical::
>>> BlogPost.objects(id=post.id).update(title='Example Post')
>>> BlogPost.objects(id=post.id).update(set__title='Example Post')
.. note::
In version 0.5 the :meth:`~mongoengine.Document.save` runs atomic updates
on changed documents by tracking changes to that document.
The positional operator allows you to update list items without knowing the
index position, therefore making the update a single atomic operation. As we
cannot use the `$` syntax in keyword arguments it has been mapped to `S`::
>>> post = BlogPost(title='Test', page_views=0, tags=['database', 'mongo'])
>>> post.save()
>>> BlogPost.objects(id=post.id, tags='mongo').update(set__tags__S='mongodb')
>>> post.reload()
>>> post.tags
['database', 'mongodb']
From MongoDB 2.6 onwards, the ``push`` operator supports a ``$position`` value,
which allows values to be pushed at a given index::
>>> post = BlogPost(title="Test", tags=["mongo"])
>>> post.save()
>>> post.update(push__tags__0=["database", "code"])
>>> post.reload()
>>> post.tags
['database', 'code', 'mongo']
.. note::
Currently only top-level lists are handled; future versions of MongoDB /
PyMongo plan to support nested positional operators. See `The $ positional
operator <http://www.mongodb.org/display/DOCS/Updating#Updating-The%24positionaloperator>`_.
.. warning::
Only use these advanced queries if absolutely necessary as they will execute
significantly slower than regular queries. This is because they are not
natively supported by MongoDB -- they are compiled to Javascript and sent
to the server for execution.
Server-side javascript execution
================================
@@ -616,7 +368,7 @@ Some variables are made available in the scope of the Javascript function:
The following example demonstrates the intended usage of
:meth:`~mongoengine.queryset.QuerySet.exec_js` by defining a function that sums
over a field on a document (this functionality is already available through
over a field on a document (this functionality is already available throught
:meth:`~mongoengine.queryset.QuerySet.sum` but is shown here for sake of
example)::
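# A rough sketch of such a function (its exact body is assumed; `collection`
# and `query` are variables made available in the Javascript scope):
sum_field = """
function(sumField) {
    var total = 0.0;
    db[collection].find(query).forEach(function(doc) {
        total += doc[sumField];
    });
    return total;
}
"""
Employee.objects.exec_js(sum_field, 'salary')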
As fields in MongoEngine may use different names in the database (set using the
:attr:`db_field` keyword argument to a :class:`Field` constructor), a mechanism
exists for replacing MongoEngine field names with the database field names in
Javascript code. When accessing a field on a collection object, use
square-bracket notation, and prefix the MongoEngine field name with a tilde.
The field name that follows the tilde will be translated to the name used in
the database. Note that when referring to fields on embedded documents,
the name of the :class:`~mongoengine.fields.EmbeddedDocumentField`, followed by a dot,
should be used before the name of the field on the embedded document. The
following example shows how the substitutions are made::
return comments;
}
""")
.. _signals:
=======
Signals
=======
.. versionadded:: 0.5
.. note::
Signal support is provided by the excellent `blinker`_ library. If you wish
to enable signal support this library must be installed, though it is not
required for MongoEngine to function.
Overview
--------
Signals are found within the `mongoengine.signals` module. Unless otherwise
specified, signals receive no additional arguments beyond the `sender` class and
`document` instance. Post-signals are only called if there were no exceptions
raised during the processing of their related function.
Available signals include:
`pre_init`
Called during the creation of a new :class:`~mongoengine.Document` or
:class:`~mongoengine.EmbeddedDocument` instance, after the constructor
arguments have been collected but before any additional processing has been
done to them. (I.e. assignment of default values.) Handlers for this signal
are passed the dictionary of arguments using the `values` keyword argument
and may modify this dictionary prior to returning.
`post_init`
Called after all processing of a new :class:`~mongoengine.Document` or
:class:`~mongoengine.EmbeddedDocument` instance has been completed.
`pre_save`
Called within :meth:`~mongoengine.Document.save` prior to performing
any actions.
`pre_save_post_validation`
Called within :meth:`~mongoengine.Document.save` after validation
has taken place but before saving.
`post_save`
Called within :meth:`~mongoengine.Document.save` after all actions
(validation, insert/update, cascades, clearing dirty flags) have completed
successfully. Passed the additional boolean keyword argument `created` to
indicate if the save was an insert or an update.
`pre_delete`
Called within :meth:`~mongoengine.Document.delete` prior to
attempting the delete operation.
`post_delete`
Called within :meth:`~mongoengine.Document.delete` upon successful
deletion of the record.
`pre_bulk_insert`
Called after validation of the documents to insert, but prior to any data
being written. In this case, the `document` argument is replaced by a
`documents` argument representing the list of documents being inserted.
`post_bulk_insert`
Called after a successful bulk insert operation. As per `pre_bulk_insert`,
the `document` argument is omitted and replaced with a `documents` argument.
An additional boolean argument, `loaded`, identifies the contents of
`documents` as either :class:`~mongoengine.Document` instances when `True` or
simply a list of primary key values for the inserted records if `False`.
Attaching Events
----------------
After writing a handler function like the following::
import logging
from datetime import datetime
from mongoengine import *
from mongoengine import signals
def update_modified(sender, document):
document.modified = datetime.utcnow()
You attach the event handler to your :class:`~mongoengine.Document` or
:class:`~mongoengine.EmbeddedDocument` subclass::
class Record(Document):
modified = DateTimeField()
signals.pre_save.connect(update_modified)
While this is not the most elaborate document model, it does demonstrate the
concepts involved. As a more complete demonstration you can also define your
handlers within your subclass::
class Author(Document):
name = StringField()
@classmethod
def pre_save(cls, sender, document, **kwargs):
logging.debug("Pre Save: %s" % document.name)
@classmethod
def post_save(cls, sender, document, **kwargs):
logging.debug("Post Save: %s" % document.name)
if 'created' in kwargs:
if kwargs['created']:
logging.debug("Created")
else:
logging.debug("Updated")
signals.pre_save.connect(Author.pre_save, sender=Author)
signals.post_save.connect(Author.post_save, sender=Author)
Finally, you can also use this small decorator to quickly create a number of
signals and attach them to your :class:`~mongoengine.Document` or
:class:`~mongoengine.EmbeddedDocument` subclasses as class decorators::
def handler(event):
"""Signal decorator to allow use of callback functions as class decorators."""
def decorator(fn):
def apply(cls):
event.connect(fn, sender=cls)
return cls
fn.apply = apply
return fn
return decorator
Using the first example of updating a modification time the code is now much
cleaner looking while still allowing manual execution of the callback::
@handler(signals.pre_save)
def update_modified(sender, document):
document.modified = datetime.utcnow()
@update_modified.apply
class Record(Document):
modified = DateTimeField()
.. _blinker: http://pypi.python.org/pypi/blinker
===========
Text Search
===========
MongoDB 2.4 and later support searching documents with text indexes.
Defining a Document with text index
===================================
Use the ``$`` prefix to set a text index, as in the following declaration::
class News(Document):
title = StringField()
content = StringField()
is_active = BooleanField()
meta = {'indexes': [
{'fields': ['$title', "$content"],
'default_language': 'english',
'weights': {'title': 10, 'content': 2}
}
]}
Querying
========
Saving a document::
News(title="Using mongodb text search",
content="Testing text search").save()
News(title="MongoEngine 0.9 released",
content="Various improvements").save()
Next, start a text search using the :meth:`QuerySet.search_text` method::
document = News.objects.search_text('testing').first()
document.title # may be: "Using mongodb text search"
document = News.objects.search_text('released').first()
document.title # may be: "MongoEngine 0.9 released"
Ordering by text score
======================
::
objects = News.objects.search_text('mongo').order_by('$text_score')
==============================
MongoEngine User Documentation
==============================
**MongoEngine** is an Object-Document Mapper, written in Python for working with
MongoDB. To install it, simply run
.. code-block:: console
$ pip install -U mongoengine
:doc:`tutorial`
A quick tutorial building a tumblelog to get you up and running with
MongoEngine.
:doc:`guide/index`
The full guide to MongoEngine --- from modeling documents to storing files,
from querying for data to firing signals and *everything* between.
:doc:`apireference`
The complete API documentation --- the innards of documents, querysets and fields.
:doc:`upgrade`
How to upgrade MongoEngine.
:doc:`django`
Using MongoEngine and Django
Community
---------
The source is available on `GitHub <http://github.com/hmarr/mongoengine>`_.
To get help with using MongoEngine, use the `MongoEngine Users mailing list
<http://groups.google.com/group/mongoengine-users>`_ or the ever popular
`stackoverflow <http://www.stackoverflow.com>`_.
Contributing
------------
**Yes please!** We are always looking for contributions, additions and improvements.
The source is available on `GitHub <http://github.com/MongoEngine/mongoengine>`_
and contributions are always encouraged. Contributions can be as simple as
minor tweaks to this documentation, the website or the core.
To contribute, fork the project on
`GitHub <http://github.com/MongoEngine/mongoengine>`_ and send a
pull request.
Changes
-------
See the :doc:`changelog` for a full list of changes to MongoEngine and
:doc:`upgrade` for upgrade information.
.. note:: Always read and test the `upgrade <upgrade>`_ documentation before
putting updates live in production **;)**
Offline Reading
---------------
Download the docs in `pdf <https://media.readthedocs.org/pdf/mongoengine-odm/latest/mongoengine-odm.pdf>`_
or `epub <https://media.readthedocs.org/epub/mongoengine-odm/latest/mongoengine-odm.epub>`_
formats for offline reading.
.. toctree::
:maxdepth: 1
:numbered:
:hidden:
tutorial
guide/index
apireference
changelog
upgrade
django
Indices and tables
------------------
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
========
Tutorial
========
This tutorial introduces **MongoEngine** by means of example --- we will walk
through how to create a simple **Tumblelog** application. A tumblelog is a
blog that supports mixed media content, including text, images, links, video,
audio, etc. For simplicity's sake, we'll stick to text, image, and link
entries. As the purpose of this tutorial is to introduce MongoEngine, we'll
focus on the data-modelling side of the application, leaving out a user
interface.
Getting started
===============
Before we start, make sure that a copy of MongoDB is running in an accessible
location --- running it locally will be easier, but if that is not an option
then it may be run on a remote server. If you haven't installed MongoEngine,
simply use pip to install it like so::
$ pip install mongoengine
Before we can start using MongoEngine, we need to tell it how to connect to our
instance of :program:`mongod`. For this we use the :func:`~mongoengine.connect`
function. If running locally, the only argument we need to provide is the name
of the MongoDB database to use::
from mongoengine import *
connect('tumblelog')
There are lots of options for connecting to MongoDB, for more information about
them see the :ref:`guide-connecting` guide.
Defining our documents
======================
MongoDB is *schemaless*, which means that no schema is enforced by the database
--- we may add and remove fields however we want and MongoDB won't complain.
This makes life a lot easier in many regards, especially when there is a change
to the data model. However, defining schemas for our documents can help to iron
out bugs involving incorrect types or missing fields, and also allow us to
define utility methods on our documents in the same way that traditional
:abbr:`ORMs (Object-Relational Mappers)` do.
In our Tumblelog application we need to store several different types of
information. We will need to have a collection of **users**, so that we may
link posts to an individual. We also need to store our different types of
**posts** (e.g. text, image and link) in the database. To aid navigation of our
Tumblelog, posts may have **tags** associated with them, so that the list of
posts shown to the user may be limited to posts that have been assigned a
specific tag. Finally, it would be nice if **comments** could be added to
posts. We'll start with **users**, as the other document models are slightly
more involved.
Users
-----
Just as if we were using a relational database with an ORM, we need to define
which fields a :class:`User` may have, and what types of data they might store::
class User(Document):
email = StringField(required=True)
first_name = StringField(max_length=50)
last_name = StringField(max_length=50)
This looks similar to how the structure of a table would be defined in a
regular ORM. The key difference is that this schema will never be passed on to
MongoDB --- this will only be enforced at the application level, making future
changes easy to manage. Also, the User documents will be stored in a
MongoDB *collection* rather than a table.
Posts, Comments and Tags
------------------------
Now we'll think about how to store the rest of the information. If we were
using a relational database, we would most likely have a table of **posts**, a
table of **comments** and a table of **tags**. To associate the comments with
individual posts, we would put a column in the comments table that contained a
foreign key to the posts table. We'd also need a link table to provide the
many-to-many relationship between posts and tags. Then we'd need to address the
problem of storing the specialised post-types (text, image and link). There are
several ways we can achieve this, but each of them has its problems --- none
of them stand out as particularly intuitive solutions.
Posts
^^^^^
Happily MongoDB *isn't* a relational database, so we're not going to do it that
way. As it turns out, we can use MongoDB's schemaless nature to provide us with
a much nicer solution. We will store all of the posts in *one collection* and
each post type will only store the fields it needs. If we later want to add
video posts, we don't have to modify the collection at all, we just *start
using* the new fields we need to support video posts. This fits with the
Object-Oriented principle of *inheritance* nicely. We can think of
:class:`Post` as a base class, and :class:`TextPost`, :class:`ImagePost` and
:class:`LinkPost` as subclasses of :class:`Post`. In fact, MongoEngine supports
this kind of modeling out of the box --- all you need do is turn on inheritance
by setting :attr:`allow_inheritance` to True in the :attr:`meta`::
class Post(Document):
title = StringField(max_length=120, required=True)
author = ReferenceField(User)
meta = {'allow_inheritance': True}
class TextPost(Post):
content = StringField()
class ImagePost(Post):
    image_path = StringField()

class LinkPost(Post):
link_url = StringField()
We are storing a reference to the author of the posts using a
:class:`~mongoengine.fields.ReferenceField` object. These are similar to foreign key
fields in traditional ORMs, and are automatically translated into references
when they are saved, and dereferenced when they are loaded.
Tags
^^^^
Now that we have our Post models figured out, how will we attach tags to them?
MongoDB allows us to store lists of items natively, so rather than having a
link table, we can just store a list of tags in each post. So, for both
efficiency and simplicity's sake, we'll store the tags as strings directly
within the post, rather than storing references to tags in a separate
collection. Especially as tags are generally very short (often even shorter
than a document's id), this denormalization won't impact the size of the
database very strongly. Let's take a look at the code of our modified
:class:`Post` class::
class Post(Document):
class Post(Document):
    title = StringField(max_length=120, required=True)
author = ReferenceField(User)
tags = ListField(StringField(max_length=30))
The :class:`~mongoengine.fields.ListField` object that is used to define a Post's tags
takes a field object as its first argument --- this means that you can have
lists of any type of field (including lists).
.. note:: We don't need to modify the specialized post types as they all
inherit from :class:`Post`.
Comments
^^^^^^^^
A comment is typically associated with *one* post. In a relational database, to
display a post with its comments, we would have to retrieve the post from the
database and then query the database again for the comments associated with the
post. This works, but there is no real reason to be storing the comments
separately from their associated posts, other than to work around the
relational model. Using MongoDB we can store the comments as a list of
@@ -170,63 +152,39 @@ We can then store a list of comment documents in our post document::
tags = ListField(StringField(max_length=30))
comments = ListField(EmbeddedDocumentField(Comment))
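The hunk above elides the :class:`Comment` class itself. In the tutorial it is
a minimal embedded document along these lines (a sketch --- the exact fields are
an assumption)::
    class Comment(EmbeddedDocument):
        content = StringField()
        name = StringField(max_length=120)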
Handling deletions of references
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The :class:`~mongoengine.fields.ReferenceField` object takes a keyword
`reverse_delete_rule` for handling deletion rules if the reference is deleted.
To delete all the posts if a user is deleted set the rule::
class Post(Document):
title = StringField(max_length=120, required=True)
author = ReferenceField(User, reverse_delete_rule=CASCADE)
tags = ListField(StringField(max_length=30))
comments = ListField(EmbeddedDocumentField(Comment))
See :class:`~mongoengine.fields.ReferenceField` for more information.
.. note::
MapFields and DictFields currently don't support automatic handling of
deleted references
Adding data to our Tumblelog
============================
Now that we've defined how our documents will be structured, let's start adding
some documents to the database. Firstly, we'll need to create a :class:`User`
object::
ross = User(email='ross@example.com', first_name='Ross', last_name='Lawley').save()
.. note::
We could have also defined our user using attribute syntax::
ross = User(email='ross@example.com')
ross.first_name = 'Ross'
ross.last_name = 'Lawley'
ross.save()
Assign another user to a variable called ``john``, just like we did above with
``ross``.
Now that we've got our users in the database, let's add a couple of posts::
post1 = TextPost(title='Fun with MongoEngine', author=john)
post1.content = 'Took a look at MongoEngine today, looks pretty cool.'
post1.tags = ['mongodb', 'mongoengine']
post1.save()
post2 = LinkPost(title='MongoEngine Documentation', author=ross)
post2.link_url = 'http://docs.mongoengine.com/'
post2.tags = ['mongoengine']
post2.save()
.. note:: If you change a field on an object that has already been saved and
then call :meth:`save` again, the document will be updated.
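For example (a small sketch reusing ``post1`` from above)::
    post1.tags.append('tutorial')
    post1.save()  # updates the existing document rather than creating a new one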
Accessing our data
==================
So now we've got a couple of posts in our database, how do we display them?
Each document class (i.e. any class that inherits either directly or indirectly
from :class:`~mongoengine.Document`) has an :attr:`objects` attribute, which is
@@ -234,17 +192,16 @@ used to access the documents in the database collection associated with that
class. So let's see how we can get our posts' titles::
for post in Post.objects:
print(post.title)
Retrieving type-specific information
------------------------------------
This will print the titles of our posts, one on each line. But what if we want
to access the type-specific data (link_url, content, etc.)? One way is simply
to use the :attr:`objects` attribute of a subclass of :class:`Post`::
for post in TextPost.objects:
print(post.content)
Using TextPost's :attr:`objects` attribute only returns documents that were
created using :class:`TextPost`. Actually, there is a more general rule here:
@@ -261,21 +218,22 @@ instances of :class:`Post` --- they were instances of the subclass of
practice::
for post in Post.objects:
print(post.title)
print('=' * len(post.title))
if isinstance(post, TextPost):
print(post.content)
if isinstance(post, LinkPost):
print('Link: {}'.format(post.link_url))
This would print the title of each post, followed by the content if it was a
text post, and "Link: <url>" if it was a link post.
Searching our posts by tag
--------------------------
The :attr:`objects` attribute of a :class:`~mongoengine.Document` is actually a
:class:`~mongoengine.queryset.QuerySet` object. This lazily queries the
database only when you need the data. It may also be filtered to narrow down
@@ -283,7 +241,7 @@ your query. Let's adjust our query so that only posts with the tag "mongodb"
are returned::
for post in Post.objects(tags='mongodb'):
print(post.title)
There are also methods available on :class:`~mongoengine.queryset.QuerySet`
objects that allow different results to be returned, for example, calling
@@ -292,11 +250,5 @@ the first matched by the query you provide. Aggregation functions may also be
used on :class:`~mongoengine.queryset.QuerySet` objects::
num_posts = Post.objects(tags='mongodb').count()
print('Found {} posts with tag "mongodb"'.format(num_posts))
Learning more about MongoEngine
-------------------------------
If you got this far you've made a great start, so well done! The next step on
your MongoEngine journey is the `full user guide <guide/index.html>`_, where
you can learn in-depth about how to use MongoEngine and MongoDB.


@@ -1,623 +0,0 @@
#########
Upgrading
#########
Development
***********
(Fill this out whenever you introduce breaking changes to MongoEngine)
0.14.0
******
This release includes a few bug fixes and a significant code cleanup. The most
important change is that `QuerySet.as_pymongo` no longer supports a
`coerce_types` mode. If you used it in the past, a) please let us know of your
use case, b) you'll need to override `as_pymongo` to get the desired outcome.
This release also makes the EmbeddedDocument not hashable by default. If you
use embedded documents in sets or dictionaries, you might have to override
`__hash__` and implement a hashing logic specific to your use case. See #1528
for the reason behind this change.
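A minimal sketch of such an override (the ``Tag`` class and hashing on its
``name`` field are illustrative assumptions)::
    class Tag(EmbeddedDocument):
        name = StringField(required=True)
        def __hash__(self):
            # hash on whatever uniquely identifies this embedded document
            return hash(self.name)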
0.13.0
******
This release adds Unicode support to the `EmailField` and changes its
structure significantly. Previously, email addresses containing Unicode
characters didn't work at all. Starting with v0.13.0, domains with Unicode
characters are supported out of the box, meaning some emails that previously
didn't pass validation now do. Make sure the rest of your application can
accept such email addresses. Additionally, if you subclassed the `EmailField`
in your application and overrode `EmailField.EMAIL_REGEX`, you will have to
adjust your code to override `EmailField.USER_REGEX`, `EmailField.DOMAIN_REGEX`,
and potentially `EmailField.UTF8_USER_REGEX`.
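For instance, a subclass that previously relaxed `EMAIL_REGEX` would now
override `USER_REGEX` instead (a sketch; the pattern itself is illustrative)::
    import re
    from mongoengine.fields import EmailField
    class LooseEmailField(EmailField):
        # accept any user part before the @ (illustrative pattern)
        USER_REGEX = re.compile(r'^[^@]+$', re.IGNORECASE)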
0.12.0
******
This release includes various fixes for the `BaseQuerySet` methods and how they
are chained together. Since version 0.10.1 applying limit/skip/hint/batch_size
to an already-existing queryset wouldn't modify the underlying PyMongo cursor.
This has been fixed now, so you'll need to make sure that your code didn't rely
on the broken implementation.
Additionally, a public `BaseQuerySet.clone_into` has been renamed to a private
`_clone_into`. If you directly used that method in your code, you'll need to
rename its occurrences.
0.11.0
******
This release includes a major overhaul of MongoEngine's code quality and
introduces a few breaking changes. It also touches many different parts of
the package and although all the changes have been tested and scrutinized,
you're encouraged to thoroughly test the upgrade.
The first breaking change involves renaming `ConnectionError` to `MongoEngineConnectionError`.
If you import or catch this exception, you'll need to rename it in your code.
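A sketch of the rename::
    # Old code:
    # from mongoengine import ConnectionError
    # New code:
    from mongoengine import connect, MongoEngineConnectionError
    try:
        connect('mydb')
    except MongoEngineConnectionError:
        pass  # handle the failed connection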
The second breaking change drops Python v2.6 support. If you run MongoEngine on
that Python version, you'll need to upgrade it first.
The third breaking change drops an old backwards-compatibility measure where
`from mongoengine.base import ErrorClass` would work on top of
`from mongoengine.errors import ErrorClass` (where `ErrorClass` is e.g.
`ValidationError`). If you import any exceptions from `mongoengine.base`,
change the import to `mongoengine.errors`.
0.10.8
******
This version fixed an issue where specifying a MongoDB URI host would override
more information than it should. These changes are minor, but they still
subtly modify the connection logic and thus you're encouraged to test your
MongoDB connection before shipping v0.10.8 in production.
0.10.7
******
`QuerySet.aggregate_sum` and `QuerySet.aggregate_average` have been dropped. Use
`QuerySet.sum` and `QuerySet.average` instead, which now use the aggregation
framework by default.
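For example (``Post`` and its ``views`` field are illustrative)::
    # Old code:
    # total_views = Post.objects.aggregate_sum('views')
    # New code:
    total_views = Post.objects.sum('views')
    average_views = Post.objects.average('views')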
0.9.0
*****
The 0.8.7 package on PyPI was corrupted. If upgrading from 0.8.7 to 0.9.0, please follow: ::
pip uninstall pymongo
pip uninstall mongoengine
pip install pymongo==2.8
pip install mongoengine
0.8.7
*****
Calling reload on deleted / nonexistent documents now raises a DoesNotExist
exception.
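A sketch of handling this (``post`` is an illustrative document instance)::
    from mongoengine import DoesNotExist
    try:
        post.reload()
    except DoesNotExist:
        pass  # the document was deleted from the database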
0.8.2 to 0.8.3
**************
Minor change that may impact users:
DynamicDocument fields are now stored in creation order after any declared
fields. Previously they were stored alphabetically.
0.7 to 0.8
**********
There have been numerous backwards breaking changes in 0.8. The reasons for
these are to ensure that MongoEngine has sane defaults going forward and that it
performs the best it can out of the box. Where possible there have been
FutureWarnings to help get you ready for the change, but that hasn't been
possible for the whole of the release.
.. warning:: Breaking changes - test upgrading on a test system before going
live. There may be multiple manual steps in migrating and these are best honed
on a staging / test system.
Python and PyMongo
==================
MongoEngine requires Python 2.6 (or above) and PyMongo 2.5 (or above)
Data Model
==========
Inheritance
-----------
The inheritance model has changed: we no longer need to store an array of
:attr:`_types` with the model, as we can just use the class name in :attr:`_cls`.
This means that you will have to update your indexes for each of your
inherited classes like so: ::
# 1. Declaration of the class
class Animal(Document):
name = StringField()
meta = {
'allow_inheritance': True,
'indexes': ['name']
}
# 2. Remove _types
collection = Animal._get_collection()
collection.update({}, {"$unset": {"_types": 1}}, multi=True)
# 3. Confirm extra data is removed
count = collection.find({'_types': {"$exists": True}}).count()
assert count == 0
# 4. Remove indexes
info = collection.index_information()
indexes_to_drop = [key for key, value in info.iteritems()
if '_types' in dict(value['key'])]
for index in indexes_to_drop:
collection.drop_index(index)
# 5. Recreate indexes
Animal.ensure_indexes()
Document Definition
-------------------
The default for inheritance has changed - it is now off by default and
:attr:`_cls` will not be stored automatically with the class. So if you extend
your :class:`~mongoengine.Document` or :class:`~mongoengine.EmbeddedDocument`
you will need to declare :attr:`allow_inheritance` in the metadata like so: ::
class Animal(Document):
name = StringField()
meta = {'allow_inheritance': True}
Previously, if you had data in the database that wasn't defined in the Document
definition, it would set it as an attribute on the document. This is no longer
the case and the data is set only in the ``document._data`` dictionary: ::
>>> from mongoengine import *
>>> class Animal(Document):
... name = StringField()
...
>>> cat = Animal(name="kit", size="small")
# 0.7
>>> cat.size
u'small'
# 0.8
>>> cat.size
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'Animal' object has no attribute 'size'
The Document class has introduced a reserved method `clean()`, which will be
called before saving the document. If your document class happens to have a
method with the same name, you should rename it: ::
def clean(self):
pass
ReferenceField
--------------
ReferenceFields now store ObjectIds by default - this is more efficient than
DBRefs as we already know what Document types they reference::
# Old code
class Animal(Document):
name = ReferenceField('self')
# New code to keep dbrefs
class Animal(Document):
name = ReferenceField('self', dbref=True)
To migrate all the references you need to touch each object and mark it as
dirty, e.g.::
# Doc definition
class Person(Document):
name = StringField()
parent = ReferenceField('self')
friends = ListField(ReferenceField('self'))
# Mark all ReferenceFields as dirty and save
for p in Person.objects:
p._mark_as_changed('parent')
p._mark_as_changed('friends')
p.save()
`An example test migration for ReferenceFields is available on github
<https://github.com/MongoEngine/mongoengine/blob/master/tests/migration/refrencefield_dbref_to_object_id.py>`_.
.. note:: Internally MongoEngine handles ReferenceFields the same way: they are
converted to DBRefs on loading, and stored as ObjectIds or DBRefs on saving,
depending on the field's settings.
UUIDField
---------
UUIDFields now default to storing binary values::
# Old code
class Animal(Document):
uuid = UUIDField()
# New code to keep string-stored uuids
class Animal(Document):
uuid = UUIDField(binary=False)
To migrate all the uuids you need to touch each object and mark it as dirty,
e.g.::
# Doc definition
class Animal(Document):
uuid = UUIDField()
# Mark all UUIDFields as dirty and save
for a in Animal.objects:
a._mark_as_changed('uuid')
a.save()
`An example test migration for UUIDFields is available on github
<https://github.com/MongoEngine/mongoengine/blob/master/tests/migration/uuidfield_to_binary.py>`_.
DecimalField
------------
DecimalFields now store floats - previously they stored strings, which made
it impossible to do correct comparisons when querying::
# Old code
class Person(Document):
balance = DecimalField()
# New code to keep string-stored decimals
class Person(Document):
balance = DecimalField(force_string=True)
To migrate all the DecimalFields you need to touch each object and mark it as
dirty, e.g.::
# Doc definition
class Person(Document):
balance = DecimalField()
# Mark all DecimalFields as dirty and save
for p in Person.objects:
p._mark_as_changed('balance')
p.save()
.. note:: DecimalFields have also been improved with the addition of precision
and rounding. See :class:`~mongoengine.fields.DecimalField` for more information.
`An example test migration for DecimalFields is available on github
<https://github.com/MongoEngine/mongoengine/blob/master/tests/migration/decimalfield_as_float.py>`_.
Cascading Saves
---------------
To improve performance, document saves will no longer cascade automatically.
Any changes to a Document's references will either have to be saved manually or
you will have to explicitly tell the save to cascade::
# At the class level:
class Person(Document):
meta = {'cascade': True}
# Or on save:
my_document.save(cascade=True)
Storage
-------
Documents and Embedded Documents are now serialized based on declared field
order. Previously, the data was passed to MongoDB as a dictionary, which meant
that order wasn't guaranteed - so things like ``$addToSet`` operations on
:class:`~mongoengine.EmbeddedDocument` could potentially fail in unexpected
ways.
If this impacts you, you may want to rewrite the objects using the
``doc._mark_as_changed('field')`` pattern described above. If you are using a
compound primary key then you will need to ensure the field order is fixed and
match your EmbeddedDocument to that order.
Querysets
=========
Attack of the clones
--------------------
Querysets now return clones and should no longer be considered editable in
place. This brings us in line with how Django's querysets work and removes a
long-running gotcha. If you edit your querysets in place you will have to
update your code like so: ::
# Old code:
mammals = Animal.objects(type="mammal")
mammals.filter(order="Carnivora") # Returns a cloned queryset that isn't assigned to anything - so this will break in 0.8
[m for m in mammals] # This will return all mammals in 0.8 as the 2nd filter returned a new queryset
# Update example a) assign queryset after a change:
mammals = Animal.objects(type="mammal")
carnivores = mammals.filter(order="Carnivora") # Reassign the new queryset so filter can be applied
[m for m in carnivores] # This will return all carnivores
# Update example b) chain the queryset:
mammals = Animal.objects(type="mammal").filter(order="Carnivora") # The final queryset is assgined to mammals
[m for m in mammals] # This will return all carnivores
Len iterates the queryset
-------------------------
If you ever did `len(queryset)`, it previously did a `count()` under the
covers, which caused some unusual issues. As `len(queryset)` is most often used
by `list(queryset)`, we now cache the queryset results and use that cache for
the length. This isn't as performant as a `count()`, so if you aren't iterating
the queryset you should update your code to use `count()`::
# Old code
len(Animal.objects(type="mammal"))
# New code
Animal.objects(type="mammal").count()
.only() now inline with .exclude()
----------------------------------
The behaviour of `.only()` was highly ambiguous; now it works in mirror fashion
to `.exclude()`. Chaining `.only()` calls will increase the fields required::
# Old code
Animal.objects().only(['type', 'name']).only('name', 'order') # Would have returned just `name`
# New code
Animal.objects().only('name')
# Note:
Animal.objects().only(['name']).only('order') # Now returns `name` *and* `order`
Client
======
PyMongo 2.4 came with a new connection client, MongoClient_, and started the
deprecation of the old :class:`~pymongo.connection.Connection`. MongoEngine
now uses the latest `MongoClient` for connections. By default operations were
`safe`, but if you turned them off or used the connection directly, this will
impact your queries.
Querysets
---------
Safe
^^^^
`safe` has been deprecated in the new MongoClient connection. Please use
`write_concern` instead. As `safe` always defaulted to `True`, normally no code
change is required. To disable confirmation of the write just pass `{"w": 0}`,
e.g.: ::
    # Old code:
    Animal(name="Dinosaur").save(safe=False)
    # New code:
    Animal(name="Dinosaur").save(write_concern={"w": 0})
Write Concern
^^^^^^^^^^^^^
`write_options` has been replaced with `write_concern` to bring it in line with
PyMongo. To upgrade, simply rename any instances where you used the
`write_options` keyword to `write_concern` like so::
    # Old code:
    Animal(name="Dinosaur").save(write_options={"w": 2})
    # New code:
    Animal(name="Dinosaur").save(write_concern={"w": 2})
Indexes
=======
Index methods are no longer tied to querysets but rather to the document class.
Although `QuerySet._ensure_indexes` and `QuerySet.ensure_index` still exist,
they should be replaced with :func:`~mongoengine.Document.ensure_indexes` /
:func:`~mongoengine.Document.ensure_index`.
SequenceFields
==============
:class:`~mongoengine.fields.SequenceField` now inherits from `BaseField` to
allow flexible storage of the calculated value. As such, MIN and MAX settings
are no longer handled.
.. _MongoClient: http://blog.mongodb.org/post/36666163412/introducing-mongoclient
0.6 to 0.7
**********
Cascade saves
=============
Saves will raise a `FutureWarning` if they cascade and `cascade` hasn't been
set to True. This is because in 0.8 it will default to False. If you require
cascading saves then either set it in the `meta` or pass it
via `save`, e.g. ::
# At the class level:
class Person(Document):
meta = {'cascade': True}
# Or in code:
my_document.save(cascade=True)
.. note::
Remember: cascading saves **do not** cascade through lists.
ReferenceFields
===============
ReferenceFields can now store references as ObjectId strings instead of DBRefs.
This will become the default in 0.8, and if `dbref` is not set a `FutureWarning`
will be raised.
To explicitly continue to use DBRefs, change the `dbref` flag
to True ::
class Person(Document):
groups = ListField(ReferenceField(Group, dbref=True))
To migrate to using strings instead of DBRefs you will have to manually
migrate ::
# Step 1 - Migrate the model definition
class Group(Document):
author = ReferenceField(User, dbref=False)
members = ListField(ReferenceField(User, dbref=False))
# Step 2 - Migrate the data
for g in Group.objects():
g.author = g.author
g.members = g.members
g.save()
item_frequencies
================
In the 0.6 series we added support for null / zero / false values in
item_frequencies. A side effect was to return keys in the type they are
stored as, rather than as string representations. Your code may need to be
updated to handle native types rather than string keys in the results of
item frequency queries.
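For example (the document class, field, and counts are illustrative)::
    freqs = Person.objects.item_frequencies('age')
    # 0.5.x: {'27': 10, '31': 2}  (string keys)
    # 0.6+:  {27: 10, 31: 2}      (native keys)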
BinaryFields
============
Binary fields have been updated so that they are native binary types. If you
previously did `str` comparisons with binary field values, you will have to
update your code and wrap the value in a `str`.
0.5 to 0.6
**********
Embedded Documents - if you had a `pk` field you will have to rename it from
`_id` to `pk` as pk is no longer a property of Embedded Documents.
Reverse Delete Rules in Embedded Documents, MapFields and DictFields now throw
an InvalidDocument error as they aren't currently supported.
Document._get_subclasses - Is no longer used and the class method has been
removed.
Document.objects.with_id - now raises an InvalidQueryError if used with a
filter.
FutureWarning - A future warning has been added to all inherited classes that
don't define :attr:`allow_inheritance` in their meta.
You may need to update PyMongo to 2.0 for use with Sharding.
0.4 to 0.5
**********
There have been the following backwards incompatibilities from 0.4 to 0.5. The
main areas of change are: choices in fields, map_reduce, and collection names.
Choice options:
===============
Are now expected to be an iterable of tuples, with the first element in each
tuple being the actual value to be stored. The second element is the
human-readable name for the option.
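For example (an illustrative model)::
    class Shirt(Document):
        size = StringField(max_length=3, choices=(
            ('S', 'Small'),
            ('M', 'Medium'),
            ('L', 'Large'),
        ))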
PyMongo / MongoDB
=================
map_reduce now requires PyMongo 1.11+. The PyMongo `merge_output` and
`reduce_output` parameters have been deprecated.
More methods now use map_reduce; as db.eval is not supported for sharding,
the following have been changed:
* :meth:`~mongoengine.queryset.QuerySet.sum`
* :meth:`~mongoengine.queryset.QuerySet.average`
* :meth:`~mongoengine.queryset.QuerySet.item_frequencies`
Default collection naming
=========================
Previously it was just the lowercased class name; it's now much more Pythonic
and readable, as it's lowercase with underscores. Previously ::
class MyAceDocument(Document):
pass
    MyAceDocument._meta['collection'] == "myacedocument"
In 0.5 this will change to ::
class MyAceDocument(Document):
pass
    MyAceDocument._get_collection_name() == "my_ace_document"
To upgrade, use a mixin class to set the meta like so ::
class BaseMixin(object):
meta = {
'collection': lambda c: c.__name__.lower()
}
class MyAceDocument(Document, BaseMixin):
pass
MyAceDocument._get_collection_name() == "myacedocument"
Alternatively, you can rename your collections, e.g. ::
from mongoengine.connection import _get_db
from mongoengine.base import _document_registry
def rename_collections():
db = _get_db()
failure = False
collection_names = [d._get_collection_name()
for d in _document_registry.values()]
for new_style_name in collection_names:
if not new_style_name: # embedded documents don't have collections
continue
old_style_name = new_style_name.replace('_', '')
if old_style_name == new_style_name:
continue # Nothing to do
existing = db.collection_names()
if old_style_name in existing:
if new_style_name in existing:
failure = True
print "FAILED to rename: %s to %s (already exists)" % (
old_style_name, new_style_name)
else:
db[old_style_name].rename(new_style_name)
print "Renamed: %s to %s" % (old_style_name,
new_style_name)
if failure:
print "Upgrading collection names failed"
else:
print "Upgraded collection names"
MongoDB 1.8 > 2.0 +
===================
It's been reported that indexes may need to be recreated for the newer index
format. To do this, drop your indexes and call ``ensure_indexes`` on each model.


@@ -1,36 +1,24 @@
# Import submodules so that we can expose their __all__
from mongoengine import connection
from mongoengine import document
from mongoengine import errors
from mongoengine import fields
from mongoengine import queryset
from mongoengine import signals
# Import everything from each submodule so that it can be accessed via
# mongoengine, e.g. instead of `from mongoengine.connection import connect`,
# users can simply use `from mongoengine import connect`, or even
# `from mongoengine import *` and then `connect('testdb')`.
from mongoengine.connection import *
from mongoengine.document import *
from mongoengine.errors import *
from mongoengine.fields import *
from mongoengine.queryset import *
from mongoengine.signals import *
__author__ = 'Harry Marr'
__all__ = (list(document.__all__) + list(fields.__all__) +
list(connection.__all__) + list(queryset.__all__) +
list(signals.__all__) + list(errors.__all__))
VERSION = (0, 15, 0)
def get_version():
"""Return the VERSION as a string, e.g. for VERSION == (0, 10, 7),
return '0.10.7'.
"""
return '.'.join(map(str, VERSION))
__version__ = get_version()

mongoengine/base.py

@@ -0,0 +1,498 @@
from queryset import QuerySet, QuerySetManager
from queryset import DoesNotExist, MultipleObjectsReturned
import sys
import types  # used by subclass_exception() on Python < 2.5
import bson
import pymongo
_document_registry = {}
def get_document(name):
return _document_registry[name]
class ValidationError(Exception):
pass
class BaseField(object):
"""A base class for fields in a MongoDB document. Instances of this class
may be added to subclasses of `Document` to define a document's schema.
"""
# Fields may have _types inserted into indexes by default
_index_with_types = True
_geo_index = False
def __init__(self, db_field=None, name=None, required=False, default=None,
unique=False, unique_with=None, primary_key=False,
validation=None, choices=None):
self.db_field = (db_field or name) if not primary_key else '_id'
if name:
import warnings
msg = "Fields' 'name' attribute deprecated in favour of 'db_field'"
warnings.warn(msg, DeprecationWarning)
self.name = None
self.required = required or primary_key
self.default = default
self.unique = bool(unique or unique_with)
self.unique_with = unique_with
self.primary_key = primary_key
self.validation = validation
self.choices = choices
def __get__(self, instance, owner):
"""Descriptor for retrieving a value from a field in a document. Do
any necessary conversion between Python and MongoDB types.
"""
if instance is None:
# Document class being used rather than a document object
return self
# Get value from document instance if available, if not use default
value = instance._data.get(self.name)
if value is None:
value = self.default
# Allow callable default values
if callable(value):
value = value()
return value
def __set__(self, instance, value):
"""Descriptor for assigning a value to a field in a document.
"""
instance._data[self.name] = value
def to_python(self, value):
"""Convert a MongoDB-compatible type to a Python type.
"""
return value
def to_mongo(self, value):
"""Convert a Python type to a MongoDB-compatible type.
"""
return self.to_python(value)
def prepare_query_value(self, op, value):
"""Prepare a value that is being used in a query for PyMongo.
"""
return value
def validate(self, value):
"""Perform validation on a value.
"""
pass
def _validate(self, value):
# check choices
if self.choices is not None:
if value not in self.choices:
raise ValidationError("Value must be one of %s."
% unicode(self.choices))
# check validation argument
if self.validation is not None:
if callable(self.validation):
if not self.validation(value):
                    raise ValidationError('Value does not match custom '
                                          'validation method.')
else:
raise ValueError('validation argument must be a callable.')
self.validate(value)
class ObjectIdField(BaseField):
"""An field wrapper around MongoDB's ObjectIds.
"""
def to_python(self, value):
return value
# return unicode(value)
def to_mongo(self, value):
if not isinstance(value, bson.objectid.ObjectId):
try:
return bson.objectid.ObjectId(unicode(value))
except Exception, e:
#e.message attribute has been deprecated since Python 2.6
raise ValidationError(unicode(e))
return value
def prepare_query_value(self, op, value):
return self.to_mongo(value)
def validate(self, value):
try:
bson.objectid.ObjectId(unicode(value))
except:
raise ValidationError('Invalid Object ID')
class DocumentMetaclass(type):
"""Metaclass for all documents.
"""
def __new__(cls, name, bases, attrs):
metaclass = attrs.get('__metaclass__')
super_new = super(DocumentMetaclass, cls).__new__
if metaclass and issubclass(metaclass, DocumentMetaclass):
return super_new(cls, name, bases, attrs)
doc_fields = {}
class_name = [name]
superclasses = {}
simple_class = True
for base in bases:
# Include all fields present in superclasses
if hasattr(base, '_fields'):
doc_fields.update(base._fields)
class_name.append(base._class_name)
# Get superclasses from superclass
superclasses[base._class_name] = base
superclasses.update(base._superclasses)
if hasattr(base, '_meta'):
# Ensure that the Document class may be subclassed -
# inheritance may be disabled to remove dependency on
# additional fields _cls and _types
if base._meta.get('allow_inheritance', True) == False:
raise ValueError('Document %s may not be subclassed' %
base.__name__)
else:
simple_class = False
meta = attrs.get('_meta', attrs.get('meta', {}))
if 'allow_inheritance' not in meta:
meta['allow_inheritance'] = True
# Only simple classes - direct subclasses of Document - may set
# allow_inheritance to False
if not simple_class and not meta['allow_inheritance']:
raise ValueError('Only direct subclasses of Document may set '
'"allow_inheritance" to False')
attrs['_meta'] = meta
attrs['_class_name'] = '.'.join(reversed(class_name))
attrs['_superclasses'] = superclasses
# Add the document's fields to the _fields attribute
for attr_name, attr_value in attrs.items():
if hasattr(attr_value, "__class__") and \
issubclass(attr_value.__class__, BaseField):
attr_value.name = attr_name
if not attr_value.db_field:
attr_value.db_field = attr_name
doc_fields[attr_name] = attr_value
attrs['_fields'] = doc_fields
new_class = super_new(cls, name, bases, attrs)
for field in new_class._fields.values():
field.owner_document = new_class
module = attrs.get('__module__')
base_excs = tuple(base.DoesNotExist for base in bases
if hasattr(base, 'DoesNotExist')) or (DoesNotExist,)
exc = subclass_exception('DoesNotExist', base_excs, module)
new_class.add_to_class('DoesNotExist', exc)
base_excs = tuple(base.MultipleObjectsReturned for base in bases
if hasattr(base, 'MultipleObjectsReturned'))
base_excs = base_excs or (MultipleObjectsReturned,)
exc = subclass_exception('MultipleObjectsReturned', base_excs, module)
new_class.add_to_class('MultipleObjectsReturned', exc)
global _document_registry
_document_registry[name] = new_class
return new_class
def add_to_class(self, name, value):
setattr(self, name, value)
class TopLevelDocumentMetaclass(DocumentMetaclass):
"""Metaclass for top-level documents (i.e. documents that have their own
    collection in the database).
"""
def __new__(cls, name, bases, attrs):
super_new = super(TopLevelDocumentMetaclass, cls).__new__
# Classes defined in this package are abstract and should not have
# their own metadata with DB collection, etc.
# __metaclass__ is only set on the class with the __metaclass__
# attribute (i.e. it is not set on subclasses). This differentiates
# 'real' documents from the 'Document' class
if attrs.get('__metaclass__') == TopLevelDocumentMetaclass:
return super_new(cls, name, bases, attrs)
collection = name.lower()
id_field = None
base_indexes = []
base_meta = {}
# Subclassed documents inherit collection from superclass
for base in bases:
if hasattr(base, '_meta') and 'collection' in base._meta:
collection = base._meta['collection']
# Propagate index options.
for key in ('index_background', 'index_drop_dups', 'index_opts'):
if key in base._meta:
base_meta[key] = base._meta[key]
id_field = id_field or base._meta.get('id_field')
base_indexes += base._meta.get('indexes', [])
meta = {
'collection': collection,
'max_documents': None,
'max_size': None,
'ordering': [], # default ordering applied at runtime
'indexes': [], # indexes to be ensured at runtime
'id_field': id_field,
'index_background': False,
'index_drop_dups': False,
'index_opts': {},
'queryset_class': QuerySet,
}
meta.update(base_meta)
# Apply document-defined meta options
meta.update(attrs.get('meta', {}))
attrs['_meta'] = meta
# Set up collection manager, needs the class to have fields so use
# DocumentMetaclass before instantiating CollectionManager object
new_class = super_new(cls, name, bases, attrs)
# Provide a default queryset unless one has been manually provided
if not hasattr(new_class, 'objects'):
new_class.objects = QuerySetManager()
user_indexes = [QuerySet._build_index_spec(new_class, spec)
for spec in meta['indexes']] + base_indexes
new_class._meta['indexes'] = user_indexes
unique_indexes = []
for field_name, field in new_class._fields.items():
# Generate a list of indexes needed by uniqueness constraints
if field.unique:
field.required = True
unique_fields = [field.db_field]
# Add any unique_with fields to the back of the index spec
if field.unique_with:
if isinstance(field.unique_with, basestring):
field.unique_with = [field.unique_with]
# Convert unique_with field names to real field names
unique_with = []
for other_name in field.unique_with:
parts = other_name.split('.')
# Lookup real name
parts = QuerySet._lookup_field(new_class, parts)
name_parts = [part.db_field for part in parts]
unique_with.append('.'.join(name_parts))
# Unique field should be required
parts[-1].required = True
unique_fields += unique_with
# Add the new index to the list
index = [(f, pymongo.ASCENDING) for f in unique_fields]
unique_indexes.append(index)
# Check for custom primary key
if field.primary_key:
current_pk = new_class._meta['id_field']
if current_pk and current_pk != field_name:
raise ValueError('Cannot override primary key field')
if not current_pk:
new_class._meta['id_field'] = field_name
# Make 'Document.id' an alias to the real primary key field
new_class.id = field
new_class._meta['unique_indexes'] = unique_indexes
if not new_class._meta['id_field']:
new_class._meta['id_field'] = 'id'
new_class._fields['id'] = ObjectIdField(db_field='_id')
new_class.id = new_class._fields['id']
return new_class
class BaseDocument(object):
def __init__(self, **values):
self._data = {}
# Assign default values to instance
for attr_name in self._fields.keys():
# Use default value if present
value = getattr(self, attr_name, None)
setattr(self, attr_name, value)
# Assign initial values to instance
for attr_name in values.keys():
try:
setattr(self, attr_name, values.pop(attr_name))
except AttributeError:
pass
def validate(self):
"""Ensure that all fields' values are valid and that required fields
are present.
"""
# Get a list of tuples of field names and their current values
fields = [(field, getattr(self, name))
for name, field in self._fields.items()]
# Ensure that each field is matched to a valid value
for field, value in fields:
if value is not None:
try:
field._validate(value)
except (ValueError, AttributeError, AssertionError), e:
raise ValidationError('Invalid value for field of type "%s": %s'
% (field.__class__.__name__, value))
elif field.required:
raise ValidationError('Field "%s" is required' % field.name)
@classmethod
def _get_subclasses(cls):
"""Return a dictionary of all subclasses (found recursively).
"""
try:
subclasses = cls.__subclasses__()
except:
subclasses = cls.__subclasses__(cls)
all_subclasses = {}
for subclass in subclasses:
all_subclasses[subclass._class_name] = subclass
all_subclasses.update(subclass._get_subclasses())
return all_subclasses
@apply
def pk():
"""Primary key alias
"""
def fget(self):
return getattr(self, self._meta['id_field'])
def fset(self, value):
return setattr(self, self._meta['id_field'], value)
return property(fget, fset)
def __iter__(self):
return iter(self._fields)
def __getitem__(self, name):
"""Dictionary-style field access, return a field's value if present.
"""
try:
if name in self._fields:
return getattr(self, name)
except AttributeError:
pass
raise KeyError(name)
def __setitem__(self, name, value):
"""Dictionary-style field access, set a field's value.
"""
        # Ensure that the field exists before setting its value
if name not in self._fields:
raise KeyError(name)
return setattr(self, name, value)
def __contains__(self, name):
try:
val = getattr(self, name)
return val is not None
except AttributeError:
return False
def __len__(self):
return len(self._data)
def __repr__(self):
try:
u = unicode(self)
except (UnicodeEncodeError, UnicodeDecodeError):
u = '[Bad Unicode data]'
return u'<%s: %s>' % (self.__class__.__name__, u)
def __str__(self):
if hasattr(self, '__unicode__'):
return unicode(self).encode('utf-8')
return '%s object' % self.__class__.__name__
def to_mongo(self):
"""Return data dictionary ready for use with MongoDB.
"""
data = {}
for field_name, field in self._fields.items():
value = getattr(self, field_name, None)
if value is not None:
data[field.db_field] = field.to_mongo(value)
# Only add _cls and _types if allow_inheritance is not False
if not (hasattr(self, '_meta') and
self._meta.get('allow_inheritance', True) == False):
data['_cls'] = self._class_name
data['_types'] = self._superclasses.keys() + [self._class_name]
if data.has_key('_id') and not data['_id']:
del data['_id']
return data
@classmethod
def _from_son(cls, son):
"""Create an instance of a Document (subclass) from a PyMongo SON.
"""
# get the class name from the document, falling back to the given
# class if unavailable
class_name = son.get(u'_cls', cls._class_name)
data = dict((str(key), value) for key, value in son.items())
if '_types' in data:
del data['_types']
if '_cls' in data:
del data['_cls']
# Return correct subclass for document type
if class_name != cls._class_name:
subclasses = cls._get_subclasses()
if class_name not in subclasses:
# Type of document is probably more generic than the class
# that has been queried to return this SON
return None
cls = subclasses[class_name]
present_fields = data.keys()
for field_name, field in cls._fields.items():
if field.db_field in data:
value = data[field.db_field]
data[field_name] = (value if value is None
else field.to_python(value))
obj = cls(**data)
obj._present_fields = present_fields
return obj
def __eq__(self, other):
if isinstance(other, self.__class__) and hasattr(other, 'id'):
if self.id == other.id:
return True
return False
if sys.version_info < (2, 5):
# Prior to Python 2.5, Exception was an old-style class
def subclass_exception(name, parents, unused):
return types.ClassType(name, parents, {})
else:
def subclass_exception(name, parents, module):
return type(name, parents, {'__module__': module})


@@ -1,28 +0,0 @@
# Base module is split into several files for convenience. Files inside of
# this module should import from a specific submodule (e.g.
# `from mongoengine.base.document import BaseDocument`), but all of the
# other modules should import directly from the top-level module (e.g.
# `from mongoengine.base import BaseDocument`). This approach is cleaner and
# also helps with cyclical import errors.
from mongoengine.base.common import *
from mongoengine.base.datastructures import *
from mongoengine.base.document import *
from mongoengine.base.fields import *
from mongoengine.base.metaclasses import *
__all__ = (
# common
'UPDATE_OPERATORS', '_document_registry', 'get_document',
# datastructures
'BaseDict', 'BaseList', 'EmbeddedDocumentList', 'LazyReference',
# document
'BaseDocument',
# fields
'BaseField', 'ComplexBaseField', 'ObjectIdField', 'GeoJsonBaseField',
# metaclasses
'DocumentMetaclass', 'TopLevelDocumentMetaclass'
)


@@ -1,31 +0,0 @@
from mongoengine.errors import NotRegistered
__all__ = ('UPDATE_OPERATORS', 'get_document', '_document_registry')
UPDATE_OPERATORS = set(['set', 'unset', 'inc', 'dec', 'pop', 'push',
'push_all', 'pull', 'pull_all', 'add_to_set',
'set_on_insert', 'min', 'max', 'rename'])
_document_registry = {}
def get_document(name):
"""Get a document class by name."""
doc = _document_registry.get(name, None)
if not doc:
# Possible old style name
single_end = name.split('.')[-1]
compound_end = '.%s' % single_end
possible_match = [k for k in _document_registry.keys()
if k.endswith(compound_end) or k == single_end]
if len(possible_match) == 1:
doc = _document_registry.get(possible_match.pop(), None)
if not doc:
raise NotRegistered("""
`%s` has not been registered in the document registry.
Importing the document class automatically registers it, has it
been imported?
""".strip() % name)
return doc


@@ -1,487 +0,0 @@
import itertools
import weakref
from bson import DBRef
import six
from mongoengine.common import _import_class
from mongoengine.errors import DoesNotExist, MultipleObjectsReturned
__all__ = ('BaseDict', 'BaseList', 'EmbeddedDocumentList', 'LazyReference')
class BaseDict(dict):
"""A special dict so we can watch any changes."""
_dereferenced = False
_instance = None
_name = None
def __init__(self, dict_items, instance, name):
Document = _import_class('Document')
EmbeddedDocument = _import_class('EmbeddedDocument')
if isinstance(instance, (Document, EmbeddedDocument)):
self._instance = weakref.proxy(instance)
self._name = name
super(BaseDict, self).__init__(dict_items)
def __getitem__(self, key, *args, **kwargs):
value = super(BaseDict, self).__getitem__(key)
EmbeddedDocument = _import_class('EmbeddedDocument')
if isinstance(value, EmbeddedDocument) and value._instance is None:
value._instance = self._instance
elif not isinstance(value, BaseDict) and isinstance(value, dict):
value = BaseDict(value, None, '%s.%s' % (self._name, key))
super(BaseDict, self).__setitem__(key, value)
value._instance = self._instance
elif not isinstance(value, BaseList) and isinstance(value, list):
value = BaseList(value, None, '%s.%s' % (self._name, key))
super(BaseDict, self).__setitem__(key, value)
value._instance = self._instance
return value
def __setitem__(self, key, value, *args, **kwargs):
self._mark_as_changed(key)
return super(BaseDict, self).__setitem__(key, value)
def __delete__(self, *args, **kwargs):
self._mark_as_changed()
return super(BaseDict, self).__delete__(*args, **kwargs)
def __delitem__(self, key, *args, **kwargs):
self._mark_as_changed(key)
return super(BaseDict, self).__delitem__(key)
def __delattr__(self, key, *args, **kwargs):
self._mark_as_changed(key)
return super(BaseDict, self).__delattr__(key)
def __getstate__(self):
self.instance = None
self._dereferenced = False
return self
def __setstate__(self, state):
self = state
return self
def clear(self, *args, **kwargs):
self._mark_as_changed()
return super(BaseDict, self).clear()
def pop(self, *args, **kwargs):
self._mark_as_changed()
return super(BaseDict, self).pop(*args, **kwargs)
def popitem(self, *args, **kwargs):
self._mark_as_changed()
return super(BaseDict, self).popitem()
def setdefault(self, *args, **kwargs):
self._mark_as_changed()
return super(BaseDict, self).setdefault(*args, **kwargs)
def update(self, *args, **kwargs):
self._mark_as_changed()
return super(BaseDict, self).update(*args, **kwargs)
def _mark_as_changed(self, key=None):
if hasattr(self._instance, '_mark_as_changed'):
if key:
self._instance._mark_as_changed('%s.%s' % (self._name, key))
else:
self._instance._mark_as_changed(self._name)
class BaseList(list):
"""A special list so we can watch any changes."""
_dereferenced = False
_instance = None
_name = None
def __init__(self, list_items, instance, name):
Document = _import_class('Document')
EmbeddedDocument = _import_class('EmbeddedDocument')
if isinstance(instance, (Document, EmbeddedDocument)):
self._instance = weakref.proxy(instance)
self._name = name
super(BaseList, self).__init__(list_items)
def __getitem__(self, key, *args, **kwargs):
value = super(BaseList, self).__getitem__(key)
EmbeddedDocument = _import_class('EmbeddedDocument')
if isinstance(value, EmbeddedDocument) and value._instance is None:
value._instance = self._instance
elif not isinstance(value, BaseDict) and isinstance(value, dict):
value = BaseDict(value, None, '%s.%s' % (self._name, key))
super(BaseList, self).__setitem__(key, value)
value._instance = self._instance
elif not isinstance(value, BaseList) and isinstance(value, list):
value = BaseList(value, None, '%s.%s' % (self._name, key))
super(BaseList, self).__setitem__(key, value)
value._instance = self._instance
return value
def __iter__(self):
for i in six.moves.range(self.__len__()):
yield self[i]
def __setitem__(self, key, value, *args, **kwargs):
if isinstance(key, slice):
self._mark_as_changed()
else:
self._mark_as_changed(key)
return super(BaseList, self).__setitem__(key, value)
def __delitem__(self, key, *args, **kwargs):
self._mark_as_changed()
return super(BaseList, self).__delitem__(key)
def __setslice__(self, *args, **kwargs):
self._mark_as_changed()
return super(BaseList, self).__setslice__(*args, **kwargs)
def __delslice__(self, *args, **kwargs):
self._mark_as_changed()
return super(BaseList, self).__delslice__(*args, **kwargs)
def __getstate__(self):
self.instance = None
self._dereferenced = False
return self
def __setstate__(self, state):
self = state
return self
def __iadd__(self, other):
self._mark_as_changed()
return super(BaseList, self).__iadd__(other)
def __imul__(self, other):
self._mark_as_changed()
return super(BaseList, self).__imul__(other)
def append(self, *args, **kwargs):
self._mark_as_changed()
return super(BaseList, self).append(*args, **kwargs)
def extend(self, *args, **kwargs):
self._mark_as_changed()
return super(BaseList, self).extend(*args, **kwargs)
def insert(self, *args, **kwargs):
self._mark_as_changed()
return super(BaseList, self).insert(*args, **kwargs)
def pop(self, *args, **kwargs):
self._mark_as_changed()
return super(BaseList, self).pop(*args, **kwargs)
def remove(self, *args, **kwargs):
self._mark_as_changed()
return super(BaseList, self).remove(*args, **kwargs)
def reverse(self, *args, **kwargs):
self._mark_as_changed()
return super(BaseList, self).reverse()
def sort(self, *args, **kwargs):
self._mark_as_changed()
return super(BaseList, self).sort(*args, **kwargs)
def _mark_as_changed(self, key=None):
if hasattr(self._instance, '_mark_as_changed'):
if key:
self._instance._mark_as_changed(
'%s.%s' % (self._name, key % len(self))
)
else:
self._instance._mark_as_changed(self._name)
class EmbeddedDocumentList(BaseList):
@classmethod
def __match_all(cls, embedded_doc, kwargs):
"""Return True if a given embedded doc matches all the filter
        kwargs. If it doesn't, return False.
"""
for key, expected_value in kwargs.items():
doc_val = getattr(embedded_doc, key)
if doc_val != expected_value and six.text_type(doc_val) != expected_value:
return False
return True
@classmethod
def __only_matches(cls, embedded_docs, kwargs):
"""Return embedded docs that match the filter kwargs."""
if not kwargs:
return embedded_docs
return [doc for doc in embedded_docs if cls.__match_all(doc, kwargs)]
def __init__(self, list_items, instance, name):
super(EmbeddedDocumentList, self).__init__(list_items, instance, name)
self._instance = instance
def filter(self, **kwargs):
"""
Filters the list by only including embedded documents with the
given keyword arguments.
:param kwargs: The keyword arguments corresponding to the fields to
filter on. *Multiple arguments are treated as if they are ANDed
together.*
:return: A new ``EmbeddedDocumentList`` containing the matching
embedded documents.
Raises ``AttributeError`` if a given keyword is not a valid field for
the embedded document class.
"""
values = self.__only_matches(self, kwargs)
return EmbeddedDocumentList(values, self._instance, self._name)
def exclude(self, **kwargs):
"""
Filters the list by excluding embedded documents with the given
keyword arguments.
:param kwargs: The keyword arguments corresponding to the fields to
exclude on. *Multiple arguments are treated as if they are ANDed
together.*
:return: A new ``EmbeddedDocumentList`` containing the non-matching
embedded documents.
Raises ``AttributeError`` if a given keyword is not a valid field for
the embedded document class.
"""
exclude = self.__only_matches(self, kwargs)
values = [item for item in self if item not in exclude]
return EmbeddedDocumentList(values, self._instance, self._name)
def count(self):
"""
The number of embedded documents in the list.
:return: The length of the list, equivalent to the result of ``len()``.
"""
return len(self)
def get(self, **kwargs):
"""
Retrieves an embedded document determined by the given keyword
arguments.
:param kwargs: The keyword arguments corresponding to the fields to
search on. *Multiple arguments are treated as if they are ANDed
together.*
:return: The embedded document matched by the given keyword arguments.
Raises ``DoesNotExist`` if the arguments used to query an embedded
document returns no results. ``MultipleObjectsReturned`` if more
than one result is returned.
"""
values = self.__only_matches(self, kwargs)
if len(values) == 0:
raise DoesNotExist(
'%s matching query does not exist.' % self._name
)
elif len(values) > 1:
raise MultipleObjectsReturned(
'%d items returned, instead of 1' % len(values)
)
return values[0]
def first(self):
"""Return the first embedded document in the list, or ``None``
if empty.
"""
if len(self) > 0:
return self[0]
def create(self, **values):
"""
Creates a new embedded document and saves it to the database.
.. note::
The embedded document changes are not automatically saved
to the database after calling this method.
:param values: A dictionary of values for the embedded document.
:return: The new embedded document instance.
"""
name = self._name
EmbeddedClass = self._instance._fields[name].field.document_type_obj
self._instance[self._name].append(EmbeddedClass(**values))
return self._instance[self._name][-1]
def save(self, *args, **kwargs):
"""
Saves the ancestor document.
:param args: Arguments passed up to the ancestor Document's save
method.
:param kwargs: Keyword arguments passed up to the ancestor Document's
save method.
"""
self._instance.save(*args, **kwargs)
def delete(self):
"""
Deletes the embedded documents from the database.
.. note::
The embedded document changes are not automatically saved
to the database after calling this method.
:return: The number of entries deleted.
"""
values = list(self)
for item in values:
self._instance[self._name].remove(item)
return len(values)
def update(self, **update):
"""
Updates the embedded documents with the given update values.
.. note::
The embedded document changes are not automatically saved
to the database after calling this method.
:param update: A dictionary of update values to apply to each
embedded document.
:return: The number of entries updated.
"""
if len(update) == 0:
return 0
values = list(self)
for item in values:
for k, v in update.items():
setattr(item, k, v)
return len(values)
class StrictDict(object):
__slots__ = ()
_special_fields = set(['get', 'pop', 'iteritems', 'items', 'keys', 'create'])
_classes = {}
def __init__(self, **kwargs):
for k, v in kwargs.iteritems():
setattr(self, k, v)
def __getitem__(self, key):
key = '_reserved_' + key if key in self._special_fields else key
try:
return getattr(self, key)
except AttributeError:
raise KeyError(key)
def __setitem__(self, key, value):
key = '_reserved_' + key if key in self._special_fields else key
return setattr(self, key, value)
def __contains__(self, key):
return hasattr(self, key)
def get(self, key, default=None):
try:
return self[key]
except KeyError:
return default
def pop(self, key, default=None):
v = self.get(key, default)
try:
delattr(self, key)
except AttributeError:
pass
return v
def iteritems(self):
for key in self:
yield key, self[key]
def items(self):
return [(k, self[k]) for k in iter(self)]
def iterkeys(self):
return iter(self)
def keys(self):
return list(iter(self))
def __iter__(self):
return (key for key in self.__slots__ if hasattr(self, key))
def __len__(self):
return len(list(self.iteritems()))
def __eq__(self, other):
return self.items() == other.items()
def __ne__(self, other):
return self.items() != other.items()
@classmethod
def create(cls, allowed_keys):
allowed_keys_tuple = tuple(('_reserved_' + k if k in cls._special_fields else k) for k in allowed_keys)
allowed_keys = frozenset(allowed_keys_tuple)
if allowed_keys not in cls._classes:
class SpecificStrictDict(cls):
__slots__ = allowed_keys_tuple
def __repr__(self):
return '{%s}' % ', '.join('"{0!s}": {1!r}'.format(k, v) for k, v in self.items())
cls._classes[allowed_keys] = SpecificStrictDict
return cls._classes[allowed_keys]
class LazyReference(DBRef):
__slots__ = ('_cached_doc', 'passthrough', 'document_type')
def fetch(self, force=False):
if not self._cached_doc or force:
self._cached_doc = self.document_type.objects.get(pk=self.pk)
if not self._cached_doc:
raise DoesNotExist('Trying to dereference unknown document %s' % (self))
return self._cached_doc
@property
def pk(self):
return self.id
def __init__(self, document_type, pk, cached_doc=None, passthrough=False):
self.document_type = document_type
self._cached_doc = cached_doc
self.passthrough = passthrough
super(LazyReference, self).__init__(self.document_type._get_collection_name(), pk)
def __getitem__(self, name):
if not self.passthrough:
raise KeyError()
document = self.fetch()
return document[name]
def __getattr__(self, name):
if not object.__getattribute__(self, 'passthrough'):
raise AttributeError()
document = self.fetch()
try:
return document[name]
except KeyError:
raise AttributeError()
def __repr__(self):
return "<LazyReference(%s, %r)>" % (self.document_type, self.pk)

File diff suppressed because it is too large


@@ -1,639 +0,0 @@
import operator
import warnings
import weakref
from bson import DBRef, ObjectId, SON
import pymongo
import six
from mongoengine.base.common import UPDATE_OPERATORS
from mongoengine.base.datastructures import (BaseDict, BaseList,
EmbeddedDocumentList)
from mongoengine.common import _import_class
from mongoengine.errors import ValidationError
__all__ = ('BaseField', 'ComplexBaseField', 'ObjectIdField',
'GeoJsonBaseField')
class BaseField(object):
"""A base class for fields in a MongoDB document. Instances of this class
may be added to subclasses of `Document` to define a document's schema.
.. versionchanged:: 0.5 - added verbose and help text
"""
name = None
_geo_index = False
_auto_gen = False # Call `generate` to generate a value
_auto_dereference = True
# These track each time a Field instance is created. Used to retain order.
# The auto_creation_counter is used for fields that MongoEngine implicitly
# creates, creation_counter is used for all user-specified fields.
creation_counter = 0
auto_creation_counter = -1
def __init__(self, db_field=None, name=None, required=False, default=None,
unique=False, unique_with=None, primary_key=False,
validation=None, choices=None, null=False, sparse=False,
**kwargs):
"""
:param db_field: The database field to store this field in
(defaults to the name of the field)
:param name: Deprecated - use db_field
:param required: If the field is required. Whether it has to have a
value or not. Defaults to False.
:param default: (optional) The default value for this field if no value
has been set (or if the value has been unset). It can be a
callable.
:param unique: Is the field value unique or not. Defaults to False.
:param unique_with: (optional) The other field this field should be
unique with.
:param primary_key: Mark this field as the primary key. Defaults to False.
:param validation: (optional) A callable to validate the value of the
field. Generally this is deprecated in favour of the
`FIELD.validate` method
:param choices: (optional) The valid choices
        :param null: (optional) Whether the field value can be null. If no and
            there is a default value, then the default value is set.
:param sparse: (optional) `sparse=True` combined with `unique=True` and `required=False`
means that uniqueness won't be enforced for `None` values
:param **kwargs: (optional) Arbitrary indirection-free metadata for
this field can be supplied as additional keyword arguments and
accessed as attributes of the field. Must not conflict with any
existing attributes. Common metadata includes `verbose_name` and
`help_text`.
"""
self.db_field = (db_field or name) if not primary_key else '_id'
if name:
msg = 'Field\'s "name" attribute deprecated in favour of "db_field"'
warnings.warn(msg, DeprecationWarning)
self.required = required or primary_key
self.default = default
self.unique = bool(unique or unique_with)
self.unique_with = unique_with
self.primary_key = primary_key
self.validation = validation
self.choices = choices
self.null = null
self.sparse = sparse
self._owner_document = None
# Make sure db_field is a string (if it's explicitly defined).
if (
self.db_field is not None and
not isinstance(self.db_field, six.string_types)
):
raise TypeError('db_field should be a string.')
# Make sure db_field doesn't contain any forbidden characters.
if isinstance(self.db_field, six.string_types) and (
'.' in self.db_field or
'\0' in self.db_field or
self.db_field.startswith('$')
):
raise ValueError(
'field names cannot contain dots (".") or null characters '
'("\\0"), and they must not start with a dollar sign ("$").'
)
# Detect and report conflicts between metadata and base properties.
conflicts = set(dir(self)) & set(kwargs)
if conflicts:
raise TypeError('%s already has attribute(s): %s' % (
self.__class__.__name__, ', '.join(conflicts)))
# Assign metadata to the instance
# This efficient method is available because no __slots__ are defined.
self.__dict__.update(kwargs)
# Adjust the appropriate creation counter, and save our local copy.
if self.db_field == '_id':
self.creation_counter = BaseField.auto_creation_counter
BaseField.auto_creation_counter -= 1
else:
self.creation_counter = BaseField.creation_counter
BaseField.creation_counter += 1
def __get__(self, instance, owner):
"""Descriptor for retrieving a value from a field in a document.
"""
if instance is None:
# Document class being used rather than a document object
return self
# Get value from document instance if available
return instance._data.get(self.name)
def __set__(self, instance, value):
"""Descriptor for assigning a value to a field in a document.
"""
# If setting to None and there is a default
# Then set the value to the default value
if value is None:
if self.null:
value = None
elif self.default is not None:
value = self.default
if callable(value):
value = value()
if instance._initialised:
try:
if (self.name not in instance._data or
instance._data[self.name] != value):
instance._mark_as_changed(self.name)
except Exception:
# Values can't always be compared, e.g. naive and tz-aware datetimes,
# so mark the field as changed
instance._mark_as_changed(self.name)
EmbeddedDocument = _import_class('EmbeddedDocument')
if isinstance(value, EmbeddedDocument):
value._instance = weakref.proxy(instance)
elif isinstance(value, (list, tuple)):
for v in value:
if isinstance(v, EmbeddedDocument):
v._instance = weakref.proxy(instance)
instance._data[self.name] = value
def error(self, message='', errors=None, field_name=None):
"""Raise a ValidationError."""
field_name = field_name if field_name else self.name
raise ValidationError(message, errors=errors, field_name=field_name)
def to_python(self, value):
"""Convert a MongoDB-compatible type to a Python type."""
return value
def to_mongo(self, value):
"""Convert a Python type to a MongoDB-compatible type."""
return self.to_python(value)
def _to_mongo_safe_call(self, value, use_db_field=True, fields=None):
"""Helper method to call to_mongo with proper inputs."""
f_inputs = self.to_mongo.__code__.co_varnames
ex_vars = {}
if 'fields' in f_inputs:
ex_vars['fields'] = fields
if 'use_db_field' in f_inputs:
ex_vars['use_db_field'] = use_db_field
return self.to_mongo(value, **ex_vars)
def prepare_query_value(self, op, value):
"""Prepare a value that is being used in a query for PyMongo."""
if op in UPDATE_OPERATORS:
self.validate(value)
return value
def validate(self, value, clean=True):
"""Perform validation on a value."""
pass
def _validate_choices(self, value):
Document = _import_class('Document')
EmbeddedDocument = _import_class('EmbeddedDocument')
choice_list = self.choices
if isinstance(next(iter(choice_list)), (list, tuple)):
# next(iter) is useful for sets
choice_list = [k for k, _ in choice_list]
# Choices which are other types of Documents
if isinstance(value, (Document, EmbeddedDocument)):
if not any(isinstance(value, c) for c in choice_list):
self.error(
'Value must be an instance of %s' % (
six.text_type(choice_list)
)
)
# Choices which are types other than Documents
elif value not in choice_list:
self.error('Value must be one of %s' % six.text_type(choice_list))
def _validate(self, value, **kwargs):
# Check the Choices Constraint
if self.choices:
self._validate_choices(value)
# check validation argument
if self.validation is not None:
if callable(self.validation):
if not self.validation(value):
self.error('Value does not match custom validation method')
else:
raise ValueError('validation argument for "%s" must be a '
'callable.' % self.name)
self.validate(value, **kwargs)
@property
def owner_document(self):
return self._owner_document
def _set_owner_document(self, owner_document):
self._owner_document = owner_document
@owner_document.setter
def owner_document(self, owner_document):
self._set_owner_document(owner_document)
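# --- Illustrative example (not part of the original source): a minimal custom
# field showing the BaseField contract; `UpperCaseStringField` is hypothetical. ---
class UpperCaseStringField(BaseField):
    """Store strings upper-cased on save."""
    def to_python(self, value):
        # MongoDB -> Python: plain strings need no conversion.
        return value
    def to_mongo(self, value):
        # Python -> MongoDB: normalise to upper case.
        return six.text_type(value).upper()
    def validate(self, value):
        if not isinstance(value, six.string_types):
            self.error('UpperCaseStringField only accepts string values')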
class ComplexBaseField(BaseField):
"""Handles complex fields, such as lists / dictionaries.
Allows for nesting of embedded documents inside complex types.
Handles the lazy dereferencing of a queryset by lazily dereferencing all
items in a list / dict rather than one at a time.
.. versionadded:: 0.5
"""
field = None
def __get__(self, instance, owner):
"""Descriptor to automatically dereference references."""
if instance is None:
# Document class being used rather than a document object
return self
ReferenceField = _import_class('ReferenceField')
GenericReferenceField = _import_class('GenericReferenceField')
EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField')
dereference = (self._auto_dereference and
(self.field is None or isinstance(self.field,
(GenericReferenceField, ReferenceField))))
_dereference = _import_class('DeReference')()
self._auto_dereference = instance._fields[self.name]._auto_dereference
if instance._initialised and dereference and instance._data.get(self.name):
instance._data[self.name] = _dereference(
instance._data.get(self.name), max_depth=1, instance=instance,
name=self.name
)
value = super(ComplexBaseField, self).__get__(instance, owner)
# Convert lists / values so we can watch for any changes on them
if isinstance(value, (list, tuple)):
if (issubclass(type(self), EmbeddedDocumentListField) and
not isinstance(value, EmbeddedDocumentList)):
value = EmbeddedDocumentList(value, instance, self.name)
elif not isinstance(value, BaseList):
value = BaseList(value, instance, self.name)
instance._data[self.name] = value
elif isinstance(value, dict) and not isinstance(value, BaseDict):
value = BaseDict(value, instance, self.name)
instance._data[self.name] = value
if (self._auto_dereference and instance._initialised and
isinstance(value, (BaseList, BaseDict)) and
not value._dereferenced):
value = _dereference(
value, max_depth=1, instance=instance, name=self.name
)
value._dereferenced = True
instance._data[self.name] = value
return value
def to_python(self, value):
"""Convert a MongoDB-compatible type to a Python type."""
if isinstance(value, six.string_types):
return value
if hasattr(value, 'to_python'):
return value.to_python()
is_list = False
if not hasattr(value, 'items'):
try:
is_list = True
value = {k: v for k, v in enumerate(value)}
except TypeError:  # Not iterable: return the value as-is
return value
if self.field:
self.field._auto_dereference = self._auto_dereference
value_dict = {key: self.field.to_python(item)
for key, item in value.items()}
else:
Document = _import_class('Document')
value_dict = {}
for k, v in value.items():
if isinstance(v, Document):
# We need the id from the saved object to create the DBRef
if v.pk is None:
self.error('You can only reference documents once they'
' have been saved to the database')
collection = v._get_collection_name()
value_dict[k] = DBRef(collection, v.pk)
elif hasattr(v, 'to_python'):
value_dict[k] = v.to_python()
else:
value_dict[k] = self.to_python(v)
if is_list: # Convert back to a list
return [v for _, v in sorted(value_dict.items(),
key=operator.itemgetter(0))]
return value_dict
def to_mongo(self, value, use_db_field=True, fields=None):
"""Convert a Python type to a MongoDB-compatible type."""
Document = _import_class('Document')
EmbeddedDocument = _import_class('EmbeddedDocument')
GenericReferenceField = _import_class('GenericReferenceField')
if isinstance(value, six.string_types):
return value
if hasattr(value, 'to_mongo'):
if isinstance(value, Document):
return GenericReferenceField().to_mongo(value)
cls = value.__class__
val = value.to_mongo(use_db_field, fields)
# If it's a document that is not inherited add _cls
if isinstance(value, EmbeddedDocument):
val['_cls'] = cls.__name__
return val
is_list = False
if not hasattr(value, 'items'):
try:
is_list = True
value = {k: v for k, v in enumerate(value)}
except TypeError:  # Not iterable: return the value as-is
return value
if self.field:
value_dict = {
key: self.field._to_mongo_safe_call(item, use_db_field, fields)
for key, item in value.iteritems()
}
else:
value_dict = {}
for k, v in value.iteritems():
if isinstance(v, Document):
# We need the id from the saved object to create the DBRef
if v.pk is None:
self.error('You can only reference documents once they'
' have been saved to the database')
# If it's a document that is not inheritable it won't have any
# _cls data, so making it a generic reference allows us to
# dereference it later
meta = getattr(v, '_meta', {})
allow_inheritance = meta.get('allow_inheritance')
if not allow_inheritance and not self.field:
value_dict[k] = GenericReferenceField().to_mongo(v)
else:
collection = v._get_collection_name()
value_dict[k] = DBRef(collection, v.pk)
elif hasattr(v, 'to_mongo'):
cls = v.__class__
val = v.to_mongo(use_db_field, fields)
# If it's a document that is not inherited add _cls
if isinstance(v, (Document, EmbeddedDocument)):
val['_cls'] = cls.__name__
value_dict[k] = val
else:
value_dict[k] = self.to_mongo(v, use_db_field, fields)
if is_list: # Convert back to a list
return [v for _, v in sorted(value_dict.items(),
key=operator.itemgetter(0))]
return value_dict
def validate(self, value):
"""If field is provided ensure the value is valid."""
errors = {}
if self.field:
if hasattr(value, 'iteritems') or hasattr(value, 'items'):
sequence = value.iteritems()
else:
sequence = enumerate(value)
for k, v in sequence:
try:
self.field._validate(v)
except ValidationError as error:
errors[k] = error.errors or error
except (ValueError, AssertionError) as error:
errors[k] = error
if errors:
field_class = self.field.__class__.__name__
self.error('Invalid %s item (%s)' % (field_class, value),
errors=errors)
# Don't allow empty values if required
if self.required and not value:
self.error('Field is required and cannot be empty')
def prepare_query_value(self, op, value):
return self.to_mongo(value)
def lookup_member(self, member_name):
if self.field:
return self.field.lookup_member(member_name)
return None
def _set_owner_document(self, owner_document):
if self.field:
self.field.owner_document = owner_document
self._owner_document = owner_document
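# --- Illustrative sketch (not part of the original source): values returned by
# ComplexBaseField.__get__ are wrapped in BaseList/BaseDict so that in-place
# mutation marks the field as changed on the owner document. ---
def _example_change_tracking(doc):
    # assumes `doc` is an initialised document with a ListField named 'tags'
    tags = doc.tags     # returned as a BaseList bound to (doc, 'tags')
    tags.append('new')  # mutation calls doc._mark_as_changed('tags')
    return tags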
class ObjectIdField(BaseField):
"""A field wrapper around MongoDB's ObjectIds."""
def to_python(self, value):
try:
if not isinstance(value, ObjectId):
value = ObjectId(value)
except Exception:
pass
return value
def to_mongo(self, value):
if not isinstance(value, ObjectId):
try:
return ObjectId(six.text_type(value))
except Exception as e:
# e.message attribute has been deprecated since Python 2.6
self.error(six.text_type(e))
return value
def prepare_query_value(self, op, value):
return self.to_mongo(value)
def validate(self, value):
try:
ObjectId(six.text_type(value))
except Exception:
self.error('Invalid Object ID')
class GeoJsonBaseField(BaseField):
"""A geo json field storing a geojson style object.
.. versionadded:: 0.8
"""
_geo_index = pymongo.GEOSPHERE
_type = 'GeoBase'
def __init__(self, auto_index=True, *args, **kwargs):
"""
:param bool auto_index: Automatically create a '2dsphere' index.\
Defaults to `True`.
"""
self._name = '%sField' % self._type
if not auto_index:
self._geo_index = False
super(GeoJsonBaseField, self).__init__(*args, **kwargs)
def validate(self, value):
"""Validate the GeoJson object based on its type."""
if isinstance(value, dict):
if set(value.keys()) == set(['type', 'coordinates']):
if value['type'] != self._type:
self.error('%s type must be "%s"' %
(self._name, self._type))
return self.validate(value['coordinates'])
else:
self.error('%s can only accept a valid GeoJson dictionary'
' or lists of (x, y)' % self._name)
return
elif not isinstance(value, (list, tuple)):
self.error('%s can only accept lists of [x, y]' % self._name)
return
validate = getattr(self, '_validate_%s' % self._type.lower())
error = validate(value)
if error:
self.error(error)
def _validate_polygon(self, value, top_level=True):
if not isinstance(value, (list, tuple)):
return 'Polygons must contain list of linestrings'
# Quick and dirty validator
try:
value[0][0][0]
except (TypeError, IndexError):
return 'Invalid Polygon must contain at least one valid linestring'
errors = []
for val in value:
error = self._validate_linestring(val, False)
if not error and val[0] != val[-1]:
error = 'LineStrings must start and end at the same point'
if error and error not in errors:
errors.append(error)
if errors:
if top_level:
return 'Invalid Polygon:\n%s' % ', '.join(errors)
else:
return '%s' % ', '.join(errors)
def _validate_linestring(self, value, top_level=True):
"""Validate a linestring."""
if not isinstance(value, (list, tuple)):
return 'LineStrings must contain list of coordinate pairs'
# Quick and dirty validator
try:
value[0][0]
except (TypeError, IndexError):
return 'Invalid LineString must contain at least one valid point'
errors = []
for val in value:
error = self._validate_point(val)
if error and error not in errors:
errors.append(error)
if errors:
if top_level:
return 'Invalid LineString:\n%s' % ', '.join(errors)
else:
return '%s' % ', '.join(errors)
def _validate_point(self, value):
"""Validate each set of coords"""
if not isinstance(value, (list, tuple)):
return 'Points must be a list of coordinate pairs'
elif not len(value) == 2:
return 'Value (%s) must be a two-dimensional point' % repr(value)
elif (not isinstance(value[0], (float, int)) or
not isinstance(value[1], (float, int))):
return 'Both values (%s) in point must be float or int' % repr(value)
def _validate_multipoint(self, value):
if not isinstance(value, (list, tuple)):
return 'MultiPoint must be a list of Point'
# Quick and dirty validator
try:
value[0][0]
except (TypeError, IndexError):
return 'Invalid MultiPoint must contain at least one valid point'
errors = []
for point in value:
error = self._validate_point(point)
if error and error not in errors:
errors.append(error)
if errors:
return '%s' % ', '.join(errors)
def _validate_multilinestring(self, value, top_level=True):
if not isinstance(value, (list, tuple)):
return 'MultiLineString must be a list of LineString'
# Quick and dirty validator
try:
value[0][0][0]
except (TypeError, IndexError):
return 'Invalid MultiLineString must contain at least one valid linestring'
errors = []
for linestring in value:
error = self._validate_linestring(linestring, False)
if error and error not in errors:
errors.append(error)
if errors:
if top_level:
return 'Invalid MultiLineString:\n%s' % ', '.join(errors)
else:
return '%s' % ', '.join(errors)
def _validate_multipolygon(self, value):
if not isinstance(value, (list, tuple)):
return 'MultiPolygon must be a list of Polygon'
# Quick and dirty validator
try:
value[0][0][0][0]
except (TypeError, IndexError):
return 'Invalid MultiPolygon must contain at least one valid Polygon'
errors = []
for polygon in value:
error = self._validate_polygon(polygon, False)
if error and error not in errors:
errors.append(error)
if errors:
return 'Invalid MultiPolygon:\n%s' % ', '.join(errors)
def to_mongo(self, value):
if isinstance(value, dict):
return value
return SON([('type', self._type), ('coordinates', value)])
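# --- Illustrative data (not part of the original source): shapes accepted by
# the validators above. A point is [x, y]; a linestring is a list of points; a
# polygon is a list of closed rings (first point == last point). ---
EXAMPLE_POINT = [40.0, 5.0]
EXAMPLE_LINESTRING = [[40.0, 5.0], [41.0, 6.0]]
EXAMPLE_POLYGON = [[[40.0, 5.0], [41.0, 6.0], [42.0, 5.0], [40.0, 5.0]]]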

View File

@@ -1,452 +0,0 @@
import warnings
import six
from mongoengine.base.common import _document_registry
from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField
from mongoengine.common import _import_class
from mongoengine.errors import InvalidDocumentError
from mongoengine.queryset import (DO_NOTHING, DoesNotExist,
MultipleObjectsReturned,
QuerySetManager)
__all__ = ('DocumentMetaclass', 'TopLevelDocumentMetaclass')
class DocumentMetaclass(type):
"""Metaclass for all documents."""
# TODO lower complexity of this method
def __new__(cls, name, bases, attrs):
flattened_bases = cls._get_bases(bases)
super_new = super(DocumentMetaclass, cls).__new__
# If a base class just call super
metaclass = attrs.get('my_metaclass')
if metaclass and issubclass(metaclass, DocumentMetaclass):
return super_new(cls, name, bases, attrs)
attrs['_is_document'] = attrs.get('_is_document', False)
attrs['_cached_reference_fields'] = []
# EmbeddedDocuments could have meta data for inheritance
if 'meta' in attrs:
attrs['_meta'] = attrs.pop('meta')
# EmbeddedDocuments should inherit meta data
if '_meta' not in attrs:
meta = MetaDict()
for base in flattened_bases[::-1]:
# Add any mixin metadata from plain objects
if hasattr(base, 'meta'):
meta.merge(base.meta)
elif hasattr(base, '_meta'):
meta.merge(base._meta)
attrs['_meta'] = meta
attrs['_meta']['abstract'] = False # 789: EmbeddedDocument shouldn't inherit abstract
# If allow_inheritance is True, add a "_cls" string field to the attrs
if attrs['_meta'].get('allow_inheritance'):
StringField = _import_class('StringField')
attrs['_cls'] = StringField()
# Handle document Fields
# Merge all fields from subclasses
doc_fields = {}
for base in flattened_bases[::-1]:
if hasattr(base, '_fields'):
doc_fields.update(base._fields)
# Standard object mixin - merge in any Fields
if not hasattr(base, '_meta'):
base_fields = {}
for attr_name, attr_value in base.__dict__.iteritems():
if not isinstance(attr_value, BaseField):
continue
attr_value.name = attr_name
if not attr_value.db_field:
attr_value.db_field = attr_name
base_fields[attr_name] = attr_value
doc_fields.update(base_fields)
# Discover any document fields
field_names = {}
for attr_name, attr_value in attrs.iteritems():
if not isinstance(attr_value, BaseField):
continue
attr_value.name = attr_name
if not attr_value.db_field:
attr_value.db_field = attr_name
doc_fields[attr_name] = attr_value
# Count names to ensure no db_field redefinitions
field_names[attr_value.db_field] = field_names.get(
attr_value.db_field, 0) + 1
# Ensure no duplicate db_fields
duplicate_db_fields = [k for k, v in field_names.items() if v > 1]
if duplicate_db_fields:
msg = ('Multiple db_fields defined for: %s ' %
', '.join(duplicate_db_fields))
raise InvalidDocumentError(msg)
# Set _fields and db_field maps
attrs['_fields'] = doc_fields
attrs['_db_field_map'] = {k: getattr(v, 'db_field', k)
for k, v in doc_fields.items()}
attrs['_reverse_db_field_map'] = {
v: k for k, v in attrs['_db_field_map'].items()
}
attrs['_fields_ordered'] = tuple(i[1] for i in sorted(
(v.creation_counter, v.name)
for v in doc_fields.itervalues()))
#
# Set document hierarchy
#
superclasses = ()
class_name = [name]
for base in flattened_bases:
if (not getattr(base, '_is_base_cls', True) and
not getattr(base, '_meta', {}).get('abstract', True)):
# Collate hierarchy for _cls and _subclasses
class_name.append(base.__name__)
if hasattr(base, '_meta'):
# Warn if allow_inheritance isn't set and prevent
# inheritance of classes where inheritance is set to False
allow_inheritance = base._meta.get('allow_inheritance')
if not allow_inheritance and not base._meta.get('abstract'):
raise ValueError('Document %s may not be subclassed' %
base.__name__)
# Get superclasses from last base superclass
document_bases = [b for b in flattened_bases
if hasattr(b, '_class_name')]
if document_bases:
superclasses = document_bases[0]._superclasses
superclasses += (document_bases[0]._class_name, )
_cls = '.'.join(reversed(class_name))
attrs['_class_name'] = _cls
attrs['_superclasses'] = superclasses
attrs['_subclasses'] = (_cls, )
attrs['_types'] = attrs['_subclasses']  # TODO deprecate _types
# Create the new_class
new_class = super_new(cls, name, bases, attrs)
# Set _subclasses
for base in document_bases:
if _cls not in base._subclasses:
base._subclasses += (_cls,)
base._types = base._subclasses  # TODO deprecate _types
(Document, EmbeddedDocument, DictField,
CachedReferenceField) = cls._import_classes()
if issubclass(new_class, Document):
new_class._collection = None
# Add class to the _document_registry
_document_registry[new_class._class_name] = new_class
# In Python 2, User-defined methods objects have special read-only
# attributes 'im_func' and 'im_self' which contain the function obj
# and class instance object respectively. With Python 3 these special
# attributes have been replaced by __func__ and __self__. The Blinker
# module continues to use im_func and im_self, so the code below
# copies __func__ into im_func and __self__ into im_self for
# classmethod objects in Document derived classes.
if six.PY3:
for val in new_class.__dict__.values():
if isinstance(val, classmethod):
f = val.__get__(new_class)
if hasattr(f, '__func__') and not hasattr(f, 'im_func'):
f.__dict__.update({'im_func': getattr(f, '__func__')})
if hasattr(f, '__self__') and not hasattr(f, 'im_self'):
f.__dict__.update({'im_self': getattr(f, '__self__')})
# Handle delete rules
for field in new_class._fields.itervalues():
f = field
if f.owner_document is None:
f.owner_document = new_class
delete_rule = getattr(f, 'reverse_delete_rule', DO_NOTHING)
if isinstance(f, CachedReferenceField):
if issubclass(new_class, EmbeddedDocument):
raise InvalidDocumentError('CachedReferenceField is not '
'allowed in EmbeddedDocuments')
if not f.document_type:
raise InvalidDocumentError(
'Document is not available to sync')
if f.auto_sync:
f.start_listener()
f.document_type._cached_reference_fields.append(f)
if isinstance(f, ComplexBaseField) and hasattr(f, 'field'):
delete_rule = getattr(f.field,
'reverse_delete_rule',
DO_NOTHING)
if isinstance(f, DictField) and delete_rule != DO_NOTHING:
msg = ('Reverse delete rules are not supported '
'for %s (field: %s)' %
(field.__class__.__name__, field.name))
raise InvalidDocumentError(msg)
f = field.field
if delete_rule != DO_NOTHING:
if issubclass(new_class, EmbeddedDocument):
msg = ('Reverse delete rules are not supported for '
'EmbeddedDocuments (field: %s)' % field.name)
raise InvalidDocumentError(msg)
f.document_type.register_delete_rule(new_class,
field.name, delete_rule)
if (field.name and hasattr(Document, field.name) and
EmbeddedDocument not in new_class.mro()):
msg = ('%s is a document method and not a valid '
'field name' % field.name)
raise InvalidDocumentError(msg)
return new_class
def add_to_class(self, name, value):
setattr(self, name, value)
@classmethod
def _get_bases(cls, bases):
if isinstance(bases, BasesTuple):
return bases
seen = []
bases = cls.__get_bases(bases)
unique_bases = (b for b in bases if not (b in seen or seen.append(b)))
return BasesTuple(unique_bases)
@classmethod
def __get_bases(cls, bases):
for base in bases:
if base is object:
continue
yield base
for child_base in cls.__get_bases(base.__bases__):
yield child_base
@classmethod
def _import_classes(cls):
Document = _import_class('Document')
EmbeddedDocument = _import_class('EmbeddedDocument')
DictField = _import_class('DictField')
CachedReferenceField = _import_class('CachedReferenceField')
return Document, EmbeddedDocument, DictField, CachedReferenceField
class TopLevelDocumentMetaclass(DocumentMetaclass):
"""Metaclass for top-level documents (i.e. documents that have their own
collection in the database.
"""
def __new__(cls, name, bases, attrs):
flattened_bases = cls._get_bases(bases)
super_new = super(TopLevelDocumentMetaclass, cls).__new__
# Set default _meta data if base class, otherwise get user defined meta
if attrs.get('my_metaclass') == TopLevelDocumentMetaclass:
# defaults
attrs['_meta'] = {
'abstract': True,
'max_documents': None,
'max_size': None,
'ordering': [], # default ordering applied at runtime
'indexes': [], # indexes to be ensured at runtime
'id_field': None,
'index_background': False,
'index_drop_dups': False,
'index_opts': None,
'delete_rules': None,
# allow_inheritance can be True, False, and None. True means
# "allow inheritance", False means "don't allow inheritance",
# None means "do whatever your parent does, or don't allow
# inheritance if you're a top-level class".
'allow_inheritance': None,
}
attrs['_is_base_cls'] = True
attrs['_meta'].update(attrs.get('meta', {}))
else:
attrs['_meta'] = attrs.get('meta', {})
# Explicitly set abstract to false unless set
attrs['_meta']['abstract'] = attrs['_meta'].get('abstract', False)
attrs['_is_base_cls'] = False
# Set flag marking as document class - as opposed to an object mixin
attrs['_is_document'] = True
# Ensure queryset_class is inherited
if 'objects' in attrs:
manager = attrs['objects']
if hasattr(manager, 'queryset_class'):
attrs['_meta']['queryset_class'] = manager.queryset_class
# Clean up top level meta
if 'meta' in attrs:
del attrs['meta']
# Find the parent document class
parent_doc_cls = [b for b in flattened_bases
if b.__class__ == TopLevelDocumentMetaclass]
parent_doc_cls = None if not parent_doc_cls else parent_doc_cls[0]
# Prevent classes setting collection different to their parents
# If parent wasn't an abstract class
if (parent_doc_cls and 'collection' in attrs.get('_meta', {}) and
not parent_doc_cls._meta.get('abstract', True)):
msg = 'Trying to set a collection on a subclass (%s)' % name
warnings.warn(msg, SyntaxWarning)
del attrs['_meta']['collection']
# Ensure abstract documents have abstract bases
if attrs.get('_is_base_cls') or attrs['_meta'].get('abstract'):
if (parent_doc_cls and
not parent_doc_cls._meta.get('abstract', False)):
msg = 'Abstract document cannot have non-abstract base'
raise ValueError(msg)
return super_new(cls, name, bases, attrs)
# Merge base class metas.
# Uses a special MetaDict that handles various merging rules
meta = MetaDict()
for base in flattened_bases[::-1]:
# Add any mixin metadata from plain objects
if hasattr(base, 'meta'):
meta.merge(base.meta)
elif hasattr(base, '_meta'):
meta.merge(base._meta)
# Set collection in the meta if it's callable
if (getattr(base, '_is_document', False) and
not base._meta.get('abstract')):
collection = meta.get('collection', None)
if callable(collection):
meta['collection'] = collection(base)
meta.merge(attrs.get('_meta', {})) # Top level meta
# Only simple classes (i.e. direct subclasses of Document) may set
# allow_inheritance to False. If the base Document allows inheritance,
# none of its subclasses can override allow_inheritance to False.
simple_class = all([b._meta.get('abstract')
for b in flattened_bases if hasattr(b, '_meta')])
if (
not simple_class and
meta['allow_inheritance'] is False and
not meta['abstract']
):
raise ValueError('Only direct subclasses of Document may set '
'"allow_inheritance" to False')
# Set default collection name
if 'collection' not in meta:
meta['collection'] = ''.join('_%s' % c if c.isupper() else c
for c in name).strip('_').lower()
attrs['_meta'] = meta
# Call super and get the new class
new_class = super_new(cls, name, bases, attrs)
meta = new_class._meta
# Set index specifications
meta['index_specs'] = new_class._build_index_specs(meta['indexes'])
# If collection is a callable - call it and set the value
collection = meta.get('collection')
if callable(collection):
new_class._meta['collection'] = collection(new_class)
# Provide a default queryset unless exists or one has been set
if 'objects' not in dir(new_class):
new_class.objects = QuerySetManager()
# Validate the fields and set primary key if needed
for field_name, field in new_class._fields.iteritems():
if field.primary_key:
# Ensure only one primary key is set
current_pk = new_class._meta.get('id_field')
if current_pk and current_pk != field_name:
raise ValueError('Cannot override primary key field')
# Set primary key
if not current_pk:
new_class._meta['id_field'] = field_name
new_class.id = field
# Set primary key if not defined by the document
new_class._auto_id_field = getattr(parent_doc_cls,
'_auto_id_field', False)
if not new_class._meta.get('id_field'):
# After 0.10, pick a name that doesn't exist yet instead of overwriting
id_name, id_db_name = cls.get_auto_id_names(new_class)
new_class._auto_id_field = True
new_class._meta['id_field'] = id_name
new_class._fields[id_name] = ObjectIdField(db_field=id_db_name)
new_class._fields[id_name].name = id_name
new_class.id = new_class._fields[id_name]
new_class._db_field_map[id_name] = id_db_name
new_class._reverse_db_field_map[id_db_name] = id_name
# Prepend id field to _fields_ordered
new_class._fields_ordered = (id_name, ) + new_class._fields_ordered
# Merge in exceptions with parent hierarchy
exceptions_to_merge = (DoesNotExist, MultipleObjectsReturned)
module = attrs.get('__module__')
for exc in exceptions_to_merge:
name = exc.__name__
parents = tuple(getattr(base, name) for base in flattened_bases
if hasattr(base, name)) or (exc,)
# Create new exception and set to new_class
exception = type(name, parents, {'__module__': module})
setattr(new_class, name, exception)
return new_class
@classmethod
def get_auto_id_names(cls, new_class):
id_name, id_db_name = ('id', '_id')
if id_name not in new_class._fields and \
id_db_name not in (v.db_field for v in new_class._fields.values()):
return id_name, id_db_name
id_basename, id_db_basename, i = 'auto_id', '_auto_id', 0
while id_name in new_class._fields or \
id_db_name in (v.db_field for v in new_class._fields.values()):
id_name = '{0}_{1}'.format(id_basename, i)
id_db_name = '{0}_{1}'.format(id_db_basename, i)
i += 1
return id_name, id_db_name
class MetaDict(dict):
"""Custom dictionary for meta classes.
Handles the merging of set indexes
"""
_merge_options = ('indexes',)
def merge(self, new_options):
for k, v in new_options.iteritems():
if k in self._merge_options:
self[k] = self.get(k, []) + v
else:
self[k] = v
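# --- Illustrative example (not part of the original source): keys listed in
# _merge_options ('indexes') are concatenated, all other keys are overwritten. ---
def _example_metadict_merge():
    meta = MetaDict({'indexes': [('name',)], 'collection': 'base'})
    meta.merge({'indexes': [('age',)], 'collection': 'child'})
    assert meta['indexes'] == [('name',), ('age',)]
    assert meta['collection'] == 'child'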
class BasesTuple(tuple):
"""Special class to handle introspection of bases tuple in __new__"""
pass

View File

@@ -1,58 +0,0 @@
_class_registry_cache = {}
_field_list_cache = []
def _import_class(cls_name):
"""Cache mechanism for imports.
Due to complications of circular imports mongoengine needs to do lots of
inline imports in functions. This is inefficient as classes are
imported repeatedly throughout the mongoengine code. This is
compounded by some recursive functions requiring inline imports.
:mod:`mongoengine.common` provides a single point to import all these
classes. Circular imports aren't an issue as it dynamically imports the
class when first needed. Subsequent calls to the
:func:`~mongoengine.common._import_class` can then directly retrieve the
class from the :data:`mongoengine.common._class_registry_cache`.
"""
if cls_name in _class_registry_cache:
return _class_registry_cache.get(cls_name)
doc_classes = ('Document', 'DynamicEmbeddedDocument', 'EmbeddedDocument',
'MapReduceDocument')
# Field Classes
if not _field_list_cache:
from mongoengine.fields import __all__ as fields
_field_list_cache.extend(fields)
from mongoengine.base.fields import __all__ as fields
_field_list_cache.extend(fields)
field_classes = _field_list_cache
queryset_classes = ('OperationError',)
deref_classes = ('DeReference',)
if cls_name == 'BaseDocument':
from mongoengine.base import document as module
import_classes = ['BaseDocument']
elif cls_name in doc_classes:
from mongoengine import document as module
import_classes = doc_classes
elif cls_name in field_classes:
from mongoengine import fields as module
import_classes = field_classes
elif cls_name in queryset_classes:
from mongoengine import queryset as module
import_classes = queryset_classes
elif cls_name in deref_classes:
from mongoengine import dereference as module
import_classes = deref_classes
else:
raise ValueError('No import set for: %s' % cls_name)
for cls in import_classes:
_class_registry_cache[cls] = getattr(module, cls)
return _class_registry_cache.get(cls_name)
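# --- Illustrative usage (not part of the original source): ---
def _example_import_class():
    first = _import_class('Document')   # first call imports and caches
    second = _import_class('Document')  # later calls hit _class_registry_cache
    assert first is second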

View File

@@ -1,263 +1,71 @@
from pymongo import MongoClient, ReadPreference, uri_parser
import six
from pymongo import Connection
import multiprocessing
from mongoengine.python_support import IS_PYMONGO_3
__all__ = ['MongoEngineConnectionError', 'connect', 'register_connection',
'DEFAULT_CONNECTION_NAME']
__all__ = ['ConnectionError', 'connect']
DEFAULT_CONNECTION_NAME = 'default'
_connection_defaults = {
'host': 'localhost',
'port': 27017,
}
_connection = {}
_connection_settings = _connection_defaults.copy()
if IS_PYMONGO_3:
READ_PREFERENCE = ReadPreference.PRIMARY
else:
from pymongo import MongoReplicaSetClient
READ_PREFERENCE = False
_db_name = None
_db_username = None
_db_password = None
_db = {}
class MongoEngineConnectionError(Exception):
"""Error raised when the database connection can't be established or
when a connection with a requested alias can't be retrieved.
"""
class ConnectionError(Exception):
pass
_connection_settings = {}
_connections = {}
_dbs = {}
def register_connection(alias, name=None, host=None, port=None,
read_preference=READ_PREFERENCE,
username=None, password=None,
authentication_source=None,
authentication_mechanism=None,
**kwargs):
"""Add a connection.
:param alias: the name that will be used to refer to this connection
throughout MongoEngine
:param name: the name of the specific database to use
:param host: the host name of the :program:`mongod` instance to connect to
:param port: the port that the :program:`mongod` instance is running on
:param read_preference: The read preference for the collection
** Added pymongo 2.1
:param username: username to authenticate with
:param password: password to authenticate with
:param authentication_source: database to authenticate against
:param authentication_mechanism: database authentication mechanisms.
By default, use SCRAM-SHA-1 with MongoDB 3.0 and later,
MONGODB-CR (MongoDB Challenge Response protocol) for older servers.
:param is_mock: explicitly use mongomock for this connection
(can also be done by using `mongomock://` as db host prefix)
:param kwargs: ad-hoc parameters to be passed into the pymongo driver,
for example maxpoolsize, tz_aware, etc. See the documentation
for pymongo's `MongoClient` for a full list.
.. versionchanged:: 0.10.6 - added mongomock support
"""
conn_settings = {
'name': name or 'test',
'host': host or 'localhost',
'port': port or 27017,
'read_preference': read_preference,
'username': username,
'password': password,
'authentication_source': authentication_source,
'authentication_mechanism': authentication_mechanism
}
conn_host = conn_settings['host']
# Host can be a list or a string, so if string, force to a list.
if isinstance(conn_host, six.string_types):
conn_host = [conn_host]
resolved_hosts = []
for entity in conn_host:
# Handle Mongomock
if entity.startswith('mongomock://'):
conn_settings['is_mock'] = True
# `mongomock://` is not a valid url prefix and must be replaced by `mongodb://`
resolved_hosts.append(entity.replace('mongomock://', 'mongodb://', 1))
# Handle URI style connections, only updating connection params which
# were explicitly specified in the URI.
elif '://' in entity:
uri_dict = uri_parser.parse_uri(entity)
resolved_hosts.append(entity)
if uri_dict.get('database'):
conn_settings['name'] = uri_dict.get('database')
for param in ('read_preference', 'username', 'password'):
if uri_dict.get(param):
conn_settings[param] = uri_dict[param]
uri_options = uri_dict['options']
if 'replicaset' in uri_options:
conn_settings['replicaSet'] = uri_options['replicaset']
if 'authsource' in uri_options:
conn_settings['authentication_source'] = uri_options['authsource']
if 'authmechanism' in uri_options:
conn_settings['authentication_mechanism'] = uri_options['authmechanism']
else:
resolved_hosts.append(entity)
conn_settings['host'] = resolved_hosts
# Deprecated parameters that should not be passed on
kwargs.pop('slaves', None)
kwargs.pop('is_slave', None)
conn_settings.update(kwargs)
_connection_settings[alias] = conn_settings
def disconnect(alias=DEFAULT_CONNECTION_NAME):
"""Close the connection with a given alias."""
if alias in _connections:
get_connection(alias=alias).close()
del _connections[alias]
if alias in _dbs:
del _dbs[alias]
def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
"""Return a connection with a given alias."""
def _get_connection(reconnect=False):
global _connection
identity = get_identity()
# Connect to the database if not already connected
if reconnect:
disconnect(alias)
# If the requested alias already exists in the _connections list, return
# it immediately.
if alias in _connections:
return _connections[alias]
# Validate that the requested alias exists in the _connection_settings.
# Raise MongoEngineConnectionError if it doesn't.
if alias not in _connection_settings:
if alias == DEFAULT_CONNECTION_NAME:
msg = 'You have not defined a default connection'
else:
msg = 'Connection with alias "%s" has not been defined' % alias
raise MongoEngineConnectionError(msg)
def _clean_settings(settings_dict):
# set literal more efficient than calling set function
irrelevant_fields_set = {
'name', 'username', 'password',
'authentication_source', 'authentication_mechanism'
}
return {
k: v for k, v in settings_dict.items()
if k not in irrelevant_fields_set
}
# Retrieve a copy of the connection settings associated with the requested
# alias and remove the database name and authentication info (we don't
# care about them at this point).
conn_settings = _clean_settings(_connection_settings[alias].copy())
# Determine if we should use PyMongo's or mongomock's MongoClient.
is_mock = conn_settings.pop('is_mock', False)
if is_mock:
if _connection.get(identity) is None or reconnect:
try:
import mongomock
except ImportError:
raise RuntimeError('You need mongomock installed to mock '
'MongoEngine.')
connection_class = mongomock.MongoClient
else:
connection_class = MongoClient
_connection[identity] = Connection(**_connection_settings)
except:
raise ConnectionError('Cannot connect to the database')
return _connection[identity]
# For replica set connections with PyMongo 2.x, use
# MongoReplicaSetClient.
# TODO remove this once we stop supporting PyMongo 2.x.
if 'replicaSet' in conn_settings and not IS_PYMONGO_3:
connection_class = MongoReplicaSetClient
conn_settings['hosts_or_uri'] = conn_settings.pop('host', None)
def _get_db(reconnect=False):
global _db, _connection
identity = get_identity()
# Connect if not already connected
if _connection.get(identity) is None or reconnect:
_connection[identity] = _get_connection(reconnect=reconnect)
# hosts_or_uri has to be a string, so if 'host' was provided
# as a list, join its parts and separate them by ','
if isinstance(conn_settings['hosts_or_uri'], list):
conn_settings['hosts_or_uri'] = ','.join(
conn_settings['hosts_or_uri'])
if _db.get(identity) is None or reconnect:
# _db_name will be None if the user hasn't called connect()
if _db_name is None:
raise ConnectionError('Not connected to the database')
# Discard port since it can't be used on MongoReplicaSetClient
conn_settings.pop('port', None)
# Get DB from current connection and authenticate if necessary
_db[identity] = _connection[identity][_db_name]
if _db_username and _db_password:
_db[identity].authenticate(_db_username, _db_password)
# Iterate over all of the connection settings and if a connection with
# the same parameters is already established, use it instead of creating
# a new one.
existing_connection = None
connection_settings_iterator = (
(db_alias, settings.copy())
for db_alias, settings in _connection_settings.items()
)
for db_alias, connection_settings in connection_settings_iterator:
connection_settings = _clean_settings(connection_settings)
if conn_settings == connection_settings and _connections.get(db_alias):
existing_connection = _connections[db_alias]
break
return _db[identity]
# If an existing connection was found, assign it to the new alias
if existing_connection:
_connections[alias] = existing_connection
else:
# Otherwise, create the new connection for this alias. Raise
# MongoEngineConnectionError if it can't be established.
try:
_connections[alias] = connection_class(**conn_settings)
except Exception as e:
raise MongoEngineConnectionError(
'Cannot connect to database %s :\n%s' % (alias, e))
return _connections[alias]
def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
if reconnect:
disconnect(alias)
if alias not in _dbs:
conn = get_connection(alias)
conn_settings = _connection_settings[alias]
db = conn[conn_settings['name']]
auth_kwargs = {'source': conn_settings['authentication_source']}
if conn_settings['authentication_mechanism'] is not None:
auth_kwargs['mechanism'] = conn_settings['authentication_mechanism']
# Authenticate if necessary
if conn_settings['username'] and (conn_settings['password'] or
conn_settings['authentication_mechanism'] == 'MONGODB-X509'):
db.authenticate(conn_settings['username'], conn_settings['password'], **auth_kwargs)
_dbs[alias] = db
return _dbs[alias]
def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs):
"""Connect to the database specified by the 'db' argument.
Connection settings may be provided here as well if the database is not
running on the default port on localhost. If authentication is needed,
provide username and password arguments as well.
Multiple databases are supported by using aliases. Provide a separate
`alias` to connect to a different instance of :program:`mongod`.
See the docstring for `register_connection` for more details about all
supported kwargs.
.. versionchanged:: 0.6 - added multiple database support.
def get_identity():
identity = multiprocessing.current_process()._identity
identity = 0 if not identity else identity[0]
return identity
def connect(db, username=None, password=None, **kwargs):
"""Connect to the database specified by the 'db' argument. Connection
settings may be provided here as well if the database is not running on
the default port on localhost. If authentication is needed, provide
username and password arguments as well.
"""
if alias not in _connections:
register_connection(alias, db, **kwargs)
global _connection_settings, _db_name, _db_username, _db_password, _db
_connection_settings = dict(_connection_defaults, **kwargs)
_db_name = db
_db_username = username
_db_password = password
return _get_db(reconnect=True)
return get_connection(alias)
# Support old naming convention
_get_connection = get_connection
_get_db = get_db
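# --- Illustrative usage of the alias-based API above (not part of the original
# source; assumes a mongod listening on localhost:27017): ---
def _example_connect():
    connect('mydb')                    # registers and opens the 'default' alias
    connect('logdb', alias='logging')  # a second, independently aliased database
    return get_db(), get_db('logging')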

View File

@@ -1,217 +0,0 @@
from mongoengine.common import _import_class
from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
__all__ = ('switch_db', 'switch_collection', 'no_dereference',
'no_sub_classes', 'query_counter')
class switch_db(object):
"""switch_db alias context manager.
Example ::
# Register connections
register_connection('default', 'mongoenginetest')
register_connection('testdb-1', 'mongoenginetest2')
class Group(Document):
name = StringField()
Group(name='test').save() # Saves in the default db
with switch_db(Group, 'testdb-1') as Group:
Group(name='hello testdb!').save() # Saves in testdb-1
"""
def __init__(self, cls, db_alias):
"""Construct the switch_db context manager
:param cls: the class to change the registered db
:param db_alias: the name of the specific database to use
"""
self.cls = cls
self.collection = cls._get_collection()
self.db_alias = db_alias
self.ori_db_alias = cls._meta.get('db_alias', DEFAULT_CONNECTION_NAME)
def __enter__(self):
"""Change the db_alias and clear the cached collection."""
self.cls._meta['db_alias'] = self.db_alias
self.cls._collection = None
return self.cls
def __exit__(self, t, value, traceback):
"""Reset the db_alias and collection."""
self.cls._meta['db_alias'] = self.ori_db_alias
self.cls._collection = self.collection
class switch_collection(object):
"""switch_collection alias context manager.
Example ::
class Group(Document):
name = StringField()
Group(name='test').save() # Saves in the default db
with switch_collection(Group, 'group1') as Group:
Group(name='hello testdb!').save() # Saves in group1 collection
"""
def __init__(self, cls, collection_name):
"""Construct the switch_collection context manager.
:param cls: the class to change the registered db
:param collection_name: the name of the collection to use
"""
self.cls = cls
self.ori_collection = cls._get_collection()
self.ori_get_collection_name = cls._get_collection_name
self.collection_name = collection_name
def __enter__(self):
"""Change the _get_collection_name and clear the cached collection."""
@classmethod
def _get_collection_name(cls):
return self.collection_name
self.cls._get_collection_name = _get_collection_name
self.cls._collection = None
return self.cls
def __exit__(self, t, value, traceback):
"""Reset the collection."""
self.cls._collection = self.ori_collection
self.cls._get_collection_name = self.ori_get_collection_name
class no_dereference(object):
"""no_dereference context manager.
Turns off all dereferencing in Documents for the duration of the context
manager::
with no_dereference(Group) as Group:
Group.objects.find()
"""
def __init__(self, cls):
"""Construct the no_dereference context manager.
:param cls: the class to turn dereferencing off for
"""
self.cls = cls
ReferenceField = _import_class('ReferenceField')
GenericReferenceField = _import_class('GenericReferenceField')
ComplexBaseField = _import_class('ComplexBaseField')
self.deref_fields = [k for k, v in self.cls._fields.iteritems()
if isinstance(v, (ReferenceField,
GenericReferenceField,
ComplexBaseField))]
def __enter__(self):
"""Change the objects default and _auto_dereference values."""
for field in self.deref_fields:
self.cls._fields[field]._auto_dereference = False
return self.cls
def __exit__(self, t, value, traceback):
"""Reset the default and _auto_dereference values."""
for field in self.deref_fields:
self.cls._fields[field]._auto_dereference = True
return self.cls
class no_sub_classes(object):
"""no_sub_classes context manager.
Only returns instances of this class and no sub (inherited) classes::
with no_sub_classes(Group) as Group:
Group.objects.find()
"""
def __init__(self, cls):
"""Construct the no_sub_classes context manager.
:param cls: the class to turn off querying of subclasses for
"""
self.cls = cls
def __enter__(self):
"""Change the objects default and _auto_dereference values."""
self.cls._all_subclasses = self.cls._subclasses
self.cls._subclasses = (self.cls,)
return self.cls
def __exit__(self, t, value, traceback):
"""Reset the default and _auto_dereference values."""
self.cls._subclasses = self.cls._all_subclasses
delattr(self.cls, '_all_subclasses')
return self.cls
class query_counter(object):
"""Query_counter context manager to get the number of queries."""
def __init__(self):
"""Construct the query_counter."""
self.counter = 0
self.db = get_db()
def __enter__(self):
"""On every with block we need to drop the profile collection."""
self.db.set_profiling_level(0)
self.db.system.profile.drop()
self.db.set_profiling_level(2)
return self
def __exit__(self, t, value, traceback):
"""Reset the profiling level."""
self.db.set_profiling_level(0)
def __eq__(self, value):
"""== Compare querycounter."""
counter = self._get_count()
return value == counter
def __ne__(self, value):
"""!= Compare querycounter."""
return not self.__eq__(value)
def __lt__(self, value):
"""< Compare querycounter."""
return self._get_count() < value
def __le__(self, value):
"""<= Compare querycounter."""
return self._get_count() <= value
def __gt__(self, value):
"""> Compare querycounter."""
return self._get_count() > value
def __ge__(self, value):
""">= Compare querycounter."""
return self._get_count() >= value
def __int__(self):
"""int representation."""
return self._get_count()
def __repr__(self):
"""repr query_counter as the number of queries."""
return u"%s" % self._get_count()
def _get_count(self):
"""Get the number of queries."""
ignore_query = {'ns': {'$ne': '%s.system.indexes' % self.db.name}}
count = self.db.system.profile.find(ignore_query).count() - self.counter
self.counter += 1
return count
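# --- Illustrative usage sketch (not part of the original source); `Group` is a
# hypothetical Document class and a default connection is assumed. Exact counts
# depend on what the profiler records. ---
def _example_query_counter(Group):
    with query_counter() as q:
        Group.objects.first()  # issues one query against the profiled db
        assert q == 1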

View File

@@ -1,241 +0,0 @@
from bson import DBRef, SON
import six
from mongoengine.base import (BaseDict, BaseList, EmbeddedDocumentList,
TopLevelDocumentMetaclass, get_document)
from mongoengine.connection import get_db
from mongoengine.document import Document, EmbeddedDocument
from mongoengine.fields import DictField, ListField, MapField, ReferenceField
from mongoengine.queryset import QuerySet
class DeReference(object):
def __call__(self, items, max_depth=1, instance=None, name=None):
"""
Cheaply dereferences the items to a set depth.
Also handles the conversion of complex data types.
:param items: The iterable (dict, list, queryset) to be dereferenced.
:param max_depth: The maximum depth to recurse to
:param instance: The owning instance used for tracking changes by
:class:`~mongoengine.base.ComplexBaseField`
:param name: The name of the field, used for tracking changes by
:class:`~mongoengine.base.ComplexBaseField`
"""
if items is None or isinstance(items, six.string_types):
return items
# cheapest way to convert a queryset to a list
# list(queryset) uses a count() query to determine length
if isinstance(items, QuerySet):
items = [i for i in items]
self.max_depth = max_depth
doc_type = None
if instance and isinstance(instance, (Document, EmbeddedDocument,
TopLevelDocumentMetaclass)):
doc_type = instance._fields.get(name)
while hasattr(doc_type, 'field'):
doc_type = doc_type.field
if isinstance(doc_type, ReferenceField):
field = doc_type
doc_type = doc_type.document_type
is_list = not hasattr(items, 'items')
if is_list and all([i.__class__ == doc_type for i in items]):
return items
elif not is_list and all(
[i.__class__ == doc_type for i in items.values()]):
return items
elif not field.dbref:
if not hasattr(items, 'items'):
def _get_items(items):
new_items = []
for v in items:
if isinstance(v, list):
new_items.append(_get_items(v))
elif not isinstance(v, (DBRef, Document)):
new_items.append(field.to_python(v))
else:
new_items.append(v)
return new_items
items = _get_items(items)
else:
items = {
k: (v if isinstance(v, (DBRef, Document))
else field.to_python(v))
for k, v in items.iteritems()
}
self.reference_map = self._find_references(items)
self.object_map = self._fetch_objects(doc_type=doc_type)
return self._attach_objects(items, 0, instance, name)
def _find_references(self, items, depth=0):
"""
Recursively finds all db references to be dereferenced
:param items: The iterable (dict, list, queryset)
:param depth: The current depth of recursion
"""
reference_map = {}
if not items or depth >= self.max_depth:
return reference_map
# Determine the iterator to use
if isinstance(items, dict):
iterator = items.values()
else:
iterator = items
# Recursively find dbreferences
depth += 1
for item in iterator:
if isinstance(item, (Document, EmbeddedDocument)):
for field_name, field in item._fields.iteritems():
v = item._data.get(field_name, None)
if isinstance(v, DBRef):
reference_map.setdefault(field.document_type, set()).add(v.id)
elif isinstance(v, (dict, SON)) and '_ref' in v:
reference_map.setdefault(get_document(v['_cls']), set()).add(v['_ref'].id)
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
field_cls = getattr(getattr(field, 'field', None), 'document_type', None)
references = self._find_references(v, depth)
for key, refs in references.iteritems():
if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)):
key = field_cls
reference_map.setdefault(key, set()).update(refs)
elif isinstance(item, DBRef):
reference_map.setdefault(item.collection, set()).add(item.id)
elif isinstance(item, (dict, SON)) and '_ref' in item:
reference_map.setdefault(get_document(item['_cls']), set()).add(item['_ref'].id)
elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth:
references = self._find_references(item, depth - 1)
for key, refs in references.iteritems():
reference_map.setdefault(key, set()).update(refs)
return reference_map
def _fetch_objects(self, doc_type=None):
"""Fetch all references and convert to their document objects
"""
object_map = {}
for collection, dbrefs in self.reference_map.iteritems():
if hasattr(collection, 'objects'): # We have a document class for the refs
col_name = collection._get_collection_name()
refs = [dbref for dbref in dbrefs
if (col_name, dbref) not in object_map]
references = collection.objects.in_bulk(refs)
for key, doc in references.iteritems():
object_map[(col_name, key)] = doc
else: # Generic reference: use the refs data to convert to document
if isinstance(doc_type, (ListField, DictField, MapField,)):
continue
refs = [dbref for dbref in dbrefs
if (collection, dbref) not in object_map]
if doc_type:
references = doc_type._get_db()[collection].find({'_id': {'$in': refs}})
for ref in references:
doc = doc_type._from_son(ref)
object_map[(collection, doc.id)] = doc
else:
references = get_db()[collection].find({'_id': {'$in': refs}})
for ref in references:
if '_cls' in ref:
doc = get_document(ref['_cls'])._from_son(ref)
elif doc_type is None:
doc = get_document(
''.join(x.capitalize()
for x in collection.split('_')))._from_son(ref)
else:
doc = doc_type._from_son(ref)
object_map[(collection, doc.id)] = doc
return object_map
def _attach_objects(self, items, depth=0, instance=None, name=None):
"""
Recursively attaches the fetched objects to the given items
:param items: The iterable (dict, list, queryset)
:param depth: The current depth of recursion
:param instance: The owning instance used for tracking changes by
:class:`~mongoengine.base.ComplexBaseField`
:param name: The name of the field, used for tracking changes by
:class:`~mongoengine.base.ComplexBaseField`
"""
if not items:
if isinstance(items, (BaseDict, BaseList)):
return items
if instance:
if isinstance(items, dict):
return BaseDict(items, instance, name)
else:
return BaseList(items, instance, name)
if isinstance(items, (dict, SON)):
if '_ref' in items:
return self.object_map.get(
(items['_ref'].collection, items['_ref'].id), items)
elif '_cls' in items:
doc = get_document(items['_cls'])._from_son(items)
_cls = doc._data.pop('_cls', None)
del items['_cls']
doc._data = self._attach_objects(doc._data, depth, doc, None)
if _cls is not None:
doc._data['_cls'] = _cls
return doc
if not hasattr(items, 'items'):
is_list = True
list_type = BaseList
if isinstance(items, EmbeddedDocumentList):
list_type = EmbeddedDocumentList
as_tuple = isinstance(items, tuple)
iterator = enumerate(items)
data = []
else:
is_list = False
iterator = items.iteritems()
data = {}
depth += 1
for k, v in iterator:
if is_list:
data.append(v)
else:
data[k] = v
if k in self.object_map and not is_list:
data[k] = self.object_map[k]
elif isinstance(v, (Document, EmbeddedDocument)):
for field_name in v._fields:
v = data[k]._data.get(field_name, None)
if isinstance(v, DBRef):
data[k]._data[field_name] = self.object_map.get(
(v.collection, v.id), v)
elif isinstance(v, (dict, SON)) and '_ref' in v:
data[k]._data[field_name] = self.object_map.get(
(v['_ref'].collection, v['_ref'].id), v)
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
item_name = six.text_type('{0}.{1}.{2}').format(name, k, field_name)
data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=item_name)
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
item_name = '%s.%s' % (name, k) if name else name
data[k] = self._attach_objects(v, depth - 1, instance=instance, name=item_name)
elif hasattr(v, 'id'):
data[k] = self.object_map.get((v.collection, v.id), v)
if instance and name:
if is_list:
return tuple(data) if as_tuple else list_type(data, instance, name)
return BaseDict(data, instance, name)
depth += 1
return data
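# --- Illustrative usage sketch (not part of the original source); `group` is a
# hypothetical saved document with a ListField of references named 'members'.
# This mirrors what ComplexBaseField.__get__ does automatically. ---
def _example_dereference(group):
    deref = DeReference()
    return deref(group.members, max_depth=1, instance=group, name='members')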

View File

120
mongoengine/django/auth.py Normal file
View File

@@ -0,0 +1,120 @@
from mongoengine import *
from django.utils.hashcompat import md5_constructor, sha_constructor
from django.utils.encoding import smart_str
from django.contrib.auth.models import AnonymousUser
import datetime
REDIRECT_FIELD_NAME = 'next'
def get_hexdigest(algorithm, salt, raw_password):
raw_password, salt = smart_str(raw_password), smart_str(salt)
if algorithm == 'md5':
return md5_constructor(salt + raw_password).hexdigest()
elif algorithm == 'sha1':
return sha_constructor(salt + raw_password).hexdigest()
raise ValueError('Got unknown password algorithm type in password')
class User(Document):
"""A User document that aims to mirror most of the API specified by Django
at http://docs.djangoproject.com/en/dev/topics/auth/#users
"""
username = StringField(max_length=30, required=True)
first_name = StringField(max_length=30)
last_name = StringField(max_length=30)
email = StringField()
password = StringField(max_length=128)
is_staff = BooleanField(default=False)
is_active = BooleanField(default=True)
is_superuser = BooleanField(default=False)
last_login = DateTimeField(default=datetime.datetime.now)
date_joined = DateTimeField(default=datetime.datetime.now)
def __unicode__(self):
return self.username
def get_full_name(self):
"""Returns the user's first and last names, separated by a space.
"""
full_name = u'%s %s' % (self.first_name or '', self.last_name or '')
return full_name.strip()
def is_anonymous(self):
return False
def is_authenticated(self):
return True
def set_password(self, raw_password):
"""Sets the user's password - always use this rather than directly
assigning to :attr:`~mongoengine.django.auth.User.password` as the
password is hashed before storage.
"""
from random import random
algo = 'sha1'
salt = get_hexdigest(algo, str(random()), str(random()))[:5]
hash = get_hexdigest(algo, salt, raw_password)
self.password = '%s$%s$%s' % (algo, salt, hash)
self.save()
return self
def check_password(self, raw_password):
"""Checks the user's password against a provided password - always use
this rather than directly comparing to
:attr:`~mongoengine.django.auth.User.password` as the password is
hashed before storage.
"""
algo, salt, hash = self.password.split('$')
return hash == get_hexdigest(algo, salt, raw_password)
@classmethod
def create_user(cls, username, password, email=None):
"""Create (and save) a new user with the given username, password and
email address.
"""
now = datetime.datetime.now()
# Normalize the address by lowercasing the domain part of the email
# address.
if email is not None:
try:
email_name, domain_part = email.strip().split('@', 1)
except ValueError:
pass
else:
email = '@'.join([email_name, domain_part.lower()])
user = User(username=username, email=email, date_joined=now)
user.set_password(password)
user.save()
return user
def get_and_delete_messages(self):
return []
class MongoEngineBackend(object):
"""Authenticate using MongoEngine and mongoengine.django.auth.User.
"""
def authenticate(self, username=None, password=None):
user = User.objects(username=username).first()
if user:
if password and user.check_password(password):
return user
return None
def get_user(self, user_id):
return User.objects.with_id(user_id)
def get_user(userid):
"""Returns a User object from an id (User.id). Django's equivalent takes
request, but taking an id instead leaves it up to the developer to store
the id in any way they want (session, signed cookie, etc.)
"""
if not userid:
return AnonymousUser()
return MongoEngineBackend().get_user(userid) or AnonymousUser()
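
A small usage sketch for the classes above (hypothetical credentials and database name; assumes an open connection):

# Sketch: creating a user and authenticating via MongoEngineBackend.
from mongoengine import connect
from mongoengine.django.auth import User, MongoEngineBackend

connect('example_db')  # hypothetical database name

user = User.create_user('bob', 's3cret', email='Bob@Example.COM')
assert user.email == 'Bob@example.com'  # only the domain part is lowercased

backend = MongoEngineBackend()
assert backend.authenticate(username='bob', password='s3cret') is not None
assert backend.authenticate(username='bob', password='wrong') is None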


@@ -0,0 +1,63 @@
from django.contrib.sessions.backends.base import SessionBase, CreateError
from django.core.exceptions import SuspiciousOperation
from django.utils.encoding import force_unicode
from mongoengine.document import Document
from mongoengine import fields
from mongoengine.queryset import OperationError
from datetime import datetime
class MongoSession(Document):
session_key = fields.StringField(primary_key=True, max_length=40)
session_data = fields.StringField()
expire_date = fields.DateTimeField()
meta = {'collection': 'django_session', 'allow_inheritance': False}
class SessionStore(SessionBase):
"""A MongoEngine-based session store for Django.
"""
def load(self):
try:
s = MongoSession.objects(session_key=self.session_key,
expire_date__gt=datetime.now())[0]
return self.decode(force_unicode(s.session_data))
except (IndexError, SuspiciousOperation):
self.create()
return {}
def exists(self, session_key):
return bool(MongoSession.objects(session_key=session_key).first())
def create(self):
while True:
self.session_key = self._get_new_session_key()
try:
self.save(must_create=True)
except CreateError:
continue
self.modified = True
self._session_cache = {}
return
def save(self, must_create=False):
s = MongoSession(session_key=self.session_key)
s.session_data = self.encode(self._get_session(no_load=must_create))
s.expire_date = self.get_expiry_date()
try:
s.save(force_insert=must_create, safe=True)
except OperationError:
if must_create:
raise CreateError
raise
def delete(self, session_key=None):
if session_key is None:
if self.session_key is None:
return
session_key = self.session_key
MongoSession.objects(session_key=session_key).delete()
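
To plug this store into Django, point SESSION_ENGINE at the module; a sketch, assuming it is importable as mongoengine.django.sessions:

# settings.py (sketch)
SESSION_ENGINE = 'mongoengine.django.sessions'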


@@ -0,0 +1,45 @@
from django.http import Http404
from mongoengine.queryset import QuerySet
from mongoengine.base import BaseDocument
def _get_queryset(cls):
"""Inspired by django.shortcuts.*"""
if isinstance(cls, QuerySet):
return cls
else:
return cls.objects
def get_document_or_404(cls, *args, **kwargs):
"""
Uses get() to return a document, or raises an Http404 exception if the document
does not exist.
cls may be a Document or QuerySet object. All other passed
arguments and keyword arguments are used in the get() query.
Note: as with get(), a MultipleObjectsReturned exception will be raised if more
than one object is found.
Inspired by django.shortcuts.*
"""
queryset = _get_queryset(cls)
try:
return queryset.get(*args, **kwargs)
except queryset._document.DoesNotExist:
raise Http404('No %s matches the given query.' % queryset._document._class_name)
def get_list_or_404(cls, *args, **kwargs):
"""
Uses filter() to return a list of documents, or raises an Http404 exception if
the list is empty.
cls may be a Document or QuerySet object. All other passed
arguments and keyword arguments are used in the filter() query.
Inspired by django.shortcuts.*
"""
queryset = _get_queryset(cls)
obj_list = list(queryset.filter(*args, **kwargs))
if not obj_list:
raise Http404('No %s matches the given query.' % queryset._document._class_name)
return obj_list
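
A view-level sketch of the shortcut above (BlogPost is a hypothetical example document):

# Sketch: using get_document_or_404 in a Django view.
from django.http import HttpResponse
from mongoengine import Document, StringField
from mongoengine.django.shortcuts import get_document_or_404

class BlogPost(Document):  # hypothetical example document
    slug = StringField()
    title = StringField()

def post_detail(request, slug):
    # Returns the matching post, or raises Http404 if none exists.
    post = get_document_or_404(BlogPost, slug=slug)
    return HttpResponse(post.title)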


@@ -0,0 +1,112 @@
import os
import itertools
import urlparse
from mongoengine import *
from django.conf import settings
from django.core.files.storage import Storage
from django.core.exceptions import ImproperlyConfigured
class FileDocument(Document):
"""A document used to store a single file in GridFS.
"""
file = FileField()
class GridFSStorage(Storage):
"""A custom storage backend to store files in GridFS
"""
def __init__(self, base_url=None):
if base_url is None:
base_url = settings.MEDIA_URL
self.base_url = base_url
self.document = FileDocument
self.field = 'file'
def delete(self, name):
"""Deletes the specified file from the storage system.
"""
if self.exists(name):
doc = self._get_doc_with_name(name)
field = getattr(doc, self.field)
field.delete() # Delete the file from GridFS
doc.delete() # Delete the FileDocument itself
def exists(self, name):
"""Returns True if a file referenced by the given name already exists in the
storage system, or False if the name is available for a new file.
"""
doc = self._get_doc_with_name(name)
if doc:
field = getattr(doc, self.field)
return bool(field.name)
else:
return False
def listdir(self, path=None):
"""Lists the contents of the specified path, returning a 2-tuple of lists;
the first item being directories, the second item being files.
"""
def name(doc):
return getattr(doc, self.field).name
docs = self.document.objects
return [], [name(d) for d in docs if name(d)]
def size(self, name):
"""Returns the total size, in bytes, of the file specified by name.
"""
doc = self._get_doc_with_name(name)
if doc:
return getattr(doc, self.field).length
else:
raise ValueError("No such file or directory: '%s'" % name)
def url(self, name):
"""Returns an absolute URL where the file's contents can be accessed
directly by a web browser.
"""
if self.base_url is None:
raise ValueError("This file is not accessible via a URL.")
return urlparse.urljoin(self.base_url, name).replace('\\', '/')
def _get_doc_with_name(self, name):
"""Find the document in the store with the given name
"""
docs = self.document.objects
doc = [d for d in docs if getattr(d, self.field).name == name]
if doc:
return doc[0]
else:
return None
def _open(self, name, mode='rb'):
doc = self._get_doc_with_name(name)
if doc:
return getattr(doc, self.field)
else:
raise ValueError("No file found with the name '%s'." % name)
def get_available_name(self, name):
"""Returns a filename that's free on the target storage system, and
available for new content to be written to.
"""
file_root, file_ext = os.path.splitext(name)
# If the filename already exists, add an underscore and a number (before
# the file extension, if one exists) to the filename until the generated
# filename doesn't exist.
count = itertools.count(1)
while self.exists(name):
# file_ext includes the dot.
name = "%s_%s%s" % (file_root, next(count), file_ext)
return name
def _save(self, name, content):
doc = self.document()
getattr(doc, self.field).put(content, filename=name)
doc.save()
return name
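
A round-trip sketch for the storage backend (hypothetical database name; assumes a configured connection and either MEDIA_URL or an explicit base_url as below):

# Sketch: saving, inspecting, and deleting a file via GridFSStorage.
from django.core.files.base import ContentFile
from mongoengine import connect
from mongoengine.django.storage import GridFSStorage

connect('example_db')  # hypothetical database name

storage = GridFSStorage(base_url='/media/')
name = storage.save('hello.txt', ContentFile(b'hello world'))  # stores in GridFS

assert storage.exists(name)
print(storage.size(name))  # 11
print(storage.url(name))   # /media/hello.txt
storage.delete(name)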


@@ -0,0 +1,21 @@
#coding: utf-8
from django.test import TestCase
from django.conf import settings
from mongoengine import connect
class MongoTestCase(TestCase):
"""
TestCase class that clears the collections between tests
"""
db_name = 'test_%s' % settings.MONGO_DATABASE_NAME
def __init__(self, methodName='runTest'):
self.db = connect(self.db_name)
super(MongoTestCase, self).__init__(methodName)
def _post_teardown(self):
super(MongoTestCase, self)._post_teardown()
for collection in self.db.collection_names():
if collection == 'system.indexes':
continue
self.db.drop_collection(collection)

File diff suppressed because it is too large.


@@ -1,145 +0,0 @@
from collections import defaultdict
import six
__all__ = ('NotRegistered', 'InvalidDocumentError', 'LookUpError',
'DoesNotExist', 'MultipleObjectsReturned', 'InvalidQueryError',
'OperationError', 'NotUniqueError', 'FieldDoesNotExist',
'ValidationError', 'SaveConditionError')
class NotRegistered(Exception):
pass
class InvalidDocumentError(Exception):
pass
class LookUpError(AttributeError):
pass
class DoesNotExist(Exception):
pass
class MultipleObjectsReturned(Exception):
pass
class InvalidQueryError(Exception):
pass
class OperationError(Exception):
pass
class NotUniqueError(OperationError):
pass
class SaveConditionError(OperationError):
pass
class FieldDoesNotExist(Exception):
"""Raised when trying to set a field
not declared in a :class:`~mongoengine.Document`
or an :class:`~mongoengine.EmbeddedDocument`.
To avoid this behavior on data loading,
you should set :attr:`strict` to ``False``
in the :attr:`meta` dictionary.
"""
class ValidationError(AssertionError):
"""Validation exception.
May represent an error validating a field or a
document containing fields with validation errors.
:ivar errors: A dictionary of errors for fields within this
document or list, or None if the error is for an
individual field.
"""
errors = {}
field_name = None
_message = None
def __init__(self, message='', **kwargs):
self.errors = kwargs.get('errors', {})
self.field_name = kwargs.get('field_name')
self.message = message
def __str__(self):
return six.text_type(self.message)
def __repr__(self):
return '%s(%s,)' % (self.__class__.__name__, self.message)
def __getattribute__(self, name):
message = super(ValidationError, self).__getattribute__(name)
if name == 'message':
if self.field_name:
message = '%s' % message
if self.errors:
message = '%s(%s)' % (message, self._format_errors())
return message
def _get_message(self):
return self._message
def _set_message(self, message):
self._message = message
message = property(_get_message, _set_message)
def to_dict(self):
"""Returns a dictionary of all errors within a document
Keys are field names or list indices and values are the
validation error messages, or a nested dictionary of
errors for an embedded document or list.
"""
def build_dict(source):
errors_dict = {}
if not source:
return errors_dict
if isinstance(source, dict):
for field_name, error in source.iteritems():
errors_dict[field_name] = build_dict(error)
elif isinstance(source, ValidationError) and source.errors:
return build_dict(source.errors)
else:
return six.text_type(source)
return errors_dict
if not self.errors:
return {}
return build_dict(self.errors)
def _format_errors(self):
"""Returns a string listing all errors within a document"""
def generate_key(value, prefix=''):
if isinstance(value, list):
value = ' '.join([generate_key(k) for k in value])
elif isinstance(value, dict):
value = ' '.join(
[generate_key(v, k) for k, v in value.iteritems()])
results = '%s.%s' % (prefix, value) if prefix else value
return results
error_dict = defaultdict(list)
for k, v in self.to_dict().iteritems():
error_dict[generate_key(v)].append(k)
return ' '.join(['%s: %s' % (k, v) for k, v in error_dict.iteritems()])
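
A sketch of how these pieces surface to callers (no database access is needed for validation; the exact message text may vary):

# Sketch: inspecting aggregated validation errors via to_dict().
from mongoengine import Document, StringField, ValidationError

class Person(Document):
    name = StringField(required=True)

try:
    Person().validate()
except ValidationError as e:
    # to_dict() maps field names to messages, e.g. {'name': 'Field is required'}.
    print(e.to_dict())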

File diff suppressed because it is too large.


@@ -1,25 +0,0 @@
"""
Helper functions, constants, and types to aid with Python v2.7 - v3.x and
PyMongo v2.7 - v3.x support.
"""
import pymongo
import six
if pymongo.version_tuple[0] < 3:
IS_PYMONGO_3 = False
else:
IS_PYMONGO_3 = True
# six.BytesIO resolves to StringIO.StringIO in Py2 and io.BytesIO in Py3.
StringIO = six.BytesIO
# Additionally for Py2, try to use the faster cStringIO, if available
if not six.PY3:
try:
import cStringIO
except ImportError:
pass
else:
StringIO = cStringIO.StringIO

mongoengine/queryset.py (new file)

File diff suppressed because it is too large.


@@ -1,17 +0,0 @@
from mongoengine.errors import *
from mongoengine.queryset.field_list import *
from mongoengine.queryset.manager import *
from mongoengine.queryset.queryset import *
from mongoengine.queryset.transform import *
from mongoengine.queryset.visitor import *
# Expose just the public subset of all imported objects and constants.
__all__ = (
'QuerySet', 'QuerySetNoCache', 'Q', 'queryset_manager', 'QuerySetManager',
'QueryFieldList', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL',
# Errors that might be related to a queryset, mostly here for backward
# compatibility
'DoesNotExist', 'InvalidQueryError', 'MultipleObjectsReturned',
'NotUniqueError', 'OperationError',
)

File diff suppressed because it is too large.


@@ -1,85 +0,0 @@
__all__ = ('QueryFieldList',)
class QueryFieldList(object):
"""Object that handles combinations of .only() and .exclude() calls"""
ONLY = 1
EXCLUDE = 0
def __init__(self, fields=None, value=ONLY, always_include=None, _only_called=False):
"""The QueryFieldList builder
:param fields: A list of fields used in `.only()` or `.exclude()`
:param value: How to handle the fields; either `ONLY` or `EXCLUDE`
:param always_include: Any fields to always_include eg `_cls`
:param _only_called: Has `.only()` been called? If so, subsequent fields are
unioned with the existing set; otherwise they replace it.
"""
self.value = value
self.fields = set(fields or [])
self.always_include = set(always_include or [])
self._id = None
self._only_called = _only_called
self.slice = {}
def __add__(self, f):
if isinstance(f.value, dict):
for field in f.fields:
self.slice[field] = f.value
if not self.fields:
self.fields = f.fields
elif not self.fields:
self.fields = f.fields
self.value = f.value
self.slice = {}
elif self.value is self.ONLY and f.value is self.ONLY:
self._clean_slice()
if self._only_called:
self.fields = self.fields.union(f.fields)
else:
self.fields = f.fields
elif self.value is self.EXCLUDE and f.value is self.EXCLUDE:
self.fields = self.fields.union(f.fields)
self._clean_slice()
elif self.value is self.ONLY and f.value is self.EXCLUDE:
self.fields -= f.fields
self._clean_slice()
elif self.value is self.EXCLUDE and f.value is self.ONLY:
self.value = self.ONLY
self.fields = f.fields - self.fields
self._clean_slice()
if '_id' in f.fields:
self._id = f.value
if self.always_include:
if self.value is self.ONLY and self.fields:
if sorted(self.slice.keys()) != sorted(self.fields):
self.fields = self.fields.union(self.always_include)
else:
self.fields -= self.always_include
if getattr(f, '_only_called', False):
self._only_called = True
return self
def __nonzero__(self):
return bool(self.fields)
def as_dict(self):
field_list = {field: self.value for field in self.fields}
if self.slice:
field_list.update(self.slice)
if self._id is not None:
field_list['_id'] = self._id
return field_list
def reset(self):
self.fields = set([])
self.slice = {}
self.value = self.ONLY
def _clean_slice(self):
if self.slice:
for field in set(self.slice.keys()) - self.fields:
del self.slice[field]
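
A standalone sketch of the combination rules implemented by __add__ above:

# Sketch: only('a', 'b') followed by exclude('b') leaves a projection of {'a': 1}.
from mongoengine.queryset.field_list import QueryFieldList

only_ab = QueryFieldList(fields=['a', 'b'], value=QueryFieldList.ONLY,
                         _only_called=True)
exclude_b = QueryFieldList(fields=['b'], value=QueryFieldList.EXCLUDE)

combined = only_ab + exclude_b
print(combined.as_dict())  # {'a': 1}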


@@ -1,57 +0,0 @@
from functools import partial
from mongoengine.queryset.queryset import QuerySet
__all__ = ('queryset_manager', 'QuerySetManager')
class QuerySetManager(object):
"""
The default QuerySet Manager.
Custom QuerySet Manager functions can extend this class and users can
add extra queryset functionality. Any custom manager methods must accept a
:class:`~mongoengine.Document` class as its first argument, and a
:class:`~mongoengine.queryset.QuerySet` as its second argument.
The method function should return a :class:`~mongoengine.queryset.QuerySet`,
probably the same one that was passed in, but modified in some way.
"""
get_queryset = None
default = QuerySet
def __init__(self, queryset_func=None):
if queryset_func:
self.get_queryset = queryset_func
def __get__(self, instance, owner):
"""Descriptor for instantiating a new QuerySet object when
Document.objects is accessed.
"""
if instance is not None:
# Document object being used rather than a document class
return self
# owner is the document that contains the QuerySetManager
queryset_class = owner._meta.get('queryset_class', self.default)
queryset = queryset_class(owner, owner._get_collection())
if self.get_queryset:
arg_count = self.get_queryset.func_code.co_argcount
if arg_count == 1:
queryset = self.get_queryset(queryset)
elif arg_count == 2:
queryset = self.get_queryset(owner, queryset)
else:
queryset = partial(self.get_queryset, owner, queryset)
return queryset
def queryset_manager(func):
"""Decorator that allows you to define custom QuerySet managers on
:class:`~mongoengine.Document` classes. The manager must be a function that
accepts a :class:`~mongoengine.Document` class as its first argument, and a
:class:`~mongoengine.queryset.QuerySet` as its second argument. The method
function should return a :class:`~mongoengine.queryset.QuerySet`, probably
the same one that was passed in, but modified in some way.
"""
return QuerySetManager(func)
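
A typical use of the decorator (illustrative document; this mirrors the pattern the docstring describes):

# Sketch: defining a custom manager with @queryset_manager.
from mongoengine import Document, BooleanField, StringField
from mongoengine.queryset import queryset_manager

class BlogPost(Document):
    title = StringField()
    published = BooleanField(default=False)

    @queryset_manager
    def live_posts(doc_cls, queryset):
        # Receives the document class and the default queryset;
        # returns a narrowed queryset.
        return queryset.filter(published=True)

# BlogPost.live_posts now behaves like BlogPost.objects, pre-filtered:
# BlogPost.live_posts.count()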


@@ -1,195 +0,0 @@
import six
from mongoengine.errors import OperationError
from mongoengine.queryset.base import (BaseQuerySet, CASCADE, DENY, DO_NOTHING,
NULLIFY, PULL)
__all__ = ('QuerySet', 'QuerySetNoCache', 'DO_NOTHING', 'NULLIFY', 'CASCADE',
'DENY', 'PULL')
# The maximum number of items to display in a QuerySet.__repr__
REPR_OUTPUT_SIZE = 20
ITER_CHUNK_SIZE = 100
class QuerySet(BaseQuerySet):
"""The default queryset, that builds queries and handles a set of results
returned from a query.
Wraps a MongoDB cursor, providing :class:`~mongoengine.Document` objects as
the results.
"""
_has_more = True
_len = None
_result_cache = None
def __iter__(self):
"""Iteration utilises a results cache which iterates the cursor
in batches of ``ITER_CHUNK_SIZE``.
If ``self._has_more`` is True, the cursor hasn't been exhausted, so keep
caching in batches. Otherwise, iterate over the result_cache.
"""
self._iter = True
if self._has_more:
return self._iter_results()
# iterating over the cache.
return iter(self._result_cache)
def __len__(self):
"""Since __len__ is called quite frequently (for example, as part of
list(qs)), we populate the result cache and cache the length.
"""
if self._len is not None:
return self._len
# Populate the result cache with *all* of the docs in the cursor
if self._has_more:
list(self._iter_results())
# Cache the length of the complete result cache and return it
self._len = len(self._result_cache)
return self._len
def __repr__(self):
"""Provide a string representation of the QuerySet"""
if self._iter:
return '.. queryset mid-iteration ..'
self._populate_cache()
data = self._result_cache[:REPR_OUTPUT_SIZE + 1]
if len(data) > REPR_OUTPUT_SIZE:
data[-1] = '...(remaining elements truncated)...'
return repr(data)
def _iter_results(self):
"""A generator for iterating over the result cache.
Also populates the cache if there are more possible results to
yield. Raises StopIteration when there are no more results.
"""
if self._result_cache is None:
self._result_cache = []
pos = 0
while True:
# For all positions lower than the length of the current result
# cache, serve the docs straight from the cache w/o hitting the
# database.
# XXX it's VERY important to compute the len within the `while`
# condition because the result cache might expand mid-iteration
# (e.g. if we call len(qs) inside a loop that iterates over the
# queryset). Fortunately len(list) is O(1) in Python, so this
# doesn't cause performance issues.
while pos < len(self._result_cache):
yield self._result_cache[pos]
pos += 1
# Stop the generator (raising StopIteration implicitly, which is also
# safe under PEP 479) if we already established there were no more
# docs in the db cursor.
if not self._has_more:
return
# Otherwise, populate more of the cache and repeat.
if len(self._result_cache) <= pos:
self._populate_cache()
def _populate_cache(self):
"""
Populates the result cache with ``ITER_CHUNK_SIZE`` more entries
(until the cursor is exhausted).
"""
if self._result_cache is None:
self._result_cache = []
# Skip populating the cache if we already established there are no
# more docs to pull from the database.
if not self._has_more:
return
# Pull in ITER_CHUNK_SIZE docs from the database and store them in
# the result cache.
try:
for _ in six.moves.range(ITER_CHUNK_SIZE):
self._result_cache.append(self.next())
except StopIteration:
# Getting this exception means there are no more docs in the
# db cursor. Set _has_more to False so that we can use that
# information in other places.
self._has_more = False
def count(self, with_limit_and_skip=False):
"""Count the selected elements in the query.
:param with_limit_and_skip (optional): take any :meth:`limit` or
:meth:`skip` that has been applied to this cursor into account when
getting the count
"""
if with_limit_and_skip is False:
return super(QuerySet, self).count(with_limit_and_skip)
if self._len is None:
self._len = super(QuerySet, self).count(with_limit_and_skip)
return self._len
def no_cache(self):
"""Convert to a non-caching queryset
.. versionadded:: 0.8.3 Convert to non caching queryset
"""
if self._result_cache is not None:
raise OperationError('QuerySet already cached')
return self._clone_into(QuerySetNoCache(self._document,
self._collection))
class QuerySetNoCache(BaseQuerySet):
"""A non caching QuerySet"""
def cache(self):
"""Convert to a caching queryset
.. versionadded:: 0.8.3 Convert to caching queryset
"""
return self._clone_into(QuerySet(self._document, self._collection))
def __repr__(self):
"""Provides the string representation of the QuerySet
.. versionchanged:: 0.6.13 Now doesn't modify the cursor
"""
if self._iter:
return '.. queryset mid-iteration ..'
data = []
for _ in six.moves.range(REPR_OUTPUT_SIZE + 1):
try:
data.append(self.next())
except StopIteration:
break
if len(data) > REPR_OUTPUT_SIZE:
data[-1] = '...(remaining elements truncated)...'
self.rewind()
return repr(data)
def __iter__(self):
queryset = self
if queryset._iter:
queryset = self.clone()
queryset.rewind()
return queryset
class QuerySetNoDeRef(QuerySet):
"""Special no_dereference QuerySet"""
def __dereference(items, max_depth=1, instance=None, name=None):
return items
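
A sketch of the caching behaviour described above (illustrative document; assumes a live connection):

# Sketch: cached vs. uncached iteration.
from mongoengine import Document, StringField, connect

connect('example_db')  # hypothetical database name

class BlogPost(Document):
    title = StringField()

posts = BlogPost.objects     # caching QuerySet
list(posts)                  # first pass fills _result_cache in ITER_CHUNK_SIZE batches
list(posts)                  # second pass is served from the cache, no extra query

uncached = BlogPost.objects.no_cache()  # QuerySetNoCache: hits the db on each iteration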


@@ -1,441 +0,0 @@
from collections import defaultdict
from bson import ObjectId, SON
from bson.dbref import DBRef
import pymongo
import six
from mongoengine.base import UPDATE_OPERATORS
from mongoengine.common import _import_class
from mongoengine.connection import get_connection
from mongoengine.errors import InvalidQueryError
from mongoengine.python_support import IS_PYMONGO_3
__all__ = ('query', 'update')
COMPARISON_OPERATORS = ('ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod',
'all', 'size', 'exists', 'not', 'elemMatch', 'type')
GEO_OPERATORS = ('within_distance', 'within_spherical_distance',
'within_box', 'within_polygon', 'near', 'near_sphere',
'max_distance', 'min_distance', 'geo_within', 'geo_within_box',
'geo_within_polygon', 'geo_within_center',
'geo_within_sphere', 'geo_intersects')
STRING_OPERATORS = ('contains', 'icontains', 'startswith',
'istartswith', 'endswith', 'iendswith',
'exact', 'iexact')
CUSTOM_OPERATORS = ('match',)
MATCH_OPERATORS = (COMPARISON_OPERATORS + GEO_OPERATORS +
STRING_OPERATORS + CUSTOM_OPERATORS)
# TODO make this less complex
def query(_doc_cls=None, **kwargs):
"""Transform a query from Django-style format to Mongo format."""
mongo_query = {}
merge_query = defaultdict(list)
for key, value in sorted(kwargs.items()):
if key == '__raw__':
mongo_query.update(value)
continue
parts = key.rsplit('__')
indices = [(i, p) for i, p in enumerate(parts) if p.isdigit()]
parts = [part for part in parts if not part.isdigit()]
# Check for an operator and transform to mongo-style if there is
op = None
if len(parts) > 1 and parts[-1] in MATCH_OPERATORS:
op = parts.pop()
# Allow to escape operator-like field name by __
if len(parts) > 1 and parts[-1] == '':
parts.pop()
negate = False
if len(parts) > 1 and parts[-1] == 'not':
parts.pop()
negate = True
if _doc_cls:
# Switch field names to proper names [set in Field(name='foo')]
try:
fields = _doc_cls._lookup_field(parts)
except Exception as e:
raise InvalidQueryError(e)
parts = []
CachedReferenceField = _import_class('CachedReferenceField')
GenericReferenceField = _import_class('GenericReferenceField')
cleaned_fields = []
for field in fields:
append_field = True
if isinstance(field, six.string_types):
parts.append(field)
append_field = False
# is last and CachedReferenceField
elif isinstance(field, CachedReferenceField) and fields[-1] == field:
parts.append('%s._id' % field.db_field)
else:
parts.append(field.db_field)
if append_field:
cleaned_fields.append(field)
# Convert value to proper value
field = cleaned_fields[-1]
singular_ops = [None, 'ne', 'gt', 'gte', 'lt', 'lte', 'not']
singular_ops += STRING_OPERATORS
if op in singular_ops:
if isinstance(field, six.string_types):
if (op in STRING_OPERATORS and
isinstance(value, six.string_types)):
StringField = _import_class('StringField')
value = StringField.prepare_query_value(op, value)
else:
value = field
else:
value = field.prepare_query_value(op, value)
if isinstance(field, CachedReferenceField) and value:
value = value['_id']
elif op in ('in', 'nin', 'all', 'near') and not isinstance(value, dict):
# Raise an error if the in/nin/all/near param is not iterable. We need a
# special check for BaseDocument, because - although it's iterable - using
# it as such in the context of this method is most definitely a mistake.
BaseDocument = _import_class('BaseDocument')
if isinstance(value, BaseDocument):
raise TypeError("When using the `in`, `nin`, `all`, or "
"`near`-operators you can't use a "
"`Document`, you must wrap your object "
"in a list (object -> [object]).")
elif not hasattr(value, '__iter__'):
raise TypeError("The `in`, `nin`, `all`, or "
"`near`-operators must be applied to an "
"iterable (e.g. a list).")
else:
value = [field.prepare_query_value(op, v) for v in value]
# If we're querying a GenericReferenceField, we need to alter the
# key depending on the value:
# * If the value is a DBRef, the key should be "field_name._ref".
# * If the value is an ObjectId, the key should be "field_name._ref.$id".
if isinstance(field, GenericReferenceField):
if isinstance(value, DBRef):
parts[-1] += '._ref'
elif isinstance(value, ObjectId):
parts[-1] += '._ref.$id'
# if op and op not in COMPARISON_OPERATORS:
if op:
if op in GEO_OPERATORS:
value = _geo_operator(field, op, value)
elif op in ('match', 'elemMatch'):
ListField = _import_class('ListField')
EmbeddedDocumentField = _import_class('EmbeddedDocumentField')
if (
isinstance(value, dict) and
isinstance(field, ListField) and
isinstance(field.field, EmbeddedDocumentField)
):
value = query(field.field.document_type, **value)
else:
value = field.prepare_query_value(op, value)
value = {'$elemMatch': value}
elif op in CUSTOM_OPERATORS:
raise NotImplementedError('Custom method "%s" has not '
'been implemented' % op)
elif op not in STRING_OPERATORS:
value = {'$' + op: value}
if negate:
value = {'$not': value}
for i, part in indices:
parts.insert(i, part)
key = '.'.join(parts)
if op is None or key not in mongo_query:
mongo_query[key] = value
elif key in mongo_query:
if isinstance(mongo_query[key], dict):
mongo_query[key].update(value)
# $max/minDistance needs to come last - convert to SON
value_dict = mongo_query[key]
if ('$maxDistance' in value_dict or '$minDistance' in value_dict) and \
('$near' in value_dict or '$nearSphere' in value_dict):
value_son = SON()
for k, v in value_dict.iteritems():
if k == '$maxDistance' or k == '$minDistance':
continue
value_son[k] = v
# Required for MongoDB >= 2.6, may fail when combining
# PyMongo 3+ and MongoDB < 2.6
near_embedded = False
for near_op in ('$near', '$nearSphere'):
if isinstance(value_dict.get(near_op), dict) and (
IS_PYMONGO_3 or get_connection().max_wire_version > 1):
value_son[near_op] = SON(value_son[near_op])
if '$maxDistance' in value_dict:
value_son[near_op][
'$maxDistance'] = value_dict['$maxDistance']
if '$minDistance' in value_dict:
value_son[near_op][
'$minDistance'] = value_dict['$minDistance']
near_embedded = True
if not near_embedded:
if '$maxDistance' in value_dict:
value_son['$maxDistance'] = value_dict['$maxDistance']
if '$minDistance' in value_dict:
value_son['$minDistance'] = value_dict['$minDistance']
mongo_query[key] = value_son
else:
# Store for manually merging later
merge_query[key].append(value)
# The queryset has been filtered in such a way that we must manually merge
for k, v in merge_query.items():
merge_query[k].append(mongo_query[k])
del mongo_query[k]
if isinstance(v, list):
value = [{k: val} for val in v]
if '$and' in mongo_query.keys():
mongo_query['$and'].extend(value)
else:
mongo_query['$and'] = value
return mongo_query
def update(_doc_cls=None, **update):
"""Transform an update spec from Django-style format to Mongo
format.
"""
mongo_update = {}
for key, value in update.items():
if key == '__raw__':
mongo_update.update(value)
continue
parts = key.split('__')
# if there is no operator, default to 'set'
if len(parts) < 3 and parts[0] not in UPDATE_OPERATORS:
parts.insert(0, 'set')
# Check for an operator and transform to mongo-style if there is
op = None
if parts[0] in UPDATE_OPERATORS:
op = parts.pop(0)
# Convert Pythonic names to Mongo equivalents
if op in ('push_all', 'pull_all'):
op = op.replace('_all', 'All')
elif op == 'dec':
# Support decrement by flipping a positive value's sign
# and using 'inc'
op = 'inc'
value = -value
elif op == 'add_to_set':
op = 'addToSet'
elif op == 'set_on_insert':
op = 'setOnInsert'
match = None
if parts[-1] in COMPARISON_OPERATORS:
match = parts.pop()
# Allow to escape operator-like field name by __
if len(parts) > 1 and parts[-1] == '':
parts.pop()
if _doc_cls:
# Switch field names to proper names [set in Field(name='foo')]
try:
fields = _doc_cls._lookup_field(parts)
except Exception as e:
raise InvalidQueryError(e)
parts = []
cleaned_fields = []
appended_sub_field = False
for field in fields:
append_field = True
if isinstance(field, six.string_types):
# Convert the S operator to $
if field == 'S':
field = '$'
parts.append(field)
append_field = False
else:
parts.append(field.db_field)
if append_field:
appended_sub_field = False
cleaned_fields.append(field)
if hasattr(field, 'field'):
cleaned_fields.append(field.field)
appended_sub_field = True
# Convert value to proper value
if appended_sub_field:
field = cleaned_fields[-2]
else:
field = cleaned_fields[-1]
GeoJsonBaseField = _import_class('GeoJsonBaseField')
if isinstance(field, GeoJsonBaseField):
value = field.to_mongo(value)
if op == 'push' and isinstance(value, (list, tuple, set)):
value = [field.prepare_query_value(op, v) for v in value]
elif op in (None, 'set', 'push', 'pull'):
if field.required or value is not None:
value = field.prepare_query_value(op, value)
elif op in ('pushAll', 'pullAll'):
value = [field.prepare_query_value(op, v) for v in value]
elif op in ('addToSet', 'setOnInsert'):
if isinstance(value, (list, tuple, set)):
value = [field.prepare_query_value(op, v) for v in value]
elif field.required or value is not None:
value = field.prepare_query_value(op, value)
elif op == 'unset':
value = 1
if match:
match = '$' + match
value = {match: value}
key = '.'.join(parts)
if not op:
raise InvalidQueryError('Updates must supply an operation '
'eg: set__FIELD=value')
if 'pull' in op and '.' in key:
# Dot operators don't work on pull operations
# unless they point to a list field.
# Otherwise it uses nested dict syntax.
if op == 'pullAll':
raise InvalidQueryError('pullAll operations only support '
'a single field depth')
# Look for the last list field and use dot notation up to it
field_classes = [c.__class__ for c in cleaned_fields]
field_classes.reverse()
ListField = _import_class('ListField')
if ListField in field_classes:
# Join all fields via dot notation to the last ListField
# Then process as normal
last_listField = len(
cleaned_fields) - field_classes.index(ListField)
key = '.'.join(parts[:last_listField])
parts = parts[last_listField:]
parts.insert(0, key)
parts.reverse()
for key in parts:
value = {key: value}
elif op == 'addToSet' and isinstance(value, list):
value = {key: {'$each': value}}
elif op == 'push':
if parts[-1].isdigit():
key = parts[0]
position = int(parts[-1])
# $position expects an iterable. If pushing a single value,
# wrap it in a list.
if not isinstance(value, (set, tuple, list)):
value = [value]
value = {key: {'$each': value, '$position': position}}
elif isinstance(value, list):
value = {key: {'$each': value}}
else:
value = {key: value}
else:
value = {key: value}
key = '$' + op
if key not in mongo_update:
mongo_update[key] = value
elif key in mongo_update and isinstance(mongo_update[key], dict):
mongo_update[key].update(value)
return mongo_update
def _geo_operator(field, op, value):
"""Helper to return the query for a given geo query."""
if op == 'max_distance':
value = {'$maxDistance': value}
elif op == 'min_distance':
value = {'$minDistance': value}
elif field._geo_index == pymongo.GEO2D:
if op == 'within_distance':
value = {'$within': {'$center': value}}
elif op == 'within_spherical_distance':
value = {'$within': {'$centerSphere': value}}
elif op == 'within_polygon':
value = {'$within': {'$polygon': value}}
elif op == 'near':
value = {'$near': value}
elif op == 'near_sphere':
value = {'$nearSphere': value}
elif op == 'within_box':
value = {'$within': {'$box': value}}
else:
raise NotImplementedError('Geo method "%s" has not been '
'implemented for a GeoPointField' % op)
else:
if op == 'geo_within':
value = {'$geoWithin': _infer_geometry(value)}
elif op == 'geo_within_box':
value = {'$geoWithin': {'$box': value}}
elif op == 'geo_within_polygon':
value = {'$geoWithin': {'$polygon': value}}
elif op == 'geo_within_center':
value = {'$geoWithin': {'$center': value}}
elif op == 'geo_within_sphere':
value = {'$geoWithin': {'$centerSphere': value}}
elif op == 'geo_intersects':
value = {'$geoIntersects': _infer_geometry(value)}
elif op == 'near':
value = {'$near': _infer_geometry(value)}
else:
raise NotImplementedError(
'Geo method "%s" has not been implemented for a %s '
% (op, field._name)
)
return value
def _infer_geometry(value):
"""Helper method that tries to infer the $geometry shape for a
given value.
"""
if isinstance(value, dict):
if '$geometry' in value:
return value
elif 'coordinates' in value and 'type' in value:
return {'$geometry': value}
raise InvalidQueryError('Invalid $geometry dictionary should have '
'type and coordinates keys')
elif isinstance(value, (list, set)):
# TODO: shouldn't we test value[0][0][0][0] to see if it is MultiPolygon?
# TODO: should both TypeError and IndexError be alike interpreted?
try:
value[0][0][0]
return {'$geometry': {'type': 'Polygon', 'coordinates': value}}
except (TypeError, IndexError):
pass
try:
value[0][0]
return {'$geometry': {'type': 'LineString', 'coordinates': value}}
except (TypeError, IndexError):
pass
try:
value[0]
return {'$geometry': {'type': 'Point', 'coordinates': value}}
except (TypeError, IndexError):
pass
raise InvalidQueryError('Invalid $geometry data. Can be either a '
'dictionary or (nested) lists of coordinate(s)')
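
The transforms can be exercised directly; a sketch without any document class, so no field coercion is applied:

# Sketch: Django-style kwargs -> Mongo query/update dicts.
from mongoengine.queryset.transform import query, update

print(query(age__gte=18, name__in=['Ada', 'Alan']))
# {'age': {'$gte': 18}, 'name': {'$in': ['Ada', 'Alan']}}

print(update(inc__age=1, set__name='Ada'))
# {'$inc': {'age': 1}, '$set': {'name': 'Ada'}}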


@@ -1,159 +0,0 @@
import copy
from mongoengine.errors import InvalidQueryError
from mongoengine.queryset import transform
__all__ = ('Q',)
class QNodeVisitor(object):
"""Base visitor class for visiting Q-object nodes in a query tree.
"""
def visit_combination(self, combination):
"""Called by QCombination objects.
"""
return combination
def visit_query(self, query):
"""Called by (New)Q objects.
"""
return query
class DuplicateQueryConditionsError(InvalidQueryError):
pass
class SimplificationVisitor(QNodeVisitor):
"""Simplifies query trees by combining unnecessary 'and' connection nodes
into a single Q-object.
"""
def visit_combination(self, combination):
if combination.operation == combination.AND:
# The simplification only applies to 'simple' queries
if all(isinstance(node, Q) for node in combination.children):
queries = [n.query for n in combination.children]
try:
return Q(**self._query_conjunction(queries))
except DuplicateQueryConditionsError:
# Cannot be simplified
pass
return combination
def _query_conjunction(self, queries):
"""Merges query dicts - effectively &ing them together.
"""
query_ops = set()
combined_query = {}
for query in queries:
ops = set(query.keys())
# Make sure that the same operation isn't applied more than once
# to a single field
intersection = ops.intersection(query_ops)
if intersection:
raise DuplicateQueryConditionsError()
query_ops.update(ops)
combined_query.update(copy.deepcopy(query))
return combined_query
class QueryCompilerVisitor(QNodeVisitor):
"""Compiles the nodes in a query tree to a PyMongo-compatible query
dictionary.
"""
def __init__(self, document):
self.document = document
def visit_combination(self, combination):
operator = '$and'
if combination.operation == combination.OR:
operator = '$or'
return {operator: combination.children}
def visit_query(self, query):
return transform.query(self.document, **query.query)
class QNode(object):
"""Base class for nodes in query trees."""
AND = 0
OR = 1
def to_query(self, document):
query = self.accept(SimplificationVisitor())
query = query.accept(QueryCompilerVisitor(document))
return query
def accept(self, visitor):
raise NotImplementedError
def _combine(self, other, operation):
"""Combine this node with another node into a QCombination
object.
"""
if getattr(other, 'empty', True):
return self
if self.empty:
return other
return QCombination(operation, [self, other])
@property
def empty(self):
return False
def __or__(self, other):
return self._combine(other, self.OR)
def __and__(self, other):
return self._combine(other, self.AND)
class QCombination(QNode):
"""Represents the combination of several conditions by a given
logical operator.
"""
def __init__(self, operation, children):
self.operation = operation
self.children = []
for node in children:
# If the child is a combination of the same type, we can merge its
# children directly into this combinations children
if isinstance(node, QCombination) and node.operation == operation:
self.children += node.children
else:
self.children.append(node)
def accept(self, visitor):
for i in range(len(self.children)):
if isinstance(self.children[i], QNode):
self.children[i] = self.children[i].accept(visitor)
return visitor.visit_combination(self)
@property
def empty(self):
return not bool(self.children)
class Q(QNode):
"""A simple query object, used in a query tree to build up more complex
query structures.
"""
def __init__(self, **query):
self.query = query
def accept(self, visitor):
return visitor.visit_query(self)
@property
def empty(self):
return not bool(self.query)
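
A sketch of combining and compiling Q nodes (illustrative document; no connection is needed just to build the query dict):

# Sketch: Q combination and compilation to a PyMongo query.
from mongoengine import Document, IntField, StringField
from mongoengine.queryset.visitor import Q

class Person(Document):
    name = StringField()
    age = IntField()

q = Q(name='Ada') | (Q(age__gte=18) & Q(age__lt=65))
print(q.to_query(Person))
# {'$or': [{'name': 'Ada'}, {'age': {'$gte': 18, '$lt': 65}}]}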


@@ -1,48 +0,0 @@
__all__ = ('pre_init', 'post_init', 'pre_save', 'pre_save_post_validation',
'post_save', 'pre_delete', 'post_delete')
signals_available = False
try:
from blinker import Namespace
signals_available = True
except ImportError:
class Namespace(object):
def signal(self, name, doc=None):
return _FakeSignal(name, doc)
class _FakeSignal(object):
"""If blinker is unavailable, create a fake class with the same
interface that allows sending of signals but will fail with an
error on anything else. On send, it simply ignores the arguments
and does nothing.
"""
def __init__(self, name, doc=None):
self.name = name
self.__doc__ = doc
def _fail(self, *args, **kwargs):
raise RuntimeError('signalling support is unavailable '
'because the blinker library is '
'not installed.')
send = lambda *a, **kw: None # noqa
connect = disconnect = has_receivers_for = receivers_for = \
temporarily_connected_to = _fail
del _fail
# the namespace for code signals. If you are not mongoengine code, do
# not put signals in here. Create your own namespace instead.
_signals = Namespace()
pre_init = _signals.signal('pre_init')
post_init = _signals.signal('post_init')
pre_save = _signals.signal('pre_save')
pre_save_post_validation = _signals.signal('pre_save_post_validation')
post_save = _signals.signal('post_save')
pre_delete = _signals.signal('pre_delete')
post_delete = _signals.signal('post_delete')
pre_bulk_insert = _signals.signal('pre_bulk_insert')
post_bulk_insert = _signals.signal('post_bulk_insert')
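
Connecting a handler follows the usual blinker pattern (illustrative document; requires blinker for the signal to actually fire):

# Sketch: registering a pre_save handler.
from mongoengine import Document, StringField, signals

class Person(Document):
    name = StringField()

def capitalize_name(sender, document, **kwargs):
    # Runs before every Person.save(); sender is the Person class.
    document.name = (document.name or '').title()

signals.pre_save.connect(capitalize_name, sender=Person)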


@@ -1,54 +0,0 @@
# sitelib for noarch packages, sitearch for others (remove the unneeded one)
%{!?python_sitelib: %global python_sitelib %(%{__python} -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())")}
%{!?python_sitearch: %global python_sitearch %(%{__python} -c "from distutils.sysconfig import get_python_lib; print(get_python_lib(1))")}
%define srcname mongoengine
Name: python-%{srcname}
Version: 0.8.7
Release: 1%{?dist}
Summary: A Python Document-Object Mapper for working with MongoDB
Group: Development/Libraries
License: MIT
URL: https://github.com/MongoEngine/mongoengine
Source0: %{srcname}-%{version}.tar.bz2
BuildRequires: python-devel
BuildRequires: python-setuptools
Requires: mongodb
Requires: pymongo
Requires: python-blinker
Requires: python-imaging
%description
MongoEngine is an ORM-like layer on top of PyMongo.
%prep
%setup -q -n %{srcname}-%{version}
%build
# Remove CFLAGS=... for noarch packages (unneeded)
CFLAGS="$RPM_OPT_FLAGS" %{__python} setup.py build
%install
rm -rf $RPM_BUILD_ROOT
%{__python} setup.py install -O1 --skip-build --root $RPM_BUILD_ROOT
%clean
rm -rf $RPM_BUILD_ROOT
%files
%defattr(-,root,root,-)
%doc docs AUTHORS LICENSE README.rst
# For noarch packages: sitelib
%{python_sitelib}/*
# For arch-specific packages: sitearch
# %{python_sitearch}/*
%changelog
* See: http://docs.mongoengine.org/en/latest/changelog.html


@@ -1,7 +0,0 @@
nose
pymongo>=2.7.1
six==1.10.0
flake8
flake8-import-order
Sphinx==1.5.5
sphinx-rtd-theme==0.2.4


@@ -1,11 +0,0 @@
[nosetests]
verbosity=2
detailed-errors=1
tests=tests
cover-package=mongoengine
[flake8]
ignore=E501,F401,F403,F405,I201
exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests
max-complexity=47
application-import-names=mongoengine,tests


@@ -1,39 +1,27 @@
from setuptools import setup, find_packages
import os
import sys
from setuptools import find_packages, setup
# Hack to silence atexit traceback in newer python versions
DESCRIPTION = "A Python Document-Object Mapper for working with MongoDB"
LONG_DESCRIPTION = None
try:
import multiprocessing
except ImportError:
LONG_DESCRIPTION = open('README.rst').read()
except:
pass
DESCRIPTION = (
'MongoEngine is a Python Object-Document '
'Mapper for working with MongoDB.'
)
try:
with open('README.rst') as fin:
LONG_DESCRIPTION = fin.read()
except Exception:
LONG_DESCRIPTION = None
def get_version(version_tuple):
"""Return the version tuple as a string, e.g. for (0, 10, 7),
return '0.10.7'.
"""
return '.'.join(map(str, version_tuple))
version = '%s.%s' % (version_tuple[0], version_tuple[1])
if version_tuple[2]:
version = '%s.%s' % (version, version_tuple[2])
return version
# Dirty hack to get version number from mongoengine/__init__.py - we can't
# import it as it depends on PyMongo and PyMongo isn't installed until this
# file is read
init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py')
version_line = list(filter(lambda l: l.startswith('VERSION'), open(init)))[0]
version_line = filter(lambda l: l.startswith('VERSION'), open(init))[0]
VERSION = get_version(eval(version_line.split('=')[-1]))
print(VERSION)
CLASSIFIERS = [
'Development Status :: 4 - Beta',
@@ -41,47 +29,22 @@ CLASSIFIERS = [
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
'Topic :: Database',
'Topic :: Software Development :: Libraries :: Python Modules',
]
extra_opts = {
'packages': find_packages(exclude=['tests', 'tests.*']),
'tests_require': ['nose', 'coverage==4.2', 'blinker', 'Pillow>=2.0.0']
}
if sys.version_info[0] == 3:
extra_opts['use_2to3'] = True
if 'test' in sys.argv or 'nosetests' in sys.argv:
extra_opts['packages'] = find_packages()
extra_opts['package_data'] = {
'tests': ['fields/mongoengine.png', 'fields/mongodb_leaf.png']}
else:
extra_opts['tests_require'] += ['python-dateutil']
setup(
name='mongoengine',
version=VERSION,
author='Harry Marr',
author_email='harry.marr@gmail.com',
maintainer="Stefan Wojcik",
maintainer_email="wojcikstefan@gmail.com",
url='http://mongoengine.org/',
download_url='https://github.com/MongoEngine/mongoengine/tarball/master',
license='MIT',
include_package_data=True,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
platforms=['any'],
classifiers=CLASSIFIERS,
install_requires=['pymongo>=2.7.1', 'six'],
test_suite='nose.collector',
**extra_opts
setup(name='mongoengine',
version=VERSION,
packages=find_packages(),
author='Harry Marr',
author_email='harry.marr@{nospam}gmail.com',
url='http://hmarr.com/mongoengine/',
license='MIT',
include_package_data=True,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
platforms=['any'],
classifiers=CLASSIFIERS,
install_requires=['pymongo'],
test_suite='tests',
)


@@ -1,4 +0,0 @@
from all_warnings import AllWarnings
from document import *
from queryset import *
from fields import *


@@ -1,42 +0,0 @@
"""
This test has been put into a module. This is because it tests warnings that
only get triggered on first hit. This way we can ensure it's imported into the
top level and called first by the test suite.
"""
import unittest
import warnings
from mongoengine import *
__all__ = ('AllWarnings', )
class AllWarnings(unittest.TestCase):
def setUp(self):
connect(db='mongoenginetest')
self.warning_list = []
self.showwarning_default = warnings.showwarning
warnings.showwarning = self.append_to_warning_list
def append_to_warning_list(self, message, category, *args):
self.warning_list.append({"message": message,
"category": category})
def tearDown(self):
# restore default handling of warnings
warnings.showwarning = self.showwarning_default
def test_document_collection_syntax_warning(self):
class NonAbstractBase(Document):
meta = {'allow_inheritance': True}
class InheritedDocumentFailTest(NonAbstractBase):
meta = {'collection': 'fail'}
warning = self.warning_list[0]
self.assertEqual(SyntaxWarning, warning["category"])
self.assertEqual('non_abstract_base',
InheritedDocumentFailTest._get_collection_name())

tests/document.py (new file)

@@ -0,0 +1,633 @@
import unittest
from datetime import datetime
import bson
import pymongo
from mongoengine import *
from mongoengine.connection import _get_db
class DocumentTest(unittest.TestCase):
def setUp(self):
connect(db='mongoenginetest')
self.db = _get_db()
class Person(Document):
name = StringField()
age = IntField()
self.Person = Person
def test_drop_collection(self):
"""Ensure that the collection may be dropped from the database.
"""
self.Person(name='Test').save()
collection = self.Person._meta['collection']
self.assertTrue(collection in self.db.collection_names())
self.Person.drop_collection()
self.assertFalse(collection in self.db.collection_names())
def test_definition(self):
"""Ensure that document may be defined using fields.
"""
name_field = StringField()
age_field = IntField()
class Person(Document):
name = name_field
age = age_field
non_field = True
self.assertEqual(Person._fields['name'], name_field)
self.assertEqual(Person._fields['age'], age_field)
self.assertFalse('non_field' in Person._fields)
self.assertTrue('id' in Person._fields)
# Test iteration over fields
fields = list(Person())
self.assertTrue('name' in fields and 'age' in fields)
# Ensure Document isn't treated like an actual document
self.assertFalse(hasattr(Document, '_fields'))
def test_get_superclasses(self):
"""Ensure that the correct list of superclasses is assembled.
"""
class Animal(Document): pass
class Fish(Animal): pass
class Mammal(Animal): pass
class Human(Mammal): pass
class Dog(Mammal): pass
mammal_superclasses = {'Animal': Animal}
self.assertEqual(Mammal._superclasses, mammal_superclasses)
dog_superclasses = {
'Animal': Animal,
'Animal.Mammal': Mammal,
}
self.assertEqual(Dog._superclasses, dog_superclasses)
def test_get_subclasses(self):
"""Ensure that the correct list of subclasses is retrieved by the
_get_subclasses method.
"""
class Animal(Document): pass
class Fish(Animal): pass
class Mammal(Animal): pass
class Human(Mammal): pass
class Dog(Mammal): pass
mammal_subclasses = {
'Animal.Mammal.Dog': Dog,
'Animal.Mammal.Human': Human
}
self.assertEqual(Mammal._get_subclasses(), mammal_subclasses)
animal_subclasses = {
'Animal.Fish': Fish,
'Animal.Mammal': Mammal,
'Animal.Mammal.Dog': Dog,
'Animal.Mammal.Human': Human
}
self.assertEqual(Animal._get_subclasses(), animal_subclasses)
def test_polymorphic_queries(self):
"""Ensure that the correct subclasses are returned from a query"""
class Animal(Document): pass
class Fish(Animal): pass
class Mammal(Animal): pass
class Human(Mammal): pass
class Dog(Mammal): pass
Animal().save()
Fish().save()
Mammal().save()
Human().save()
Dog().save()
classes = [obj.__class__ for obj in Animal.objects]
self.assertEqual(classes, [Animal, Fish, Mammal, Human, Dog])
classes = [obj.__class__ for obj in Mammal.objects]
self.assertEqual(classes, [Mammal, Human, Dog])
classes = [obj.__class__ for obj in Human.objects]
self.assertEqual(classes, [Human])
Animal.drop_collection()
def test_inheritance(self):
"""Ensure that document may inherit fields from a superclass document.
"""
class Employee(self.Person):
salary = IntField()
self.assertTrue('name' in Employee._fields)
self.assertTrue('salary' in Employee._fields)
self.assertEqual(Employee._meta['collection'],
self.Person._meta['collection'])
# Ensure that MRO error is not raised
class A(Document): pass
class B(A): pass
class C(B): pass
def test_allow_inheritance(self):
"""Ensure that inheritance may be disabled on simple classes and that
_cls and _types will not be used.
"""
class Animal(Document):
meta = {'allow_inheritance': False}
name = StringField()
Animal.drop_collection()
def create_dog_class():
class Dog(Animal):
pass
self.assertRaises(ValueError, create_dog_class)
# Check that _cls etc aren't present on simple documents
dog = Animal(name='dog')
dog.save()
collection = self.db[Animal._meta['collection']]
obj = collection.find_one()
self.assertFalse('_cls' in obj)
self.assertFalse('_types' in obj)
Animal.drop_collection()
def create_employee_class():
class Employee(self.Person):
meta = {'allow_inheritance': False}
self.assertRaises(ValueError, create_employee_class)
# Test the same for embedded documents
class Comment(EmbeddedDocument):
content = StringField()
meta = {'allow_inheritance': False}
def create_special_comment():
class SpecialComment(Comment):
pass
self.assertRaises(ValueError, create_special_comment)
comment = Comment(content='test')
self.assertFalse('_cls' in comment.to_mongo())
self.assertFalse('_types' in comment.to_mongo())
def test_collection_name(self):
"""Ensure that a collection with a specified name may be used.
"""
collection = 'personCollTest'
if collection in self.db.collection_names():
self.db.drop_collection(collection)
class Person(Document):
name = StringField()
meta = {'collection': collection}
user = Person(name="Test User")
user.save()
self.assertTrue(collection in self.db.collection_names())
user_obj = self.db[collection].find_one()
self.assertEqual(user_obj['name'], "Test User")
user_obj = Person.objects[0]
self.assertEqual(user_obj.name, "Test User")
Person.drop_collection()
self.assertFalse(collection in self.db.collection_names())
def test_inherited_collections(self):
"""Ensure that subclassed documents don't override parents' collections.
"""
class Drink(Document):
name = StringField()
class AlcoholicDrink(Drink):
meta = {'collection': 'booze'}
class Drinker(Document):
drink = GenericReferenceField()
Drink.drop_collection()
AlcoholicDrink.drop_collection()
Drinker.drop_collection()
red_bull = Drink(name='Red Bull')
red_bull.save()
programmer = Drinker(drink=red_bull)
programmer.save()
beer = AlcoholicDrink(name='Beer')
beer.save()
real_person = Drinker(drink=beer)
real_person.save()
self.assertEqual(Drinker.objects[0].drink.name, red_bull.name)
self.assertEqual(Drinker.objects[1].drink.name, beer.name)
def test_capped_collection(self):
"""Ensure that capped collections work properly.
"""
class Log(Document):
date = DateTimeField(default=datetime.now)
meta = {
'max_documents': 10,
'max_size': 90000,
}
Log.drop_collection()
# Ensure that the collection handles up to its maximum
for i in range(10):
Log().save()
self.assertEqual(len(Log.objects), 10)
# Check that extra documents don't increase the size
Log().save()
self.assertEqual(len(Log.objects), 10)
options = Log.objects._collection.options()
self.assertEqual(options['capped'], True)
self.assertEqual(options['max'], 10)
self.assertEqual(options['size'], 90000)
# Check that the document cannot be redefined with different options
def recreate_log_document():
class Log(Document):
date = DateTimeField(default=datetime.now)
meta = {
'max_documents': 11,
}
# Create the collection by accessing Document.objects
Log.objects
self.assertRaises(InvalidCollectionError, recreate_log_document)
Log.drop_collection()
def test_indexes(self):
"""Ensure that indexes are used when meta[indexes] is specified.
"""
class BlogPost(Document):
date = DateTimeField(db_field='addDate', default=datetime.now)
category = StringField()
tags = ListField(StringField())
meta = {
'indexes': [
'-date',
'tags',
('category', '-date')
],
}
BlogPost.drop_collection()
info = BlogPost.objects._collection.index_information()
# _id, types, '-date', 'tags', ('cat', 'date')
self.assertEqual(len(info), 5)
# Indexes are lazy so use list() to perform query
list(BlogPost.objects)
info = BlogPost.objects._collection.index_information()
info = [value['key'] for key, value in info.iteritems()]
self.assertTrue([('_types', 1), ('category', 1), ('addDate', -1)]
in info)
self.assertTrue([('_types', 1), ('addDate', -1)] in info)
# tags is a list field so it shouldn't have _types in the index
self.assertTrue([('tags', 1)] in info)
class ExtendedBlogPost(BlogPost):
title = StringField()
meta = {'indexes': ['title']}
BlogPost.drop_collection()
list(ExtendedBlogPost.objects)
info = ExtendedBlogPost.objects._collection.index_information()
info = [value['key'] for key, value in info.iteritems()]
self.assertTrue([('_types', 1), ('category', 1), ('addDate', -1)]
in info)
self.assertTrue([('_types', 1), ('addDate', -1)] in info)
self.assertTrue([('_types', 1), ('title', 1)] in info)
BlogPost.drop_collection()
def test_unique(self):
"""Ensure that uniqueness constraints are applied to fields.
"""
class BlogPost(Document):
title = StringField()
slug = StringField(unique=True)
BlogPost.drop_collection()
post1 = BlogPost(title='test1', slug='test')
post1.save()
# Two posts with the same slug is not allowed
post2 = BlogPost(title='test2', slug='test')
self.assertRaises(OperationError, post2.save)
class Date(EmbeddedDocument):
year = IntField(db_field='yr')
class BlogPost(Document):
title = StringField()
date = EmbeddedDocumentField(Date)
slug = StringField(unique_with='date.year')
BlogPost.drop_collection()
post1 = BlogPost(title='test1', date=Date(year=2009), slug='test')
post1.save()
# year is different so won't raise exception
post2 = BlogPost(title='test2', date=Date(year=2010), slug='test')
post2.save()
# Now there will be two docs with the same slug and the same year: fail
post3 = BlogPost(title='test3', date=Date(year=2010), slug='test')
self.assertRaises(OperationError, post3.save)
BlogPost.drop_collection()
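# Hedged recap of the uniqueness options used above: unique=True builds a
# unique index on the single field, while unique_with folds the named
# sibling (here a sub-field of an embedded document) into one compound
# unique index:
#
#     slug = StringField(unique=True)              # unique on slug alone
#     slug = StringField(unique_with='date.year')  # unique on (slug, year)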
def test_custom_id_field(self):
"""Ensure that documents may be created with custom primary keys.
"""
class User(Document):
username = StringField(primary_key=True)
name = StringField()
User.drop_collection()
self.assertEqual(User._fields['username'].db_field, '_id')
self.assertEqual(User._meta['id_field'], 'username')
def create_invalid_user():
User(name='test').save() # no primary key field
self.assertRaises(ValidationError, create_invalid_user)
def define_invalid_user():
class EmailUser(User):
email = StringField(primary_key=True)
self.assertRaises(ValueError, define_invalid_user)
class EmailUser(User):
email = StringField()
user = User(username='test', name='test user')
user.save()
user_obj = User.objects.first()
self.assertEqual(user_obj.id, 'test')
self.assertEqual(user_obj.pk, 'test')
user_son = User.objects._collection.find_one()
self.assertEqual(user_son['_id'], 'test')
self.assertTrue('username' not in user_son)
User.drop_collection()
user = User(pk='mongo', name='mongo user')
user.save()
user_obj = User.objects.first()
self.assertEqual(user_obj.id, 'mongo')
self.assertEqual(user_obj.pk, 'mongo')
user_son = User.objects._collection.find_one()
self.assertEqual(user_son['_id'], 'mongo')
self.assertTrue('username' not in user_son)
User.drop_collection()
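# Sketch of the primary-key mapping verified above: primary_key=True makes
# the field the document's _id, and the generic 'pk' alias always resolves
# to whichever field is the primary key:
#
#     User(username='test').save()
#     User.objects.first().pk        # 'test' -- stored as _id in MongoDB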
def test_creation(self):
"""Ensure that document may be created using keyword arguments.
"""
person = self.Person(name="Test User", age=30)
self.assertEqual(person.name, "Test User")
self.assertEqual(person.age, 30)
def test_reload(self):
"""Ensure that attributes may be reloaded.
"""
person = self.Person(name="Test User", age=20)
person.save()
person_obj = self.Person.objects.first()
person_obj.name = "Mr Test User"
person_obj.age = 21
person_obj.save()
self.assertEqual(person.name, "Test User")
self.assertEqual(person.age, 20)
person.reload()
self.assertEqual(person.name, "Mr Test User")
self.assertEqual(person.age, 21)
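# Hedged note on reload(): it refetches the document's current state from
# the database into the existing in-memory instance, which is why the stale
# 'person' object above picks up the changes saved through 'person_obj':
#
#     person.reload()                # person.name is now 'Mr Test User'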
def test_dictionary_access(self):
"""Ensure that dictionary-style field access works properly.
"""
person = self.Person(name='Test User', age=30)
self.assertEqual(person['name'], 'Test User')
self.assertRaises(KeyError, person.__getitem__, 'salary')
self.assertRaises(KeyError, person.__setitem__, 'salary', 50)
person['name'] = 'Another User'
self.assertEqual(person['name'], 'Another User')
# Length = length(assigned fields + id)
self.assertEqual(len(person), 3)
self.assertTrue('age' in person)
person.age = None
self.assertFalse('age' in person)
self.assertFalse('nationality' in person)
def test_embedded_document(self):
"""Ensure that embedded documents are set up correctly.
"""
class Comment(EmbeddedDocument):
content = StringField()
self.assertTrue('content' in Comment._fields)
self.assertFalse('id' in Comment._fields)
self.assertFalse('collection' in Comment._meta)
def test_embedded_document_validation(self):
"""Ensure that embedded documents may be validated.
"""
class Comment(EmbeddedDocument):
date = DateTimeField()
content = StringField(required=True)
comment = Comment()
self.assertRaises(ValidationError, comment.validate)
comment.content = 'test'
comment.validate()
comment.date = 4
self.assertRaises(ValidationError, comment.validate)
comment.date = datetime.now()
comment.validate()
def test_save(self):
"""Ensure that a document may be saved in the database.
"""
# Create person object and save it to the database
person = self.Person(name='Test User', age=30)
person.save()
# Ensure that the object is in the database
collection = self.db[self.Person._meta['collection']]
person_obj = collection.find_one({'name': 'Test User'})
self.assertEqual(person_obj['name'], 'Test User')
self.assertEqual(person_obj['age'], 30)
self.assertEqual(person_obj['_id'], person.id)
# Test skipping validation on save
class Recipient(Document):
email = EmailField(required=True)
recipient = Recipient(email='root@localhost')
self.assertRaises(ValidationError, recipient.save)
try:
recipient.save(validate=False)
except ValidationError:
self.fail('validation should not have run with validate=False')
def test_delete(self):
"""Ensure that document may be deleted using the delete method.
"""
person = self.Person(name="Test User", age=30)
person.save()
self.assertEqual(len(self.Person.objects), 1)
person.delete()
self.assertEqual(len(self.Person.objects), 0)
def test_save_custom_id(self):
"""Ensure that a document may be saved with a custom _id.
"""
# Create person object and save it to the database
person = self.Person(name='Test User', age=30,
id='497ce96f395f2f052a494fd4')
person.save()
# Ensure that the object is in the database with the correct _id
collection = self.db[self.Person._meta['collection']]
person_obj = collection.find_one({'name': 'Test User'})
self.assertEqual(str(person_obj['_id']), '497ce96f395f2f052a494fd4')
def test_save_custom_pk(self):
"""Ensure that a document may be saved with a custom _id using pk alias.
"""
# Create person object and save it to the database
person = self.Person(name='Test User', age=30,
pk='497ce96f395f2f052a494fd4')
person.save()
# Ensure that the object is in the database with the correct _id
collection = self.db[self.Person._meta['collection']]
person_obj = collection.find_one({'name': 'Test User'})
self.assertEqual(str(person_obj['_id']), '497ce96f395f2f052a494fd4')
def test_save_list(self):
"""Ensure that a list field may be properly saved.
"""
class Comment(EmbeddedDocument):
content = StringField()
class BlogPost(Document):
content = StringField()
comments = ListField(EmbeddedDocumentField(Comment))
tags = ListField(StringField())
BlogPost.drop_collection()
post = BlogPost(content='Went for a walk today...')
post.tags = tags = ['fun', 'leisure']
comments = [Comment(content='Good for you'), Comment(content='Yay.')]
post.comments = comments
post.save()
collection = self.db[BlogPost._meta['collection']]
post_obj = collection.find_one()
self.assertEqual(post_obj['tags'], tags)
for comment_obj, comment in zip(post_obj['comments'], comments):
self.assertEqual(comment_obj['content'], comment['content'])
BlogPost.drop_collection()
def test_save_embedded_document(self):
"""Ensure that a document with an embedded document field may be
saved in the database.
"""
class EmployeeDetails(EmbeddedDocument):
position = StringField()
class Employee(self.Person):
salary = IntField()
details = EmbeddedDocumentField(EmployeeDetails)
# Create employee object and save it to the database
employee = Employee(name='Test Employee', age=50, salary=20000)
employee.details = EmployeeDetails(position='Developer')
employee.save()
# Ensure that the object is in the database
collection = self.db[self.Person._meta['collection']]
employee_obj = collection.find_one({'name': 'Test Employee'})
self.assertEqual(employee_obj['name'], 'Test Employee')
self.assertEqual(employee_obj['age'], 50)
# Ensure that the 'details' embedded object saved correctly
self.assertEqual(employee_obj['details']['position'], 'Developer')
def test_save_reference(self):
"""Ensure that a document reference field may be saved in the database.
"""
class BlogPost(Document):
meta = {'collection': 'blogpost_1'}
content = StringField()
author = ReferenceField(self.Person)
BlogPost.drop_collection()
author = self.Person(name='Test User')
author.save()
post = BlogPost(content='Watched some TV today... how exciting.')
# Should only reference author when saving
post.author = author
post.save()
post_obj = BlogPost.objects.first()
# Test laziness
self.assertTrue(isinstance(post_obj._data['author'],
bson.dbref.DBRef))
self.assertTrue(isinstance(post_obj.author, self.Person))
self.assertEqual(post_obj.author.name, 'Test User')
# Ensure that the dereferenced object may be changed and saved
post_obj.author.age = 25
post_obj.author.save()
author = list(self.Person.objects(name='Test User'))[-1]
self.assertEqual(author.age, 25)
BlogPost.drop_collection()
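# Sketch of the laziness asserted above: the raw _data slot holds a bson
# DBRef until the attribute is first accessed, at which point MongoEngine
# fetches the referenced document and returns a full Person instance:
#
#     post_obj._data['author']   # bson.dbref.DBRef -- no query issued yet
#     post_obj.author.name       # triggers the lookup, then reads the field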
def tearDown(self):
self.Person.drop_collection()
if __name__ == '__main__':
unittest.main()


@@ -1,13 +0,0 @@
import unittest
from class_methods import *
from delta import *
from dynamic import *
from indexes import *
from inheritance import *
from instance import *
from json_serialisation import *
from validation import *
if __name__ == '__main__':
unittest.main()


@@ -1,350 +0,0 @@
# -*- coding: utf-8 -*-
import unittest
from mongoengine import *
from mongoengine.queryset import NULLIFY, PULL
from mongoengine.connection import get_db
__all__ = ("ClassMethodsTest", )
class ClassMethodsTest(unittest.TestCase):
def setUp(self):
connect(db='mongoenginetest')
self.db = get_db()
class Person(Document):
name = StringField()
age = IntField()
non_field = True
meta = {"allow_inheritance": True}
self.Person = Person
def tearDown(self):
for collection in self.db.collection_names():
if 'system.' in collection:
continue
self.db.drop_collection(collection)
def test_definition(self):
"""Ensure that document may be defined using fields.
"""
self.assertEqual(['_cls', 'age', 'id', 'name'],
sorted(self.Person._fields.keys()))
self.assertEqual(["IntField", "ObjectIdField", "StringField", "StringField"],
sorted([x.__class__.__name__ for x in
self.Person._fields.values()]))
def test_get_db(self):
"""Ensure that get_db returns the expected db.
"""
db = self.Person._get_db()
self.assertEqual(self.db, db)
def test_get_collection_name(self):
"""Ensure that get_collection_name returns the expected collection
name.
"""
collection_name = 'person'
self.assertEqual(collection_name, self.Person._get_collection_name())
def test_get_collection(self):
"""Ensure that get_collection returns the expected collection.
"""
collection_name = 'person'
collection = self.Person._get_collection()
self.assertEqual(self.db[collection_name], collection)
def test_drop_collection(self):
"""Ensure that the collection may be dropped from the database.
"""
collection_name = 'person'
self.Person(name='Test').save()
self.assertTrue(collection_name in self.db.collection_names())
self.Person.drop_collection()
self.assertFalse(collection_name in self.db.collection_names())
def test_register_delete_rule(self):
"""Ensure that register delete rule adds a delete rule to the document
meta.
"""
class Job(Document):
employee = ReferenceField(self.Person)
self.assertEqual(self.Person._meta.get('delete_rules'), None)
self.Person.register_delete_rule(Job, 'employee', NULLIFY)
self.assertEqual(self.Person._meta['delete_rules'],
{(Job, 'employee'): NULLIFY})
def test_compare_indexes(self):
""" Ensure that the indexes are properly created and that
compare_indexes identifies the missing/extra indexes
"""
class BlogPost(Document):
author = StringField()
title = StringField()
description = StringField()
tags = StringField()
meta = {
'indexes': [('author', 'title')]
}
BlogPost.drop_collection()
BlogPost.ensure_indexes()
self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })
BlogPost.ensure_index(['author', 'description'])
self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [[('author', 1), ('description', 1)]] })
BlogPost._get_collection().drop_index('author_1_description_1')
self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })
BlogPost._get_collection().drop_index('author_1_title_1')
self.assertEqual(BlogPost.compare_indexes(), { 'missing': [[('author', 1), ('title', 1)]], 'extra': [] })
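# Hedged summary of the compare_indexes() contract exercised above: it
# returns a dict with 'missing' (declared in meta but absent from the
# collection) and 'extra' (present on the collection but not declared)
# lists of index key specs, ignoring the default _id index:
#
#     BlogPost.compare_indexes()
#     # -> {'missing': [], 'extra': [[('author', 1), ('description', 1)]]}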
def test_compare_indexes_inheritance(self):
""" Ensure that the indexes are properly created and that
compare_indexes identifies the missing/extra indexes for subclassed
documents (_cls included)
"""
class BlogPost(Document):
author = StringField()
title = StringField()
description = StringField()
meta = {
'allow_inheritance': True
}
class BlogPostWithTags(BlogPost):
tags = StringField()
tag_list = ListField(StringField())
meta = {
'indexes': [('author', 'tags')]
}
BlogPost.drop_collection()
BlogPost.ensure_indexes()
BlogPostWithTags.ensure_indexes()
self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })
BlogPostWithTags.ensure_index(['author', 'tag_list'])
self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [[('_cls', 1), ('author', 1), ('tag_list', 1)]] })
BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tag_list_1')
self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })
BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tags_1')
self.assertEqual(BlogPost.compare_indexes(), { 'missing': [[('_cls', 1), ('author', 1), ('tags', 1)]], 'extra': [] })
def test_compare_indexes_multiple_subclasses(self):
""" Ensure that compare_indexes behaves correctly if called from a
class, which base class has multiple subclasses
"""
class BlogPost(Document):
author = StringField()
title = StringField()
description = StringField()
meta = {
'allow_inheritance': True
}
class BlogPostWithTags(BlogPost):
tags = StringField()
tag_list = ListField(StringField())
meta = {
'indexes': [('author', 'tags')]
}
class BlogPostWithCustomField(BlogPost):
custom = DictField()
meta = {
'indexes': [('author', 'custom')]
}
BlogPost.ensure_indexes()
BlogPostWithTags.ensure_indexes()
BlogPostWithCustomField.ensure_indexes()
self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })
self.assertEqual(BlogPostWithTags.compare_indexes(), { 'missing': [], 'extra': [] })
self.assertEqual(BlogPostWithCustomField.compare_indexes(), { 'missing': [], 'extra': [] })
def test_list_indexes_inheritance(self):
""" ensure that all of the indexes are listed regardless of the super-
or sub-class that we call it from
"""
class BlogPost(Document):
author = StringField()
title = StringField()
description = StringField()
meta = {
'allow_inheritance': True
}
class BlogPostWithTags(BlogPost):
tags = StringField()
meta = {
'indexes': [('author', 'tags')]
}
class BlogPostWithTagsAndExtraText(BlogPostWithTags):
extra_text = StringField()
meta = {
'indexes': [('author', 'tags', 'extra_text')]
}
BlogPost.drop_collection()
BlogPost.ensure_indexes()
BlogPostWithTags.ensure_indexes()
BlogPostWithTagsAndExtraText.ensure_indexes()
self.assertEqual(BlogPost.list_indexes(),
BlogPostWithTags.list_indexes())
self.assertEqual(BlogPost.list_indexes(),
BlogPostWithTagsAndExtraText.list_indexes())
self.assertEqual(BlogPost.list_indexes(),
[[('_cls', 1), ('author', 1), ('tags', 1)],
[('_cls', 1), ('author', 1), ('tags', 1), ('extra_text', 1)],
[(u'_id', 1)], [('_cls', 1)]])
def test_register_delete_rule_inherited(self):
class Vaccine(Document):
name = StringField(required=True)
meta = {"indexes": ["name"]}
class Animal(Document):
family = StringField(required=True)
vaccine_made = ListField(ReferenceField("Vaccine", reverse_delete_rule=PULL))
meta = {"allow_inheritance": True, "indexes": ["family"]}
class Cat(Animal):
name = StringField(required=True)
self.assertEqual(Vaccine._meta['delete_rules'][(Animal, 'vaccine_made')], PULL)
self.assertEqual(Vaccine._meta['delete_rules'][(Cat, 'vaccine_made')], PULL)
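# Sketch of the PULL rule registered above: deleting a referenced Vaccine
# pulls its reference out of every Animal.vaccine_made list (including
# subclasses such as Cat) instead of deleting or nullifying the documents:
#
#     polio = Vaccine(name='polio').save()
#     cat = Cat(family='felidae', name='tom', vaccine_made=[polio]).save()
#     polio.delete()             # cat.reload().vaccine_made is now []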
def test_collection_naming(self):
"""Ensure that a collection with a specified name may be used.
"""
class DefaultNamingTest(Document):
pass
self.assertEqual('default_naming_test',
DefaultNamingTest._get_collection_name())
class CustomNamingTest(Document):
meta = {'collection': 'pimp_my_collection'}
self.assertEqual('pimp_my_collection',
CustomNamingTest._get_collection_name())
class DynamicNamingTest(Document):
meta = {'collection': lambda c: "DYNAMO"}
self.assertEqual('DYNAMO', DynamicNamingTest._get_collection_name())
# Use Abstract class to handle backwards compatibility
class BaseDocument(Document):
meta = {
'abstract': True,
'collection': lambda c: c.__name__.lower()
}
class OldNamingConvention(BaseDocument):
pass
self.assertEqual('oldnamingconvention',
OldNamingConvention._get_collection_name())
class InheritedAbstractNamingTest(BaseDocument):
meta = {'collection': 'wibble'}
self.assertEqual('wibble',
InheritedAbstractNamingTest._get_collection_name())
# Mixin tests
class BaseMixin(object):
meta = {
'collection': lambda c: c.__name__.lower()
}
class OldMixinNamingConvention(Document, BaseMixin):
pass
self.assertEqual('oldmixinnamingconvention',
OldMixinNamingConvention._get_collection_name())
class BaseMixin(object):
meta = {
'collection': lambda c: c.__name__.lower()
}
class BaseDocument(Document, BaseMixin):
meta = {'allow_inheritance': True}
class MyDocument(BaseDocument):
pass
self.assertEqual('basedocument', MyDocument._get_collection_name())
def test_custom_collection_name_operations(self):
"""Ensure that a collection with a specified name is used as expected.
"""
collection_name = 'personCollTest'
class Person(Document):
name = StringField()
meta = {'collection': collection_name}
Person(name="Test User").save()
self.assertTrue(collection_name in self.db.collection_names())
user_obj = self.db[collection_name].find_one()
self.assertEqual(user_obj['name'], "Test User")
user_obj = Person.objects[0]
self.assertEqual(user_obj.name, "Test User")
Person.drop_collection()
self.assertFalse(collection_name in self.db.collection_names())
def test_collection_name_and_primary(self):
"""Ensure that a collection with a specified name may be used.
"""
class Person(Document):
name = StringField(primary_key=True)
meta = {'collection': 'app'}
Person(name="Test User").save()
user_obj = Person.objects.first()
self.assertEqual(user_obj.name, "Test User")
Person.drop_collection()
if __name__ == '__main__':
unittest.main()


@@ -1,867 +0,0 @@
# -*- coding: utf-8 -*-
import unittest
from bson import SON
from mongoengine import *
from mongoengine.connection import get_db
__all__ = ("DeltaTest",)
class DeltaTest(unittest.TestCase):
def setUp(self):
connect(db='mongoenginetest')
self.db = get_db()
class Person(Document):
name = StringField()
age = IntField()
non_field = True
meta = {"allow_inheritance": True}
self.Person = Person
def tearDown(self):
for collection in self.db.collection_names():
if 'system.' in collection:
continue
self.db.drop_collection(collection)
def test_delta(self):
self.delta(Document)
self.delta(DynamicDocument)
def delta(self, DocClass):
class Doc(DocClass):
string_field = StringField()
int_field = IntField()
dict_field = DictField()
list_field = ListField()
Doc.drop_collection()
doc = Doc()
doc.save()
doc = Doc.objects.first()
self.assertEqual(doc._get_changed_fields(), [])
self.assertEqual(doc._delta(), ({}, {}))
doc.string_field = 'hello'
self.assertEqual(doc._get_changed_fields(), ['string_field'])
self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {}))
doc._changed_fields = []
doc.int_field = 1
self.assertEqual(doc._get_changed_fields(), ['int_field'])
self.assertEqual(doc._delta(), ({'int_field': 1}, {}))
doc._changed_fields = []
dict_value = {'hello': 'world', 'ping': 'pong'}
doc.dict_field = dict_value
self.assertEqual(doc._get_changed_fields(), ['dict_field'])
self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {}))
doc._changed_fields = []
list_value = ['1', 2, {'hello': 'world'}]
doc.list_field = list_value
self.assertEqual(doc._get_changed_fields(), ['list_field'])
self.assertEqual(doc._delta(), ({'list_field': list_value}, {}))
# Test unsetting
doc._changed_fields = []
doc.dict_field = {}
self.assertEqual(doc._get_changed_fields(), ['dict_field'])
self.assertEqual(doc._delta(), ({}, {'dict_field': 1}))
doc._changed_fields = []
doc.list_field = []
self.assertEqual(doc._get_changed_fields(), ['list_field'])
self.assertEqual(doc._delta(), ({}, {'list_field': 1}))
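# Hedged recap of the _delta() contract driven above: it returns a
# (sets, unsets) pair keyed by database field paths -- assignments show up
# in the first dict, while assigning an empty dict/list marks the path for
# an $unset in the second:
#
#     doc.string_field = 'hello'  # -> ({'string_field': 'hello'}, {})
#     doc.list_field = []         # -> ({}, {'list_field': 1})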
def test_delta_recursive(self):
self.delta_recursive(Document, EmbeddedDocument)
self.delta_recursive(DynamicDocument, EmbeddedDocument)
self.delta_recursive(Document, DynamicEmbeddedDocument)
self.delta_recursive(DynamicDocument, DynamicEmbeddedDocument)
def delta_recursive(self, DocClass, EmbeddedClass):
class Embedded(EmbeddedClass):
id = StringField()
string_field = StringField()
int_field = IntField()
dict_field = DictField()
list_field = ListField()
class Doc(DocClass):
string_field = StringField()
int_field = IntField()
dict_field = DictField()
list_field = ListField()
embedded_field = EmbeddedDocumentField(Embedded)
Doc.drop_collection()
doc = Doc()
doc.save()
doc = Doc.objects.first()
self.assertEqual(doc._get_changed_fields(), [])
self.assertEqual(doc._delta(), ({}, {}))
embedded_1 = Embedded()
embedded_1.id = "010101"
embedded_1.string_field = 'hello'
embedded_1.int_field = 1
embedded_1.dict_field = {'hello': 'world'}
embedded_1.list_field = ['1', 2, {'hello': 'world'}]
doc.embedded_field = embedded_1
self.assertEqual(doc._get_changed_fields(), ['embedded_field'])
embedded_delta = {
'id': "010101",
'string_field': 'hello',
'int_field': 1,
'dict_field': {'hello': 'world'},
'list_field': ['1', 2, {'hello': 'world'}]
}
self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {}))
self.assertEqual(doc._delta(),
({'embedded_field': embedded_delta}, {}))
doc.save()
doc = doc.reload(10)
doc.embedded_field.dict_field = {}
self.assertEqual(doc._get_changed_fields(),
['embedded_field.dict_field'])
self.assertEqual(doc.embedded_field._delta(), ({}, {'dict_field': 1}))
self.assertEqual(doc._delta(), ({}, {'embedded_field.dict_field': 1}))
doc.save()
doc = doc.reload(10)
self.assertEqual(doc.embedded_field.dict_field, {})
doc.embedded_field.list_field = []
self.assertEqual(doc._get_changed_fields(),
['embedded_field.list_field'])
self.assertEqual(doc.embedded_field._delta(), ({}, {'list_field': 1}))
self.assertEqual(doc._delta(), ({}, {'embedded_field.list_field': 1}))
doc.save()
doc = doc.reload(10)
self.assertEqual(doc.embedded_field.list_field, [])
embedded_2 = Embedded()
embedded_2.string_field = 'hello'
embedded_2.int_field = 1
embedded_2.dict_field = {'hello': 'world'}
embedded_2.list_field = ['1', 2, {'hello': 'world'}]
doc.embedded_field.list_field = ['1', 2, embedded_2]
self.assertEqual(doc._get_changed_fields(),
['embedded_field.list_field'])
self.assertEqual(doc.embedded_field._delta(), ({
'list_field': ['1', 2, {
'_cls': 'Embedded',
'string_field': 'hello',
'dict_field': {'hello': 'world'},
'int_field': 1,
'list_field': ['1', 2, {'hello': 'world'}],
}]
}, {}))
self.assertEqual(doc._delta(), ({
'embedded_field.list_field': ['1', 2, {
'_cls': 'Embedded',
'string_field': 'hello',
'dict_field': {'hello': 'world'},
'int_field': 1,
'list_field': ['1', 2, {'hello': 'world'}],
}]
}, {}))
doc.save()
doc = doc.reload(10)
self.assertEqual(doc.embedded_field.list_field[0], '1')
self.assertEqual(doc.embedded_field.list_field[1], 2)
for k in doc.embedded_field.list_field[2]._fields:
self.assertEqual(doc.embedded_field.list_field[2][k],
embedded_2[k])
doc.embedded_field.list_field[2].string_field = 'world'
self.assertEqual(doc._get_changed_fields(),
['embedded_field.list_field.2.string_field'])
self.assertEqual(doc.embedded_field._delta(),
({'list_field.2.string_field': 'world'}, {}))
self.assertEqual(doc._delta(),
({'embedded_field.list_field.2.string_field': 'world'}, {}))
doc.save()
doc = doc.reload(10)
self.assertEqual(doc.embedded_field.list_field[2].string_field,
'world')
# Test multiple assignments
doc.embedded_field.list_field[2].string_field = 'hello world'
doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
self.assertEqual(doc._get_changed_fields(),
['embedded_field.list_field.2'])
self.assertEqual(doc.embedded_field._delta(), ({'list_field.2': {
'_cls': 'Embedded',
'string_field': 'hello world',
'int_field': 1,
'list_field': ['1', 2, {'hello': 'world'}],
'dict_field': {'hello': 'world'}}
}, {}))
self.assertEqual(doc._delta(), ({'embedded_field.list_field.2': {
'_cls': 'Embedded',
'string_field': 'hello world',
'int_field': 1,
'list_field': ['1', 2, {'hello': 'world'}],
'dict_field': {'hello': 'world'}}
}, {}))
doc.save()
doc = doc.reload(10)
self.assertEqual(doc.embedded_field.list_field[2].string_field,
'hello world')
# Test list native methods
doc.embedded_field.list_field[2].list_field.pop(0)
self.assertEqual(doc._delta(),
({'embedded_field.list_field.2.list_field':
[2, {'hello': 'world'}]}, {}))
doc.save()
doc = doc.reload(10)
doc.embedded_field.list_field[2].list_field.append(1)
self.assertEqual(doc._delta(),
({'embedded_field.list_field.2.list_field':
[2, {'hello': 'world'}, 1]}, {}))
doc.save()
doc = doc.reload(10)
self.assertEqual(doc.embedded_field.list_field[2].list_field,
[2, {'hello': 'world'}, 1])
doc.embedded_field.list_field[2].list_field.sort(key=str)
doc.save()
doc = doc.reload(10)
self.assertEqual(doc.embedded_field.list_field[2].list_field,
[1, 2, {'hello': 'world'}])
del doc.embedded_field.list_field[2].list_field[2]['hello']
self.assertEqual(doc._delta(),
({}, {'embedded_field.list_field.2.list_field.2.hello': 1}))
doc.save()
doc = doc.reload(10)
del doc.embedded_field.list_field[2].list_field
self.assertEqual(doc._delta(),
({}, {'embedded_field.list_field.2.list_field': 1}))
doc.save()
doc = doc.reload(10)
doc.dict_field['Embedded'] = embedded_1
doc.save()
doc = doc.reload(10)
doc.dict_field['Embedded'].string_field = 'Hello World'
self.assertEqual(doc._get_changed_fields(),
['dict_field.Embedded.string_field'])
self.assertEqual(doc._delta(),
({'dict_field.Embedded.string_field': 'Hello World'}, {}))
def test_circular_reference_deltas(self):
self.circular_reference_deltas(Document, Document)
self.circular_reference_deltas(Document, DynamicDocument)
self.circular_reference_deltas(DynamicDocument, Document)
self.circular_reference_deltas(DynamicDocument, DynamicDocument)
def circular_reference_deltas(self, DocClass1, DocClass2):
class Person(DocClass1):
name = StringField()
owns = ListField(ReferenceField('Organization'))
class Organization(DocClass2):
name = StringField()
owner = ReferenceField('Person')
Person.drop_collection()
Organization.drop_collection()
person = Person(name="owner").save()
organization = Organization(name="company").save()
person.owns.append(organization)
organization.owner = person
person.save()
organization.save()
p = Person.objects[0].select_related()
o = Organization.objects.first()
self.assertEqual(p.owns[0], o)
self.assertEqual(o.owner, p)
def test_circular_reference_deltas_2(self):
self.circular_reference_deltas_2(Document, Document)
self.circular_reference_deltas_2(Document, DynamicDocument)
self.circular_reference_deltas_2(DynamicDocument, Document)
self.circular_reference_deltas_2(DynamicDocument, DynamicDocument)
def circular_reference_deltas_2(self, DocClass1, DocClass2, dbref=True):
class Person(DocClass1):
name = StringField()
owns = ListField(ReferenceField('Organization', dbref=dbref))
employer = ReferenceField('Organization', dbref=dbref)
class Organization(DocClass2):
name = StringField()
owner = ReferenceField('Person', dbref=dbref)
employees = ListField(ReferenceField('Person', dbref=dbref))
Person.drop_collection()
Organization.drop_collection()
person = Person(name="owner").save()
employee = Person(name="employee").save()
organization = Organization(name="company").save()
person.owns.append(organization)
organization.owner = person
organization.employees.append(employee)
employee.employer = organization
person.save()
organization.save()
employee.save()
p = Person.objects.get(name="owner")
e = Person.objects.get(name="employee")
o = Organization.objects.first()
self.assertEqual(p.owns[0], o)
self.assertEqual(o.owner, p)
self.assertEqual(e.employer, o)
return person, organization, employee
def test_delta_db_field(self):
self.delta_db_field(Document)
self.delta_db_field(DynamicDocument)
def delta_db_field(self, DocClass):
class Doc(DocClass):
string_field = StringField(db_field='db_string_field')
int_field = IntField(db_field='db_int_field')
dict_field = DictField(db_field='db_dict_field')
list_field = ListField(db_field='db_list_field')
Doc.drop_collection()
doc = Doc()
doc.save()
doc = Doc.objects.first()
self.assertEqual(doc._get_changed_fields(), [])
self.assertEqual(doc._delta(), ({}, {}))
doc.string_field = 'hello'
self.assertEqual(doc._get_changed_fields(), ['db_string_field'])
self.assertEqual(doc._delta(), ({'db_string_field': 'hello'}, {}))
doc._changed_fields = []
doc.int_field = 1
self.assertEqual(doc._get_changed_fields(), ['db_int_field'])
self.assertEqual(doc._delta(), ({'db_int_field': 1}, {}))
doc._changed_fields = []
dict_value = {'hello': 'world', 'ping': 'pong'}
doc.dict_field = dict_value
self.assertEqual(doc._get_changed_fields(), ['db_dict_field'])
self.assertEqual(doc._delta(), ({'db_dict_field': dict_value}, {}))
doc._changed_fields = []
list_value = ['1', 2, {'hello': 'world'}]
doc.list_field = list_value
self.assertEqual(doc._get_changed_fields(), ['db_list_field'])
self.assertEqual(doc._delta(), ({'db_list_field': list_value}, {}))
# Test unsetting
doc._changed_fields = []
doc.dict_field = {}
self.assertEqual(doc._get_changed_fields(), ['db_dict_field'])
self.assertEqual(doc._delta(), ({}, {'db_dict_field': 1}))
doc._changed_fields = []
doc.list_field = []
self.assertEqual(doc._get_changed_fields(), ['db_list_field'])
self.assertEqual(doc._delta(), ({}, {'db_list_field': 1}))
# Test that the data is actually saved
doc = Doc()
doc.save()
doc.string_field = 'hello'
doc.int_field = 1
doc.dict_field = {'hello': 'world'}
doc.list_field = ['1', 2, {'hello': 'world'}]
doc.save()
doc = doc.reload(10)
self.assertEqual(doc.string_field, 'hello')
self.assertEqual(doc.int_field, 1)
self.assertEqual(doc.dict_field, {'hello': 'world'})
self.assertEqual(doc.list_field, ['1', 2, {'hello': 'world'}])
def test_delta_recursive_db_field(self):
self.delta_recursive_db_field(Document, EmbeddedDocument)
self.delta_recursive_db_field(Document, DynamicEmbeddedDocument)
self.delta_recursive_db_field(DynamicDocument, EmbeddedDocument)
self.delta_recursive_db_field(DynamicDocument, DynamicEmbeddedDocument)
def delta_recursive_db_field(self, DocClass, EmbeddedClass):
class Embedded(EmbeddedClass):
string_field = StringField(db_field='db_string_field')
int_field = IntField(db_field='db_int_field')
dict_field = DictField(db_field='db_dict_field')
list_field = ListField(db_field='db_list_field')
class Doc(DocClass):
string_field = StringField(db_field='db_string_field')
int_field = IntField(db_field='db_int_field')
dict_field = DictField(db_field='db_dict_field')
list_field = ListField(db_field='db_list_field')
embedded_field = EmbeddedDocumentField(Embedded,
db_field='db_embedded_field')
Doc.drop_collection()
doc = Doc()
doc.save()
doc = Doc.objects.first()
self.assertEqual(doc._get_changed_fields(), [])
self.assertEqual(doc._delta(), ({}, {}))
embedded_1 = Embedded()
embedded_1.string_field = 'hello'
embedded_1.int_field = 1
embedded_1.dict_field = {'hello': 'world'}
embedded_1.list_field = ['1', 2, {'hello': 'world'}]
doc.embedded_field = embedded_1
self.assertEqual(doc._get_changed_fields(), ['db_embedded_field'])
embedded_delta = {
'db_string_field': 'hello',
'db_int_field': 1,
'db_dict_field': {'hello': 'world'},
'db_list_field': ['1', 2, {'hello': 'world'}]
}
self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {}))
self.assertEqual(doc._delta(),
({'db_embedded_field': embedded_delta}, {}))
doc.save()
doc = doc.reload(10)
doc.embedded_field.dict_field = {}
self.assertEqual(doc._get_changed_fields(),
['db_embedded_field.db_dict_field'])
self.assertEqual(doc.embedded_field._delta(),
({}, {'db_dict_field': 1}))
self.assertEqual(doc._delta(),
({}, {'db_embedded_field.db_dict_field': 1}))
doc.save()
doc = doc.reload(10)
self.assertEqual(doc.embedded_field.dict_field, {})
doc.embedded_field.list_field = []
self.assertEqual(doc._get_changed_fields(),
['db_embedded_field.db_list_field'])
self.assertEqual(doc.embedded_field._delta(),
({}, {'db_list_field': 1}))
self.assertEqual(doc._delta(),
({}, {'db_embedded_field.db_list_field': 1}))
doc.save()
doc = doc.reload(10)
self.assertEqual(doc.embedded_field.list_field, [])
embedded_2 = Embedded()
embedded_2.string_field = 'hello'
embedded_2.int_field = 1
embedded_2.dict_field = {'hello': 'world'}
embedded_2.list_field = ['1', 2, {'hello': 'world'}]
doc.embedded_field.list_field = ['1', 2, embedded_2]
self.assertEqual(doc._get_changed_fields(),
['db_embedded_field.db_list_field'])
self.assertEqual(doc.embedded_field._delta(), ({
'db_list_field': ['1', 2, {
'_cls': 'Embedded',
'db_string_field': 'hello',
'db_dict_field': {'hello': 'world'},
'db_int_field': 1,
'db_list_field': ['1', 2, {'hello': 'world'}],
}]
}, {}))
self.assertEqual(doc._delta(), ({
'db_embedded_field.db_list_field': ['1', 2, {
'_cls': 'Embedded',
'db_string_field': 'hello',
'db_dict_field': {'hello': 'world'},
'db_int_field': 1,
'db_list_field': ['1', 2, {'hello': 'world'}],
}]
}, {}))
doc.save()
doc = doc.reload(10)
self.assertEqual(doc.embedded_field.list_field[0], '1')
self.assertEqual(doc.embedded_field.list_field[1], 2)
for k in doc.embedded_field.list_field[2]._fields:
self.assertEqual(doc.embedded_field.list_field[2][k],
embedded_2[k])
doc.embedded_field.list_field[2].string_field = 'world'
self.assertEqual(doc._get_changed_fields(),
['db_embedded_field.db_list_field.2.db_string_field'])
self.assertEqual(doc.embedded_field._delta(),
({'db_list_field.2.db_string_field': 'world'}, {}))
self.assertEqual(doc._delta(),
({'db_embedded_field.db_list_field.2.db_string_field': 'world'},
{}))
doc.save()
doc = doc.reload(10)
self.assertEqual(doc.embedded_field.list_field[2].string_field,
'world')
# Test multiple assignments
doc.embedded_field.list_field[2].string_field = 'hello world'
doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
self.assertEqual(doc._get_changed_fields(),
['db_embedded_field.db_list_field.2'])
self.assertEqual(doc.embedded_field._delta(), ({'db_list_field.2': {
'_cls': 'Embedded',
'db_string_field': 'hello world',
'db_int_field': 1,
'db_list_field': ['1', 2, {'hello': 'world'}],
'db_dict_field': {'hello': 'world'}}}, {}))
self.assertEqual(doc._delta(), ({
'db_embedded_field.db_list_field.2': {
'_cls': 'Embedded',
'db_string_field': 'hello world',
'db_int_field': 1,
'db_list_field': ['1', 2, {'hello': 'world'}],
'db_dict_field': {'hello': 'world'}}
}, {}))
doc.save()
doc = doc.reload(10)
self.assertEqual(doc.embedded_field.list_field[2].string_field,
'hello world')
# Test list native methods
doc.embedded_field.list_field[2].list_field.pop(0)
self.assertEqual(doc._delta(),
({'db_embedded_field.db_list_field.2.db_list_field':
[2, {'hello': 'world'}]}, {}))
doc.save()
doc = doc.reload(10)
doc.embedded_field.list_field[2].list_field.append(1)
self.assertEqual(doc._delta(),
({'db_embedded_field.db_list_field.2.db_list_field':
[2, {'hello': 'world'}, 1]}, {}))
doc.save()
doc = doc.reload(10)
self.assertEqual(doc.embedded_field.list_field[2].list_field,
[2, {'hello': 'world'}, 1])
doc.embedded_field.list_field[2].list_field.sort(key=str)
doc.save()
doc = doc.reload(10)
self.assertEqual(doc.embedded_field.list_field[2].list_field,
[1, 2, {'hello': 'world'}])
del doc.embedded_field.list_field[2].list_field[2]['hello']
self.assertEqual(doc._delta(),
({}, {'db_embedded_field.db_list_field.2.db_list_field.2.hello': 1}))
doc.save()
doc = doc.reload(10)
del doc.embedded_field.list_field[2].list_field
self.assertEqual(doc._delta(), ({},
{'db_embedded_field.db_list_field.2.db_list_field': 1}))
def test_delta_for_dynamic_documents(self):
class Person(DynamicDocument):
name = StringField()
meta = {'allow_inheritance': True}
Person.drop_collection()
p = Person(name="James", age=34)
self.assertEqual(p._delta(), (
SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))
p.doc = 123
del p.doc
self.assertEqual(p._delta(), (
SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))
p = Person()
p.name = "Dean"
p.age = 22
p.save()
p.age = 24
self.assertEqual(p.age, 24)
self.assertEqual(p._get_changed_fields(), ['age'])
self.assertEqual(p._delta(), ({'age': 24}, {}))
p = Person.objects(age=22).get()
p.age = 24
self.assertEqual(p.age, 24)
self.assertEqual(p._get_changed_fields(), ['age'])
self.assertEqual(p._delta(), ({'age': 24}, {}))
p.save()
self.assertEqual(1, Person.objects(age=24).count())
def test_dynamic_delta(self):
class Doc(DynamicDocument):
pass
Doc.drop_collection()
doc = Doc()
doc.save()
doc = Doc.objects.first()
self.assertEqual(doc._get_changed_fields(), [])
self.assertEqual(doc._delta(), ({}, {}))
doc.string_field = 'hello'
self.assertEqual(doc._get_changed_fields(), ['string_field'])
self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {}))
doc._changed_fields = []
doc.int_field = 1
self.assertEqual(doc._get_changed_fields(), ['int_field'])
self.assertEqual(doc._delta(), ({'int_field': 1}, {}))
doc._changed_fields = []
dict_value = {'hello': 'world', 'ping': 'pong'}
doc.dict_field = dict_value
self.assertEqual(doc._get_changed_fields(), ['dict_field'])
self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {}))
doc._changed_fields = []
list_value = ['1', 2, {'hello': 'world'}]
doc.list_field = list_value
self.assertEqual(doc._get_changed_fields(), ['list_field'])
self.assertEqual(doc._delta(), ({'list_field': list_value}, {}))
# Test unsetting
doc._changed_fields = []
doc.dict_field = {}
self.assertEqual(doc._get_changed_fields(), ['dict_field'])
self.assertEqual(doc._delta(), ({}, {'dict_field': 1}))
doc._changed_fields = []
doc.list_field = []
self.assertEqual(doc._get_changed_fields(), ['list_field'])
self.assertEqual(doc._delta(), ({}, {'list_field': 1}))
def test_delta_with_dbref_true(self):
person, organization, employee = self.circular_reference_deltas_2(Document, Document, True)
employee.name = 'test'
self.assertEqual(organization._get_changed_fields(), [])
updates, removals = organization._delta()
self.assertEqual({}, removals)
self.assertEqual({}, updates)
organization.employees.append(person)
updates, removals = organization._delta()
self.assertEqual({}, removals)
self.assertTrue('employees' in updates)
def test_delta_with_dbref_false(self):
person, organization, employee = self.circular_reference_deltas_2(Document, Document, False)
employee.name = 'test'
self.assertEqual(organization._get_changed_fields(), [])
updates, removals = organization._delta()
self.assertEqual({}, removals)
self.assertEqual({}, updates)
organization.employees.append(person)
updates, removals = organization._delta()
self.assertEqual({}, removals)
self.assertTrue('employees' in updates)
def test_nested_nested_fields_mark_as_changed(self):
class EmbeddedDoc(EmbeddedDocument):
name = StringField()
class MyDoc(Document):
subs = MapField(MapField(EmbeddedDocumentField(EmbeddedDoc)))
name = StringField()
MyDoc.drop_collection()
mydoc = MyDoc(name='testcase1', subs={'a': {'b': EmbeddedDoc(name='foo')}}).save()
mydoc = MyDoc.objects.first()
subdoc = mydoc.subs['a']['b']
subdoc.name = 'bar'
self.assertEqual(["name"], subdoc._get_changed_fields())
self.assertEqual(["subs.a.b.name"], mydoc._get_changed_fields())
mydoc._clear_changed_fields()
self.assertEqual([], mydoc._get_changed_fields())
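# Sketch of the change tracking shown above: an embedded document tracks
# its own changed fields by local name, while the owning document prefixes
# the full dotted path through any MapField keys:
#
#     subdoc._get_changed_fields()   # ['name']
#     mydoc._get_changed_fields()    # ['subs.a.b.name']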
def test_lower_level_mark_as_changed(self):
class EmbeddedDoc(EmbeddedDocument):
name = StringField()
class MyDoc(Document):
subs = MapField(EmbeddedDocumentField(EmbeddedDoc))
MyDoc.drop_collection()
MyDoc().save()
mydoc = MyDoc.objects.first()
mydoc.subs['a'] = EmbeddedDoc()
self.assertEqual(["subs.a"], mydoc._get_changed_fields())
subdoc = mydoc.subs['a']
subdoc.name = 'bar'
self.assertEqual(["name"], subdoc._get_changed_fields())
self.assertEqual(["subs.a"], mydoc._get_changed_fields())
mydoc.save()
mydoc._clear_changed_fields()
self.assertEqual([], mydoc._get_changed_fields())
def test_upper_level_mark_as_changed(self):
class EmbeddedDoc(EmbeddedDocument):
name = StringField()
class MyDoc(Document):
subs = MapField(EmbeddedDocumentField(EmbeddedDoc))
MyDoc.drop_collection()
MyDoc(subs={'a': EmbeddedDoc(name='foo')}).save()
mydoc = MyDoc.objects.first()
subdoc = mydoc.subs['a']
subdoc.name = 'bar'
self.assertEqual(["name"], subdoc._get_changed_fields())
self.assertEqual(["subs.a.name"], mydoc._get_changed_fields())
mydoc.subs['a'] = EmbeddedDoc()
self.assertEqual(["subs.a"], mydoc._get_changed_fields())
mydoc.save()
mydoc._clear_changed_fields()
self.assertEqual([], mydoc._get_changed_fields())
def test_referenced_object_changed_attributes(self):
"""Ensures that when you save a new reference to a field, the referenced object isn't altered"""
class Organization(Document):
name = StringField()
class User(Document):
name = StringField()
org = ReferenceField('Organization', required=True)
Organization.drop_collection()
User.drop_collection()
org1 = Organization(name='Org 1')
org1.save()
org2 = Organization(name='Org 2')
org2.save()
user = User(name='Fred', org=org1)
user.save()
org1.reload()
org2.reload()
user.reload()
self.assertEqual(org1.name, 'Org 1')
self.assertEqual(org2.name, 'Org 2')
self.assertEqual(user.name, 'Fred')
user.name = 'Harold'
user.org = org2
org2.name = 'New Org 2'
self.assertEqual(org2.name, 'New Org 2')
user.save()
org2.save()
self.assertEqual(org2.name, 'New Org 2')
org2.reload()
self.assertEqual(org2.name, 'New Org 2')
def test_delta_for_nested_map_fields(self):
class UInfoDocument(Document):
phone = StringField()
class EmbeddedRole(EmbeddedDocument):
type = StringField()
class EmbeddedUser(EmbeddedDocument):
name = StringField()
roles = MapField(field=EmbeddedDocumentField(EmbeddedRole))
rolist = ListField(field=EmbeddedDocumentField(EmbeddedRole))
info = ReferenceField(UInfoDocument)
class Doc(Document):
users = MapField(field=EmbeddedDocumentField(EmbeddedUser))
num = IntField(default=-1)
Doc.drop_collection()
doc = Doc(num=1)
doc.users["007"] = EmbeddedUser(name="Agent007")
doc.save()
uinfo = UInfoDocument(phone="79089269066")
uinfo.save()
d = Doc.objects(num=1).first()
d.users["007"]["roles"]["666"] = EmbeddedRole(type="superadmin")
d.users["007"]["rolist"].append(EmbeddedRole(type="oops"))
d.users["007"]["info"] = uinfo
delta = d._delta()
self.assertEqual(True, "users.007.roles.666" in delta[0])
self.assertEqual(True, "users.007.rolist" in delta[0])
self.assertEqual(True, "users.007.info" in delta[0])
self.assertEqual('superadmin', delta[0]["users.007.roles.666"]["type"])
self.assertEqual('oops', delta[0]["users.007.rolist"][0]["type"])
self.assertEqual(uinfo.id, delta[0]["users.007.info"])
if __name__ == '__main__':
unittest.main()


@@ -1,373 +0,0 @@
import unittest
from mongoengine import *
from mongoengine.connection import get_db
__all__ = ("DynamicTest", )
class DynamicTest(unittest.TestCase):
def setUp(self):
connect(db='mongoenginetest')
self.db = get_db()
class Person(DynamicDocument):
name = StringField()
meta = {'allow_inheritance': True}
Person.drop_collection()
self.Person = Person
def test_simple_dynamic_document(self):
"""Ensures simple dynamic documents are saved correctly"""
p = self.Person()
p.name = "James"
p.age = 34
self.assertEqual(p.to_mongo(), {"_cls": "Person", "name": "James",
"age": 34})
self.assertEqual(p.to_mongo().keys(), ["_cls", "name", "age"])
p.save()
self.assertEqual(p.to_mongo().keys(), ["_id", "_cls", "name", "age"])
self.assertEqual(self.Person.objects.first().age, 34)
# Confirm no changes to self.Person
self.assertFalse(hasattr(self.Person, 'age'))
def test_change_scope_of_variable(self):
"""Test changing the scope of a dynamic field has no adverse effects"""
p = self.Person()
p.name = "Dean"
p.misc = 22
p.save()
p = self.Person.objects.get()
p.misc = {'hello': 'world'}
p.save()
p = self.Person.objects.get()
self.assertEqual(p.misc, {'hello': 'world'})
def test_delete_dynamic_field(self):
"""Test deleting a dynamic field works"""
self.Person.drop_collection()
p = self.Person()
p.name = "Dean"
p.misc = 22
p.save()
p = self.Person.objects.get()
p.misc = {'hello': 'world'}
p.save()
p = self.Person.objects.get()
self.assertEqual(p.misc, {'hello': 'world'})
collection = self.db[self.Person._get_collection_name()]
obj = collection.find_one()
self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'misc', 'name'])
del p.misc
p.save()
p = self.Person.objects.get()
self.assertFalse(hasattr(p, 'misc'))
obj = collection.find_one()
self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'name'])
def test_reload_after_unsetting(self):
p = self.Person()
p.misc = 22
p.save()
p.update(unset__misc=1)
p.reload()
def test_reload_dynamic_field(self):
self.Person.objects.delete()
p = self.Person.objects.create()
p.update(age=1)
self.assertEqual(len(p._data), 3)
self.assertEqual(sorted(p._data.keys()), ['_cls', 'id', 'name'])
p.reload()
self.assertEqual(len(p._data), 4)
self.assertEqual(sorted(p._data.keys()), ['_cls', 'age', 'id', 'name'])
def test_dynamic_document_queries(self):
"""Ensure we can query dynamic fields"""
p = self.Person()
p.name = "Dean"
p.age = 22
p.save()
self.assertEqual(1, self.Person.objects(age=22).count())
p = self.Person.objects(age=22)
p = p.get()
self.assertEqual(22, p.age)
def test_complex_dynamic_document_queries(self):
class Person(DynamicDocument):
name = StringField()
Person.drop_collection()
p = Person(name="test")
p.age = "ten"
p.save()
p1 = Person(name="test1")
p1.age = "less then ten and a half"
p1.save()
p2 = Person(name="test2")
p2.age = 10
p2.save()
self.assertEqual(Person.objects(age__icontains='ten').count(), 2)
self.assertEqual(Person.objects(age__gte=10).count(), 1)
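# Hedged note on the counts above: query operators are applied per stored
# value, so age__icontains matches the two documents whose dynamic 'age'
# happens to be a string, while age__gte=10 matches only the numeric one:
#
#     Person.objects(age__icontains='ten')  # string values -> 2 matches
#     Person.objects(age__gte=10)           # numeric values -> 1 match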
def test_complex_data_lookups(self):
"""Ensure you can query dynamic document dynamic fields"""
p = self.Person()
p.misc = {'hello': 'world'}
p.save()
self.assertEqual(1, self.Person.objects(misc__hello='world').count())
def test_three_level_complex_data_lookups(self):
"""Ensure you can query three level document dynamic fields"""
p = self.Person.objects.create(
misc={'hello': {'hello2': 'world'}}
)
self.assertEqual(1, self.Person.objects(misc__hello__hello2='world').count())
def test_complex_embedded_document_validation(self):
"""Ensure embedded dynamic documents may be validated"""
class Embedded(DynamicEmbeddedDocument):
content = URLField()
class Doc(DynamicDocument):
pass
Doc.drop_collection()
doc = Doc()
embedded_doc_1 = Embedded(content='http://mongoengine.org')
embedded_doc_1.validate()
embedded_doc_2 = Embedded(content='this is not a url')
self.assertRaises(ValidationError, embedded_doc_2.validate)
doc.embedded_field_1 = embedded_doc_1
doc.embedded_field_2 = embedded_doc_2
self.assertRaises(ValidationError, doc.validate)
def test_inheritance(self):
"""Ensure that dynamic document plays nice with inheritance"""
class Employee(self.Person):
salary = IntField()
Employee.drop_collection()
self.assertTrue('name' in Employee._fields)
self.assertTrue('salary' in Employee._fields)
self.assertEqual(Employee._get_collection_name(),
self.Person._get_collection_name())
joe_bloggs = Employee()
joe_bloggs.name = "Joe Bloggs"
joe_bloggs.salary = 10
joe_bloggs.age = 20
joe_bloggs.save()
self.assertEqual(1, self.Person.objects(age=20).count())
self.assertEqual(1, Employee.objects(age=20).count())
joe_bloggs = self.Person.objects.first()
self.assertTrue(isinstance(joe_bloggs, Employee))
def test_embedded_dynamic_document(self):
"""Test dynamic embedded documents"""
class Embedded(DynamicEmbeddedDocument):
pass
class Doc(DynamicDocument):
pass
Doc.drop_collection()
doc = Doc()
embedded_1 = Embedded()
embedded_1.string_field = 'hello'
embedded_1.int_field = 1
embedded_1.dict_field = {'hello': 'world'}
embedded_1.list_field = ['1', 2, {'hello': 'world'}]
doc.embedded_field = embedded_1
self.assertEqual(doc.to_mongo(), {
"embedded_field": {
"_cls": "Embedded",
"string_field": "hello",
"int_field": 1,
"dict_field": {"hello": "world"},
"list_field": ['1', 2, {'hello': 'world'}]
}
})
doc.save()
doc = Doc.objects.first()
self.assertEqual(doc.embedded_field.__class__, Embedded)
self.assertEqual(doc.embedded_field.string_field, "hello")
self.assertEqual(doc.embedded_field.int_field, 1)
self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'})
self.assertEqual(doc.embedded_field.list_field,
['1', 2, {'hello': 'world'}])
def test_complex_embedded_documents(self):
"""Test complex dynamic embedded documents setups"""
class Embedded(DynamicEmbeddedDocument):
pass
class Doc(DynamicDocument):
pass
Doc.drop_collection()
doc = Doc()
embedded_1 = Embedded()
embedded_1.string_field = 'hello'
embedded_1.int_field = 1
embedded_1.dict_field = {'hello': 'world'}
embedded_2 = Embedded()
embedded_2.string_field = 'hello'
embedded_2.int_field = 1
embedded_2.dict_field = {'hello': 'world'}
embedded_2.list_field = ['1', 2, {'hello': 'world'}]
embedded_1.list_field = ['1', 2, embedded_2]
doc.embedded_field = embedded_1
self.assertEqual(doc.to_mongo(), {
"embedded_field": {
"_cls": "Embedded",
"string_field": "hello",
"int_field": 1,
"dict_field": {"hello": "world"},
"list_field": ['1', 2,
{"_cls": "Embedded",
"string_field": "hello",
"int_field": 1,
"dict_field": {"hello": "world"},
"list_field": ['1', 2, {'hello': 'world'}]}
]
}
})
doc.save()
doc = Doc.objects.first()
self.assertEqual(doc.embedded_field.__class__, Embedded)
self.assertEqual(doc.embedded_field.string_field, "hello")
self.assertEqual(doc.embedded_field.int_field, 1)
self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'})
self.assertEqual(doc.embedded_field.list_field[0], '1')
self.assertEqual(doc.embedded_field.list_field[1], 2)
embedded_field = doc.embedded_field.list_field[2]
self.assertEqual(embedded_field.__class__, Embedded)
self.assertEqual(embedded_field.string_field, "hello")
self.assertEqual(embedded_field.int_field, 1)
self.assertEqual(embedded_field.dict_field, {'hello': 'world'})
self.assertEqual(embedded_field.list_field, ['1', 2,
{'hello': 'world'}])
def test_dynamic_and_embedded(self):
"""Ensure embedded documents play nicely"""
class Address(EmbeddedDocument):
city = StringField()
class Person(DynamicDocument):
name = StringField()
Person.drop_collection()
Person(name="Ross", address=Address(city="London")).save()
person = Person.objects.first()
person.address.city = "Lundenne"
person.save()
self.assertEqual(Person.objects.first().address.city, "Lundenne")
person = Person.objects.first()
person.address = Address(city="Londinium")
person.save()
self.assertEqual(Person.objects.first().address.city, "Londinium")
person = Person.objects.first()
person.age = 35
person.save()
self.assertEqual(Person.objects.first().age, 35)
def test_dynamic_embedded_works_with_only(self):
"""Ensure custom fieldnames on a dynamic embedded document are found by qs.only()"""
class Address(DynamicEmbeddedDocument):
city = StringField()
class Person(DynamicDocument):
address = EmbeddedDocumentField(Address)
Person.drop_collection()
Person(name="Eric", address=Address(city="San Francisco", street_number="1337")).save()
self.assertEqual(Person.objects.first().address.street_number, '1337')
self.assertEqual(Person.objects.only('address__street_number').first().address.street_number, '1337')
def test_dynamic_and_embedded_dict_access(self):
"""Ensure embedded dynamic documents work with dict[] style access"""
class Address(EmbeddedDocument):
city = StringField()
class Person(DynamicDocument):
name = StringField()
Person.drop_collection()
Person(name="Ross", address=Address(city="London")).save()
person = Person.objects.first()
person.attrval = "This works"
person["phone"] = "555-1212" # but this should too
# Same thing two levels deep
person["address"]["city"] = "Lundenne"
person.save()
self.assertEqual(Person.objects.first().address.city, "Lundenne")
self.assertEqual(Person.objects.first().phone, "555-1212")
person = Person.objects.first()
person.address = Address(city="Londinium")
person.save()
self.assertEqual(Person.objects.first().address.city, "Londinium")
person = Person.objects.first()
person["age"] = 35
person.save()
self.assertEqual(Person.objects.first().age, 35)
if __name__ == '__main__':
unittest.main()

File diff suppressed because it is too large


@@ -1,512 +0,0 @@
# -*- coding: utf-8 -*-
import unittest
import warnings
from datetime import datetime
from tests.fixtures import Base
from mongoengine import Document, EmbeddedDocument, connect
from mongoengine.connection import get_db
from mongoengine.fields import (BooleanField, GenericReferenceField,
IntField, StringField)
__all__ = ('InheritanceTest', )
class InheritanceTest(unittest.TestCase):
def setUp(self):
connect(db='mongoenginetest')
self.db = get_db()
def tearDown(self):
for collection in self.db.collection_names():
if 'system.' in collection:
continue
self.db.drop_collection(collection)
def test_superclasses(self):
"""Ensure that the correct list of superclasses is assembled.
"""
class Animal(Document):
meta = {'allow_inheritance': True}
class Fish(Animal): pass
class Guppy(Fish): pass
class Mammal(Animal): pass
class Dog(Mammal): pass
class Human(Mammal): pass
self.assertEqual(Animal._superclasses, ())
self.assertEqual(Fish._superclasses, ('Animal',))
self.assertEqual(Guppy._superclasses, ('Animal', 'Animal.Fish'))
self.assertEqual(Mammal._superclasses, ('Animal',))
self.assertEqual(Dog._superclasses, ('Animal', 'Animal.Mammal'))
self.assertEqual(Human._superclasses, ('Animal', 'Animal.Mammal'))
def test_external_superclasses(self):
"""Ensure that the correct list of super classes is assembled when
importing part of the model.
"""
class Animal(Base): pass
class Fish(Animal): pass
class Guppy(Fish): pass
class Mammal(Animal): pass
class Dog(Mammal): pass
class Human(Mammal): pass
self.assertEqual(Animal._superclasses, ('Base', ))
self.assertEqual(Fish._superclasses, ('Base', 'Base.Animal',))
self.assertEqual(Guppy._superclasses, ('Base', 'Base.Animal',
'Base.Animal.Fish'))
self.assertEqual(Mammal._superclasses, ('Base', 'Base.Animal',))
self.assertEqual(Dog._superclasses, ('Base', 'Base.Animal',
'Base.Animal.Mammal'))
self.assertEqual(Human._superclasses, ('Base', 'Base.Animal',
'Base.Animal.Mammal'))
def test_subclasses(self):
"""Ensure that the correct list of _subclasses (subclasses) is
assembled.
"""
class Animal(Document):
meta = {'allow_inheritance': True}
class Fish(Animal): pass
class Guppy(Fish): pass
class Mammal(Animal): pass
class Dog(Mammal): pass
class Human(Mammal): pass
self.assertEqual(Animal._subclasses, ('Animal',
'Animal.Fish',
'Animal.Fish.Guppy',
'Animal.Mammal',
'Animal.Mammal.Dog',
'Animal.Mammal.Human'))
self.assertEqual(Fish._subclasses, ('Animal.Fish',
'Animal.Fish.Guppy',))
self.assertEqual(Guppy._subclasses, ('Animal.Fish.Guppy',))
self.assertEqual(Mammal._subclasses, ('Animal.Mammal',
'Animal.Mammal.Dog',
'Animal.Mammal.Human'))
self.assertEqual(Human._subclasses, ('Animal.Mammal.Human',))
def test_external_subclasses(self):
"""Ensure that the correct list of _subclasses (subclasses) is
assembled when importing part of the model.
"""
class Animal(Base): pass
class Fish(Animal): pass
class Guppy(Fish): pass
class Mammal(Animal): pass
class Dog(Mammal): pass
class Human(Mammal): pass
self.assertEqual(Animal._subclasses, ('Base.Animal',
'Base.Animal.Fish',
'Base.Animal.Fish.Guppy',
'Base.Animal.Mammal',
'Base.Animal.Mammal.Dog',
'Base.Animal.Mammal.Human'))
self.assertEqual(Fish._subclasses, ('Base.Animal.Fish',
'Base.Animal.Fish.Guppy',))
self.assertEqual(Guppy._subclasses, ('Base.Animal.Fish.Guppy',))
self.assertEqual(Mammal._subclasses, ('Base.Animal.Mammal',
'Base.Animal.Mammal.Dog',
'Base.Animal.Mammal.Human'))
self.assertEqual(Human._subclasses, ('Base.Animal.Mammal.Human',))
def test_dynamic_declarations(self):
"""Test that declaring an extra class updates meta data"""
class Animal(Document):
meta = {'allow_inheritance': True}
self.assertEqual(Animal._superclasses, ())
self.assertEqual(Animal._subclasses, ('Animal',))
# Test dynamically adding a class changes the meta data
class Fish(Animal):
pass
self.assertEqual(Animal._superclasses, ())
self.assertEqual(Animal._subclasses, ('Animal', 'Animal.Fish'))
self.assertEqual(Fish._superclasses, ('Animal', ))
self.assertEqual(Fish._subclasses, ('Animal.Fish',))
# Test dynamically adding an inherited class changes the meta data
class Pike(Fish):
pass
self.assertEqual(Animal._superclasses, ())
self.assertEqual(Animal._subclasses, ('Animal', 'Animal.Fish',
'Animal.Fish.Pike'))
self.assertEqual(Fish._superclasses, ('Animal', ))
self.assertEqual(Fish._subclasses, ('Animal.Fish', 'Animal.Fish.Pike'))
self.assertEqual(Pike._superclasses, ('Animal', 'Animal.Fish'))
self.assertEqual(Pike._subclasses, ('Animal.Fish.Pike',))
def test_inheritance_meta_data(self):
"""Ensure that document may inherit fields from a superclass document.
"""
class Person(Document):
name = StringField()
age = IntField()
meta = {'allow_inheritance': True}
class Employee(Person):
salary = IntField()
self.assertEqual(['_cls', 'age', 'id', 'name', 'salary'],
sorted(Employee._fields.keys()))
self.assertEqual(Employee._get_collection_name(),
Person._get_collection_name())
def test_inheritance_to_mongo_keys(self):
"""Ensure that document may inherit fields from a superclass document.
"""
class Person(Document):
name = StringField()
age = IntField()
meta = {'allow_inheritance': True}
class Employee(Person):
salary = IntField()
self.assertEqual(['_cls', 'age', 'id', 'name', 'salary'],
sorted(Employee._fields.keys()))
self.assertEqual(Person(name="Bob", age=35).to_mongo().keys(),
['_cls', 'name', 'age'])
self.assertEqual(Employee(name="Bob", age=35, salary=0).to_mongo().keys(),
['_cls', 'name', 'age', 'salary'])
self.assertEqual(Employee._get_collection_name(),
Person._get_collection_name())
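# Editor's sketch (not part of the original test file): the _cls value that
# to_mongo() emits is the dotted path from the inheritance root, which is what
# the polymorphic machinery filters on. A minimal illustration:
from mongoengine import Document, StringField
class Base(Document):
    name = StringField()
    meta = {'allow_inheritance': True}
class Derived(Base):
    pass
assert Derived(name='Bob').to_mongo()['_cls'] == 'Base.Derived'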
def test_indexes_and_multiple_inheritance(self):
""" Ensure that all of the indexes are created for a document with
multiple inheritance.
"""
class A(Document):
a = StringField()
meta = {
'allow_inheritance': True,
'indexes': ['a']
}
class B(Document):
b = StringField()
meta = {
'allow_inheritance': True,
'indexes': ['b']
}
class C(A, B):
pass
A.drop_collection()
B.drop_collection()
C.drop_collection()
C.ensure_indexes()
self.assertEqual(
sorted([idx['key'] for idx in C._get_collection().index_information().values()]),
sorted([[(u'_cls', 1), (u'b', 1)], [(u'_id', 1)], [(u'_cls', 1), (u'a', 1)]])
)
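# Editor's sketch (not part of the original test file): the assertion above
# shows each declared index arriving as [('_cls', 1), (field, 1)] rather than
# plain [(field, 1)]. With inheritance allowed (and, as an assumption worth
# checking, the default index_cls meta option), MongoEngine prefixes declared
# indexes with _cls so that subclass queries, which always filter on _cls,
# can still use them:
class Indexed(Document):
    a = StringField()
    meta = {'allow_inheritance': True, 'indexes': ['a']}
# Indexed.ensure_indexes() would then create the compound key
# [('_cls', 1), ('a', 1)] alongside the default _id index.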
def test_polymorphic_queries(self):
"""Ensure that the correct subclasses are returned from a query
"""
class Animal(Document):
meta = {'allow_inheritance': True}
class Fish(Animal): pass
class Mammal(Animal): pass
class Dog(Mammal): pass
class Human(Mammal): pass
Animal.drop_collection()
Animal().save()
Fish().save()
Mammal().save()
Dog().save()
Human().save()
classes = [obj.__class__ for obj in Animal.objects]
self.assertEqual(classes, [Animal, Fish, Mammal, Dog, Human])
classes = [obj.__class__ for obj in Mammal.objects]
self.assertEqual(classes, [Mammal, Dog, Human])
classes = [obj.__class__ for obj in Human.objects]
self.assertEqual(classes, [Human])
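# Editor's sketch (not part of the original test file): the per-class results
# above come from an implicit _cls filter built from _subclasses. Peeking at
# the private _query attribute (an implementation detail, so treat the exact
# shape as an assumption) makes this visible; it is roughly
# {'_cls': {'$in': ('Animal.Mammal', 'Animal.Mammal.Dog', 'Animal.Mammal.Human')}}:
query = Mammal.objects._query
assert set(query['_cls']['$in']) == set(Mammal._subclasses)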
def test_allow_inheritance(self):
"""Ensure that inheritance is disabled by default on simple
classes and that _cls will not be used.
"""
class Animal(Document):
name = StringField()
# can't inherit because Animal didn't explicitly allow inheritance
with self.assertRaises(ValueError):
class Dog(Animal):
pass
# Check that _cls etc aren't present on simple documents
dog = Animal(name='dog').save()
self.assertEqual(dog.to_mongo().keys(), ['_id', 'name'])
collection = self.db[Animal._get_collection_name()]
obj = collection.find_one()
self.assertFalse('_cls' in obj)
def test_cant_turn_off_inheritance_on_subclass(self):
"""Ensure if inheritance is on in a subclass you cant turn it off.
"""
class Animal(Document):
name = StringField()
meta = {'allow_inheritance': True}
with self.assertRaises(ValueError):
class Mammal(Animal):
meta = {'allow_inheritance': False}
def test_allow_inheritance_abstract_document(self):
"""Ensure that abstract documents can set inheritance rules and that
_cls will not be used.
"""
class FinalDocument(Document):
meta = {'abstract': True,
'allow_inheritance': False}
class Animal(FinalDocument):
name = StringField()
with self.assertRaises(ValueError):
class Mammal(Animal):
pass
# Check that _cls isn't present in simple documents
doc = Animal(name='dog')
self.assertFalse('_cls' in doc.to_mongo())
def test_abstract_handle_ids_in_metaclass_properly(self):
class City(Document):
continent = StringField()
meta = {'abstract': True,
'allow_inheritance': False}
class EuropeanCity(City):
name = StringField()
berlin = EuropeanCity(name='Berlin', continent='Europe')
self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered))
self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered))
self.assertEqual(len(berlin._fields_ordered), 3)
self.assertEqual(berlin._fields_ordered[0], 'id')
def test_auto_id_not_set_if_specific_in_parent_class(self):
class City(Document):
continent = StringField()
city_id = IntField(primary_key=True)
meta = {'abstract': True,
'allow_inheritance': False}
class EuropeanCity(City):
name = StringField()
berlin = EuropeanCity(name='Berlin', continent='Europe')
self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered))
self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered))
self.assertEqual(len(berlin._fields_ordered), 3)
self.assertEqual(berlin._fields_ordered[0], 'city_id')
def test_auto_id_vs_non_pk_id_field(self):
class City(Document):
continent = StringField()
id = IntField()
meta = {'abstract': True,
'allow_inheritance': False}
class EuropeanCity(City):
name = StringField()
berlin = EuropeanCity(name='Berlin', continent='Europe')
self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered))
self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered))
self.assertEqual(len(berlin._fields_ordered), 4)
self.assertEqual(berlin._fields_ordered[0], 'auto_id_0')
berlin.save()
self.assertEqual(berlin.pk, berlin.auto_id_0)
def test_abstract_document_creation_does_not_fail(self):
class City(Document):
continent = StringField()
meta = {'abstract': True,
'allow_inheritance': False}
bkk = City(continent='asia')
self.assertEqual(None, bkk.pk)
# TODO: expected error? Shouldn't we create a new error type?
with self.assertRaises(KeyError):
setattr(bkk, 'pk', 1)
def test_allow_inheritance_embedded_document(self):
"""Ensure embedded documents respect inheritance."""
class Comment(EmbeddedDocument):
content = StringField()
with self.assertRaises(ValueError):
class SpecialComment(Comment):
pass
doc = Comment(content='test')
self.assertFalse('_cls' in doc.to_mongo())
class Comment(EmbeddedDocument):
content = StringField()
meta = {'allow_inheritance': True}
doc = Comment(content='test')
self.assertTrue('_cls' in doc.to_mongo())
def test_document_inheritance(self):
"""Ensure mutliple inheritance of abstract documents
"""
class DateCreatedDocument(Document):
meta = {
'allow_inheritance': True,
'abstract': True,
}
class DateUpdatedDocument(Document):
meta = {
'allow_inheritance': True,
'abstract': True,
}
try:
class MyDocument(DateCreatedDocument, DateUpdatedDocument):
pass
except Exception:
self.fail("Couldn't create MyDocument class")
def test_abstract_documents(self):
"""Ensure that a document superclass can be marked as abstract
thereby not using it as the name for the collection."""
defaults = {'index_background': True,
'index_drop_dups': True,
'index_opts': {'hello': 'world'},
'allow_inheritance': True,
'queryset_class': 'QuerySet',
'db_alias': 'myDB',
'shard_key': ('hello', 'world')}
meta_settings = {'abstract': True}
meta_settings.update(defaults)
class Animal(Document):
name = StringField()
meta = meta_settings
class Fish(Animal): pass
class Guppy(Fish): pass
class Mammal(Animal):
meta = {'abstract': True}
class Human(Mammal): pass
for k, v in defaults.iteritems():
for cls in [Animal, Fish, Guppy]:
self.assertEqual(cls._meta[k], v)
self.assertFalse('collection' in Animal._meta)
self.assertFalse('collection' in Mammal._meta)
self.assertEqual(Animal._get_collection_name(), None)
self.assertEqual(Mammal._get_collection_name(), None)
self.assertEqual(Fish._get_collection_name(), 'fish')
self.assertEqual(Guppy._get_collection_name(), 'fish')
self.assertEqual(Human._get_collection_name(), 'human')
# ensure that a subclass of a non-abstract class can't be abstract
with self.assertRaises(ValueError):
class EvilHuman(Human):
evil = BooleanField(default=True)
meta = {'abstract': True}
def test_abstract_embedded_documents(self):
# 789: EmbeddedDocument shouldn't inherit abstract
class A(EmbeddedDocument):
meta = {"abstract": True}
class B(A):
pass
self.assertFalse(B._meta["abstract"])
def test_inherited_collections(self):
"""Ensure that subclassed documents don't override parents'
collections
"""
class Drink(Document):
name = StringField()
meta = {'allow_inheritance': True}
class Drinker(Document):
drink = GenericReferenceField()
try:
warnings.simplefilter("error")
class AlcoholicDrink(Drink):
meta = {'collection': 'booze'}
except SyntaxWarning:
warnings.simplefilter("ignore")
class AlcoholicDrink(Drink):
meta = {'collection': 'booze'}
else:
raise AssertionError("SyntaxWarning should be triggered")
warnings.resetwarnings()
Drink.drop_collection()
AlcoholicDrink.drop_collection()
Drinker.drop_collection()
red_bull = Drink(name='Red Bull')
red_bull.save()
programmer = Drinker(drink=red_bull)
programmer.save()
beer = AlcoholicDrink(name='Beer')
beer.save()
real_person = Drinker(drink=beer)
real_person.save()
self.assertEqual(Drinker.objects[0].drink.name, red_bull.name)
self.assertEqual(Drinker.objects[1].drink.name, beer.name)
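# Editor's sketch (not part of the original test file): if subclasses really
# do need their own collections, one supported pattern is an abstract base,
# which never claims a collection itself, so no SyntaxWarning fires
# (BaseDrink and AlcoholicDrinkV2 are hypothetical names for illustration):
from mongoengine import Document, StringField
class BaseDrink(Document):
    name = StringField()
    meta = {'abstract': True, 'allow_inheritance': True}
class AlcoholicDrinkV2(BaseDrink):
    meta = {'collection': 'booze'}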
if __name__ == '__main__':
unittest.main()

File diff suppressed because it is too large

View File

@@ -1,112 +0,0 @@
import unittest
import uuid
from nose.plugins.skip import SkipTest
from datetime import datetime
from bson import ObjectId
import pymongo
from mongoengine import *
__all__ = ("TestJson",)
class TestJson(unittest.TestCase):
def setUp(self):
connect(db='mongoenginetest')
def test_json_names(self):
"""
Going to test reported issue:
https://github.com/MongoEngine/mongoengine/issues/654
where the reporter asks for the availability to perform
a to_json with the original class names and not the abreviated
mongodb document keys
"""
class Embedded(EmbeddedDocument):
string = StringField(db_field='s')
class Doc(Document):
string = StringField(db_field='s')
embedded = EmbeddedDocumentField(Embedded, db_field='e')
doc = Doc( string="Hello", embedded=Embedded(string="Inner Hello"))
doc_json = doc.to_json(sort_keys=True, use_db_field=False,separators=(',', ':'))
expected_json = """{"embedded":{"string":"Inner Hello"},"string":"Hello"}"""
self.assertEqual( doc_json, expected_json)
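# Editor's addition (not in the original test): the default serialisation uses
# the abbreviated db_field names instead, so the same document round-trips as:
db_json = doc.to_json(sort_keys=True, separators=(',', ':'))
self.assertEqual(db_json, """{"e":{"s":"Inner Hello"},"s":"Hello"}""")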
def test_json_simple(self):
class Embedded(EmbeddedDocument):
string = StringField()
class Doc(Document):
string = StringField()
embedded_field = EmbeddedDocumentField(Embedded)
def __eq__(self, other):
return (self.string == other.string and
self.embedded_field == other.embedded_field)
doc = Doc(string="Hi", embedded_field=Embedded(string="Hi"))
doc_json = doc.to_json(sort_keys=True, separators=(',', ':'))
expected_json = """{"embedded_field":{"string":"Hi"},"string":"Hi"}"""
self.assertEqual(doc_json, expected_json)
self.assertEqual(doc, Doc.from_json(doc.to_json()))
def test_json_complex(self):
if pymongo.version_tuple < (2, 4):
    raise SkipTest("Need pymongo 2.4, which has a fix for DBRefs")
class EmbeddedDoc(EmbeddedDocument):
pass
class Simple(Document):
pass
class Doc(Document):
string_field = StringField(default='1')
int_field = IntField(default=1)
float_field = FloatField(default=1.1)
boolean_field = BooleanField(default=True)
datetime_field = DateTimeField(default=datetime.now)
embedded_document_field = EmbeddedDocumentField(EmbeddedDoc,
default=lambda: EmbeddedDoc())
list_field = ListField(default=lambda: [1, 2, 3])
dict_field = DictField(default=lambda: {"hello": "world"})
objectid_field = ObjectIdField(default=ObjectId)
reference_field = ReferenceField(Simple, default=lambda:
Simple().save())
map_field = MapField(IntField(), default=lambda: {"simple": 1})
decimal_field = DecimalField(default=1.0)
complex_datetime_field = ComplexDateTimeField(default=datetime.now)
url_field = URLField(default="http://mongoengine.org")
dynamic_field = DynamicField(default=1)
generic_reference_field = GenericReferenceField(
default=lambda: Simple().save())
sorted_list_field = SortedListField(IntField(),
default=lambda: [1, 2, 3])
email_field = EmailField(default="ross@example.com")
geo_point_field = GeoPointField(default=lambda: [1, 2])
sequence_field = SequenceField()
uuid_field = UUIDField(default=uuid.uuid4)
generic_embedded_document_field = GenericEmbeddedDocumentField(
default=lambda: EmbeddedDoc())
def __eq__(self, other):
import json
return json.loads(self.to_json()) == json.loads(other.to_json())
doc = Doc()
self.assertEqual(doc, Doc.from_json(doc.to_json()))
if __name__ == '__main__':
unittest.main()

View File

@@ -1,214 +0,0 @@
# -*- coding: utf-8 -*-
import unittest
from datetime import datetime
from mongoengine import *
__all__ = ("ValidatorErrorTest",)
class ValidatorErrorTest(unittest.TestCase):
def setUp(self):
connect(db='mongoenginetest')
def test_to_dict(self):
"""Ensure a ValidationError handles error to_dict correctly.
"""
error = ValidationError('root')
self.assertEqual(error.to_dict(), {})
# 1st level error schema
error.errors = {'1st': ValidationError('bad 1st'), }
self.assertTrue('1st' in error.to_dict())
self.assertEqual(error.to_dict()['1st'], 'bad 1st')
# 2nd level error schema
error.errors = {'1st': ValidationError('bad 1st', errors={
'2nd': ValidationError('bad 2nd'),
})}
self.assertTrue('1st' in error.to_dict())
self.assertTrue(isinstance(error.to_dict()['1st'], dict))
self.assertTrue('2nd' in error.to_dict()['1st'])
self.assertEqual(error.to_dict()['1st']['2nd'], 'bad 2nd')
# deeper nesting levels
error.errors = {'1st': ValidationError('bad 1st', errors={
'2nd': ValidationError('bad 2nd', errors={
'3rd': ValidationError('bad 3rd', errors={
'4th': ValidationError('Inception'),
}),
}),
})}
self.assertTrue('1st' in error.to_dict())
self.assertTrue('2nd' in error.to_dict()['1st'])
self.assertTrue('3rd' in error.to_dict()['1st']['2nd'])
self.assertTrue('4th' in error.to_dict()['1st']['2nd']['3rd'])
self.assertEqual(error.to_dict()['1st']['2nd']['3rd']['4th'],
'Inception')
self.assertEqual(error.message, "root(2nd.3rd.4th.Inception: ['1st'])")
def test_model_validation(self):
class User(Document):
username = StringField(primary_key=True)
name = StringField(required=True)
try:
User().validate()
except ValidationError as e:
self.assertTrue("User:None" in e.message)
self.assertEqual(e.to_dict(), {
'username': 'Field is required',
'name': 'Field is required'})
user = User(username="RossC0", name="Ross").save()
user.name = None
try:
user.save()
except ValidationError as e:
self.assertTrue("User:RossC0" in e.message)
self.assertEqual(e.to_dict(), {
'name': 'Field is required'})
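# Editor's sketch (not part of the original test file): application code can
# use the same to_dict() flattening to surface per-field messages, e.g.:
try:
    User().save()
except ValidationError as e:
    field_errors = e.to_dict()  # {'username': 'Field is required', ...}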
def test_fields_rewrite(self):
class BasePerson(Document):
name = StringField()
age = IntField()
meta = {'abstract': True}
class Person(BasePerson):
name = StringField(required=True)
p = Person(age=15)
self.assertRaises(ValidationError, p.validate)
def test_embedded_document_validation(self):
"""Ensure that embedded documents may be validated.
"""
class Comment(EmbeddedDocument):
date = DateTimeField()
content = StringField(required=True)
comment = Comment()
self.assertRaises(ValidationError, comment.validate)
comment.content = 'test'
comment.validate()
comment.date = 4
self.assertRaises(ValidationError, comment.validate)
comment.date = datetime.now()
comment.validate()
self.assertEqual(comment._instance, None)
def test_embedded_db_field_validate(self):
class SubDoc(EmbeddedDocument):
val = IntField(required=True)
class Doc(Document):
id = StringField(primary_key=True)
e = EmbeddedDocumentField(SubDoc, db_field='eb')
try:
Doc(id="bad").validate()
except ValidationError as e:
self.assertTrue("SubDoc:None" in e.message)
self.assertEqual(e.to_dict(), {
"e": {'val': 'OK could not be converted to int'}})
Doc.drop_collection()
Doc(id="test", e=SubDoc(val=15)).save()
doc = Doc.objects.first()
keys = doc._data.keys()
self.assertEqual(2, len(keys))
self.assertTrue('e' in keys)
self.assertTrue('id' in keys)
doc.e.val = "OK"
try:
doc.save()
except ValidationError as e:
self.assertTrue("Doc:test" in e.message)
self.assertEqual(e.to_dict(), {
"e": {'val': 'OK could not be converted to int'}})
def test_embedded_weakref(self):
class SubDoc(EmbeddedDocument):
val = IntField(required=True)
class Doc(Document):
e = EmbeddedDocumentField(SubDoc, db_field='eb')
Doc.drop_collection()
d1 = Doc()
d2 = Doc()
s = SubDoc()
self.assertRaises(ValidationError, s.validate)
d1.e = s
d2.e = s
del d1
self.assertRaises(ValidationError, d2.validate)
def test_parent_reference_in_child_document(self):
"""
Test to ensure a ReferenceField can store a reference to a parent
class when inherited. Issue #954.
"""
class Parent(Document):
meta = {'allow_inheritance': True}
reference = ReferenceField('self')
class Child(Parent):
pass
parent = Parent()
parent.save()
child = Child(reference=parent)
# Saving child should not raise a ValidationError
try:
child.save()
except ValidationError as e:
self.fail("ValidationError raised: %s" % e.message)
def test_parent_reference_set_as_attribute_in_child_document(self):
"""
Test to ensure a ReferenceField can store a reference to a parent
class when inherited and when set via attribute. Issue #954.
"""
class Parent(Document):
meta = {'allow_inheritance': True}
reference = ReferenceField('self')
class Child(Parent):
pass
parent = Parent()
parent.save()
child = Child()
child.reference = parent
# Saving the child should not raise a ValidationError
try:
child.save()
except ValidationError as e:
self.fail("ValidationError raised: %s" % e.message)
if __name__ == '__main__':
unittest.main()

788
tests/fields.py Normal file
View File

@@ -0,0 +1,788 @@
import unittest
import datetime
from decimal import Decimal
import pymongo
import gridfs
from mongoengine import *
from mongoengine.connection import _get_db
class FieldTest(unittest.TestCase):
def setUp(self):
connect(db='mongoenginetest')
self.db = _get_db()
def test_default_values(self):
"""Ensure that default field values are used when creating a document.
"""
class Person(Document):
name = StringField()
age = IntField(default=30)
userid = StringField(default=lambda: 'test')
person = Person(name='Test Person')
self.assertEqual(person._data['age'], 30)
self.assertEqual(person._data['userid'], 'test')
def test_required_values(self):
"""Ensure that required field constraints are enforced.
"""
class Person(Document):
name = StringField(required=True)
age = IntField(required=True)
userid = StringField()
person = Person(name="Test User")
self.assertRaises(ValidationError, person.validate)
person = Person(age=30)
self.assertRaises(ValidationError, person.validate)
def test_object_id_validation(self):
"""Ensure that invalid values cannot be assigned to string fields.
"""
class Person(Document):
name = StringField()
person = Person(name='Test User')
self.assertEqual(person.id, None)
person.id = 47
self.assertRaises(ValidationError, person.validate)
person.id = 'abc'
self.assertRaises(ValidationError, person.validate)
person.id = '497ce96f395f2f052a494fd4'
person.validate()
def test_string_validation(self):
"""Ensure that invalid values cannot be assigned to string fields.
"""
class Person(Document):
name = StringField(max_length=20)
userid = StringField(r'[0-9a-z_]+$')
person = Person(name=34)
self.assertRaises(ValidationError, person.validate)
# Test regex validation on userid
person = Person(userid='test.User')
self.assertRaises(ValidationError, person.validate)
person.userid = 'test_user'
self.assertEqual(person.userid, 'test_user')
person.validate()
# Test max length validation on name
person = Person(name='Name that is more than twenty characters')
self.assertRaises(ValidationError, person.validate)
person.name = 'Shorter name'
person.validate()
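# Editor's note (an addition; the signature is an assumption worth verifying):
# the bare pattern above is passed positionally into StringField's first
# parameter, regex -- the keyword form states the intent explicitly:
userid_field = StringField(regex=r'[0-9a-z_]+$', max_length=20)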
def test_url_validation(self):
"""Ensure that URLFields validate urls properly.
"""
class Link(Document):
url = URLField()
link = Link()
link.url = 'google'
self.assertRaises(ValidationError, link.validate)
link.url = 'http://www.google.com:8080'
link.validate()
def test_int_validation(self):
"""Ensure that invalid values cannot be assigned to int fields.
"""
class Person(Document):
age = IntField(min_value=0, max_value=110)
person = Person()
person.age = 50
person.validate()
person.age = -1
self.assertRaises(ValidationError, person.validate)
person.age = 120
self.assertRaises(ValidationError, person.validate)
person.age = 'ten'
self.assertRaises(ValidationError, person.validate)
def test_float_validation(self):
"""Ensure that invalid values cannot be assigned to float fields.
"""
class Person(Document):
height = FloatField(min_value=0.1, max_value=3.5)
person = Person()
person.height = 1.89
person.validate()
person.height = '2.0'
self.assertRaises(ValidationError, person.validate)
person.height = 0.01
self.assertRaises(ValidationError, person.validate)
person.height = 4.0
self.assertRaises(ValidationError, person.validate)
def test_decimal_validation(self):
"""Ensure that invalid values cannot be assigned to decimal fields.
"""
class Person(Document):
height = DecimalField(min_value=Decimal('0.1'),
max_value=Decimal('3.5'))
Person.drop_collection()
person = Person()
person.height = Decimal('1.89')
person.save()
person.reload()
self.assertEqual(person.height, Decimal('1.89'))
person.height = '2.0'
person.save()
person.height = 0.01
self.assertRaises(ValidationError, person.validate)
person.height = Decimal('0.01')
self.assertRaises(ValidationError, person.validate)
person.height = Decimal('4.0')
self.assertRaises(ValidationError, person.validate)
Person.drop_collection()
def test_boolean_validation(self):
"""Ensure that invalid values cannot be assigned to boolean fields.
"""
class Person(Document):
admin = BooleanField()
person = Person()
person.admin = True
person.validate()
person.admin = 2
self.assertRaises(ValidationError, person.validate)
person.admin = 'Yes'
self.assertRaises(ValidationError, person.validate)
def test_datetime_validation(self):
"""Ensure that invalid values cannot be assigned to datetime fields.
"""
class LogEntry(Document):
time = DateTimeField()
log = LogEntry()
log.time = datetime.datetime.now()
log.validate()
log.time = -1
self.assertRaises(ValidationError, log.validate)
log.time = '1pm'
self.assertRaises(ValidationError, log.validate)
def test_list_validation(self):
"""Ensure that a list field only accepts lists with valid elements.
"""
class User(Document):
pass
class Comment(EmbeddedDocument):
content = StringField()
class BlogPost(Document):
content = StringField()
comments = ListField(EmbeddedDocumentField(Comment))
tags = ListField(StringField())
authors = ListField(ReferenceField(User))
post = BlogPost(content='Went for a walk today...')
post.validate()
post.tags = 'fun'
self.assertRaises(ValidationError, post.validate)
post.tags = [1, 2]
self.assertRaises(ValidationError, post.validate)
post.tags = ['fun', 'leisure']
post.validate()
post.tags = ('fun', 'leisure')
post.validate()
post.comments = ['a']
self.assertRaises(ValidationError, post.validate)
post.comments = 'yay'
self.assertRaises(ValidationError, post.validate)
comments = [Comment(content='Good for you'), Comment(content='Yay.')]
post.comments = comments
post.validate()
post.authors = [Comment()]
self.assertRaises(ValidationError, post.validate)
post.authors = [User()]
post.validate()
def test_sorted_list_sorting(self):
"""Ensure that a sorted list field properly sorts values.
"""
class Comment(EmbeddedDocument):
order = IntField()
content = StringField()
class BlogPost(Document):
content = StringField()
comments = SortedListField(EmbeddedDocumentField(Comment),
ordering='order')
tags = SortedListField(StringField())
post = BlogPost(content='Went for a walk today...')
post.save()
post.tags = ['leisure', 'fun']
post.save()
post.reload()
self.assertEqual(post.tags, ['fun', 'leisure'])
comment1 = Comment(content='Good for you', order=1)
comment2 = Comment(content='Yay.', order=0)
comments = [comment1, comment2]
post.comments = comments
post.save()
post.reload()
self.assertEqual(post.comments[0].content, comment2.content)
self.assertEqual(post.comments[1].content, comment1.content)
BlogPost.drop_collection()
def test_dict_validation(self):
"""Ensure that dict types work as expected.
"""
class BlogPost(Document):
info = DictField()
post = BlogPost()
post.info = 'my post'
self.assertRaises(ValidationError, post.validate)
post.info = ['test', 'test']
self.assertRaises(ValidationError, post.validate)
post.info = {'$title': 'test'}
self.assertRaises(ValidationError, post.validate)
post.info = {'the.title': 'test'}
self.assertRaises(ValidationError, post.validate)
post.info = {'title': 'test'}
post.validate()
def test_embedded_document_validation(self):
"""Ensure that invalid embedded documents cannot be assigned to
embedded document fields.
"""
class Comment(EmbeddedDocument):
content = StringField()
class PersonPreferences(EmbeddedDocument):
food = StringField(required=True)
number = IntField()
class Person(Document):
name = StringField()
preferences = EmbeddedDocumentField(PersonPreferences)
person = Person(name='Test User')
person.preferences = 'My Preferences'
self.assertRaises(ValidationError, person.validate)
# Check that only the right embedded doc works
person.preferences = Comment(content='Nice blog post...')
self.assertRaises(ValidationError, person.validate)
# Check that the embedded doc is valid
person.preferences = PersonPreferences()
self.assertRaises(ValidationError, person.validate)
person.preferences = PersonPreferences(food='Cheese', number=47)
self.assertEqual(person.preferences.food, 'Cheese')
person.validate()
def test_embedded_document_inheritance(self):
"""Ensure that subclasses of embedded documents may be provided to
EmbeddedDocumentFields of the superclass' type.
"""
class User(EmbeddedDocument):
name = StringField()
class PowerUser(User):
power = IntField()
class BlogPost(Document):
content = StringField()
author = EmbeddedDocumentField(User)
post = BlogPost(content='What I did today...')
post.author = User(name='Test User')
post.author = PowerUser(name='Test User', power=47)
def test_reference_validation(self):
"""Ensure that invalid docment objects cannot be assigned to reference
fields.
"""
class User(Document):
name = StringField()
class BlogPost(Document):
content = StringField()
author = ReferenceField(User)
User.drop_collection()
BlogPost.drop_collection()
self.assertRaises(ValidationError, ReferenceField, EmbeddedDocument)
user = User(name='Test User')
# Ensure that the referenced object must have been saved
post1 = BlogPost(content='Chips and gravy taste good.')
post1.author = user
self.assertRaises(ValidationError, post1.save)
# Check that an invalid object type cannot be used
post2 = BlogPost(content='Chips and chilli taste good.')
post1.author = post2
self.assertRaises(ValidationError, post1.validate)
user.save()
post1.author = user
post1.save()
post2.save()
post1.author = post2
self.assertRaises(ValidationError, post1.validate)
User.drop_collection()
BlogPost.drop_collection()
def test_list_item_dereference(self):
"""Ensure that DBRef items in ListFields are dereferenced.
"""
class User(Document):
name = StringField()
class Group(Document):
members = ListField(ReferenceField(User))
User.drop_collection()
Group.drop_collection()
user1 = User(name='user1')
user1.save()
user2 = User(name='user2')
user2.save()
group = Group(members=[user1, user2])
group.save()
group_obj = Group.objects.first()
self.assertEqual(group_obj.members[0].name, user1.name)
self.assertEqual(group_obj.members[1].name, user2.name)
User.drop_collection()
Group.drop_collection()
def test_recursive_reference(self):
"""Ensure that ReferenceFields can reference their own documents.
"""
class Employee(Document):
name = StringField()
boss = ReferenceField('self')
friends = ListField(ReferenceField('self'))
bill = Employee(name='Bill Lumbergh')
bill.save()
michael = Employee(name='Michael Bolton')
michael.save()
samir = Employee(name='Samir Nagheenanajar')
samir.save()
friends = [michael, samir]
peter = Employee(name='Peter Gibbons', boss=bill, friends=friends)
peter.save()
peter = Employee.objects.with_id(peter.id)
self.assertEqual(peter.boss, bill)
self.assertEqual(peter.friends, friends)
def test_recursive_embedding(self):
"""Ensure that EmbeddedDocumentFields can contain their own documents.
"""
class Tree(Document):
name = StringField()
children = ListField(EmbeddedDocumentField('TreeNode'))
class TreeNode(EmbeddedDocument):
name = StringField()
children = ListField(EmbeddedDocumentField('self'))
tree = Tree(name="Tree")
first_child = TreeNode(name="Child 1")
tree.children.append(first_child)
second_child = TreeNode(name="Child 2")
first_child.children.append(second_child)
third_child = TreeNode(name="Child 3")
first_child.children.append(third_child)
tree.save()
tree_obj = Tree.objects.first()
self.assertEqual(len(tree_obj.children), 1)
self.assertEqual(tree_obj.children[0].name, first_child.name)
self.assertEqual(tree_obj.children[0].children[0].name, second_child.name)
self.assertEqual(tree_obj.children[0].children[1].name, third_child.name)
def test_undefined_reference(self):
"""Ensure that ReferenceFields may reference undefined Documents.
"""
class Product(Document):
name = StringField()
company = ReferenceField('Company')
class Company(Document):
name = StringField()
ten_gen = Company(name='10gen')
ten_gen.save()
mongodb = Product(name='MongoDB', company=ten_gen)
mongodb.save()
obj = Product.objects(company=ten_gen).first()
self.assertEqual(obj, mongodb)
self.assertEqual(obj.company, ten_gen)
def test_reference_query_conversion(self):
"""Ensure that ReferenceFields can be queried using objects and values
of the type of the primary key of the referenced object.
"""
class Member(Document):
user_num = IntField(primary_key=True)
class BlogPost(Document):
title = StringField()
author = ReferenceField(Member)
Member.drop_collection()
BlogPost.drop_collection()
m1 = Member(user_num=1)
m1.save()
m2 = Member(user_num=2)
m2.save()
post1 = BlogPost(title='post 1', author=m1)
post1.save()
post2 = BlogPost(title='post 2', author=m2)
post2.save()
post = BlogPost.objects(author=m1).first()
self.assertEqual(post.id, post1.id)
post = BlogPost.objects(author=m2).first()
self.assertEqual(post.id, post2.id)
Member.drop_collection()
BlogPost.drop_collection()
def test_generic_reference(self):
"""Ensure that a GenericReferenceField properly dereferences items.
"""
class Link(Document):
title = StringField()
meta = {'allow_inheritance': False}
class Post(Document):
title = StringField()
class Bookmark(Document):
bookmark_object = GenericReferenceField()
Link.drop_collection()
Post.drop_collection()
Bookmark.drop_collection()
link_1 = Link(title="Pitchfork")
link_1.save()
post_1 = Post(title="Behind the Scenes of the Pavement Reunion")
post_1.save()
bm = Bookmark(bookmark_object=post_1)
bm.save()
bm = Bookmark.objects(bookmark_object=post_1).first()
self.assertEqual(bm.bookmark_object, post_1)
self.assertTrue(isinstance(bm.bookmark_object, Post))
bm.bookmark_object = link_1
bm.save()
bm = Bookmark.objects(bookmark_object=link_1).first()
self.assertEqual(bm.bookmark_object, link_1)
self.assertTrue(isinstance(bm.bookmark_object, Link))
Link.drop_collection()
Post.drop_collection()
Bookmark.drop_collection()
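# Editor's sketch (not part of the original test file): on disk a
# GenericReferenceField is a small sub-document pairing the class name with a
# DBRef, which is what allows dereferencing without knowing the target type up
# front. Before the drop_collection() calls above, this would show it:
#
#     raw = Bookmark._get_collection().find_one()
#     # raw['bookmark_object'] == {'_cls': 'Link',
#     #                            '_ref': DBRef('link', ObjectId('...'))}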
def test_generic_reference_list(self):
"""Ensure that a ListField properly dereferences generic references.
"""
class Link(Document):
title = StringField()
class Post(Document):
title = StringField()
class User(Document):
bookmarks = ListField(GenericReferenceField())
Link.drop_collection()
Post.drop_collection()
User.drop_collection()
link_1 = Link(title="Pitchfork")
link_1.save()
post_1 = Post(title="Behind the Scenes of the Pavement Reunion")
post_1.save()
user = User(bookmarks=[post_1, link_1])
user.save()
user = User.objects(bookmarks__all=[post_1, link_1]).first()
self.assertEqual(user.bookmarks[0], post_1)
self.assertEqual(user.bookmarks[1], link_1)
Link.drop_collection()
Post.drop_collection()
User.drop_collection()
def test_binary_fields(self):
"""Ensure that binary fields can be stored and retrieved.
"""
class Attachment(Document):
content_type = StringField()
blob = BinaryField()
BLOB = '\xe6\x00\xc4\xff\x07'
MIME_TYPE = 'application/octet-stream'
Attachment.drop_collection()
attachment = Attachment(content_type=MIME_TYPE, blob=BLOB)
attachment.save()
attachment_1 = Attachment.objects().first()
self.assertEqual(MIME_TYPE, attachment_1.content_type)
self.assertEqual(BLOB, attachment_1.blob)
Attachment.drop_collection()
def test_binary_validation(self):
"""Ensure that invalid values cannot be assigned to binary fields.
"""
class Attachment(Document):
blob = BinaryField()
class AttachmentRequired(Document):
blob = BinaryField(required=True)
class AttachmentSizeLimit(Document):
blob = BinaryField(max_bytes=4)
Attachment.drop_collection()
AttachmentRequired.drop_collection()
AttachmentSizeLimit.drop_collection()
attachment = Attachment()
attachment.validate()
attachment.blob = 2
self.assertRaises(ValidationError, attachment.validate)
attachment_required = AttachmentRequired()
self.assertRaises(ValidationError, attachment_required.validate)
attachment_required.blob = '\xe6\x00\xc4\xff\x07'
attachment_required.validate()
attachment_size_limit = AttachmentSizeLimit(blob='\xe6\x00\xc4\xff\x07')
self.assertRaises(ValidationError, attachment_size_limit.validate)
attachment_size_limit.blob = '\xe6\x00\xc4\xff'
attachment_size_limit.validate()
Attachment.drop_collection()
AttachmentRequired.drop_collection()
AttachmentSizeLimit.drop_collection()
def test_choices_validation(self):
"""Ensure that value is in a container of allowed values.
"""
class Shirt(Document):
size = StringField(max_length=3, choices=('S','M','L','XL','XXL'))
Shirt.drop_collection()
shirt = Shirt()
shirt.validate()
shirt.size = "S"
shirt.validate()
shirt.size = "XS"
self.assertRaises(ValidationError, shirt.validate)
Shirt.drop_collection()
def test_file_fields(self):
"""Ensure that file fields can be written to and their data retrieved
"""
class PutFile(Document):
file = FileField()
class StreamFile(Document):
file = FileField()
class SetFile(Document):
file = FileField()
text = 'Hello, World!'
more_text = 'Foo Bar'
content_type = 'text/plain'
PutFile.drop_collection()
StreamFile.drop_collection()
SetFile.drop_collection()
putfile = PutFile()
putfile.file.put(text, content_type=content_type)
putfile.save()
putfile.validate()
result = PutFile.objects.first()
self.assertTrue(putfile == result)
self.assertEquals(result.file.read(), text)
self.assertEquals(result.file.content_type, content_type)
result.file.delete() # Remove file from GridFS
streamfile = StreamFile()
streamfile.file.new_file(content_type=content_type)
streamfile.file.write(text)
streamfile.file.write(more_text)
streamfile.file.close()
streamfile.save()
streamfile.validate()
result = StreamFile.objects.first()
self.assertTrue(streamfile == result)
self.assertEquals(result.file.read(), text + more_text)
self.assertEquals(result.file.content_type, content_type)
result.file.delete()
# Ensure deleted file returns None
self.assertTrue(result.file.read() is None)
setfile = SetFile()
setfile.file = text
setfile.save()
setfile.validate()
result = SetFile.objects.first()
self.assertTrue(setfile == result)
self.assertEquals(result.file.read(), text)
# Try replacing file with new one
result.file.replace(more_text)
result.save()
result.validate()
result = SetFile.objects.first()
self.assertTrue(setfile == result)
self.assertEquals(result.file.read(), more_text)
result.file.delete()
PutFile.drop_collection()
StreamFile.drop_collection()
SetFile.drop_collection()
# Make sure FileField is optional and not required
class DemoFile(Document):
file = FileField()
d = DemoFile.objects.create()
def test_file_uniqueness(self):
"""Ensure that each instance of a FileField is unique
"""
class TestFile(Document):
name = StringField()
file = FileField()
# First instance
testfile = TestFile()
testfile.name = "Hello, World!"
testfile.file.put('Hello, World!')
testfile.save()
# Second instance
testfiledupe = TestFile()
data = testfiledupe.file.read() # Should be None
self.assertTrue(testfile.name != testfiledupe.name)
self.assertTrue(testfile.file.read() != data)
TestFile.drop_collection()
def test_geo_indexes(self):
"""Ensure that indexes are created automatically for GeoPointFields.
"""
class Event(Document):
title = StringField()
location = GeoPointField()
Event.drop_collection()
event = Event(title="Coltrane Motion @ Double Door",
location=[41.909889, -87.677137])
event.save()
info = Event.objects._collection.index_information()
self.assertTrue(u'location_2d' in info)
self.assertTrue(info[u'location_2d']['key'] == [(u'location', u'2d')])
Event.drop_collection()
def test_ensure_unique_default_instances(self):
"""Ensure that every field has it's own unique default instance."""
class D(Document):
data = DictField()
data2 = DictField(default=lambda: {})
d1 = D()
d1.data['foo'] = 'bar'
d1.data2['foo'] = 'bar'
d2 = D()
self.assertEqual(d2.data, {})
self.assertEqual(d2.data2, {})
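# Editor's sketch (not part of the original test file): this guards against
# the classic shared-mutable-default pitfall; MongoEngine produces a fresh
# default per instance. Plain Python class attributes show the leak being
# avoided:
class Plain(object):
    data = {}  # one dict shared by every instance
p1, p2 = Plain(), Plain()
p1.data['foo'] = 'bar'
assert p2.data == {'foo': 'bar'}  # leaked; D above gets {} instead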
if __name__ == '__main__':
unittest.main()

View File

@@ -1,3 +0,0 @@
from fields import *
from file_tests import *
from geo import *

File diff suppressed because it is too large

View File

@@ -1,582 +0,0 @@
# -*- coding: utf-8 -*-
import copy
import os
import unittest
import tempfile
import gridfs
import six
from nose.plugins.skip import SkipTest
from mongoengine import *
from mongoengine.connection import get_db
from mongoengine.python_support import StringIO
try:
from PIL import Image
HAS_PIL = True
except ImportError:
HAS_PIL = False
from tests.utils import MongoDBTestCase
TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png')
TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), 'mongodb_leaf.png')
class FileTest(MongoDBTestCase):
def tearDown(self):
self.db.drop_collection('fs.files')
self.db.drop_collection('fs.chunks')
def test_file_field_optional(self):
# Make sure FileField is optional and not required
class DemoFile(Document):
the_file = FileField()
DemoFile.objects.create()
def test_file_fields(self):
"""Ensure that file fields can be written to and their data retrieved
"""
class PutFile(Document):
the_file = FileField()
PutFile.drop_collection()
text = six.b('Hello, World!')
content_type = 'text/plain'
putfile = PutFile()
putfile.the_file.put(text, content_type=content_type, filename="hello")
putfile.save()
result = PutFile.objects.first()
self.assertTrue(putfile == result)
self.assertEqual("%s" % result.the_file, "<GridFSProxy: hello>")
self.assertEqual(result.the_file.read(), text)
self.assertEqual(result.the_file.content_type, content_type)
result.the_file.delete() # Remove file from GridFS
PutFile.objects.delete()
# Ensure file-like objects are stored
PutFile.drop_collection()
putfile = PutFile()
putstring = StringIO()
putstring.write(text)
putstring.seek(0)
putfile.the_file.put(putstring, content_type=content_type)
putfile.save()
result = PutFile.objects.first()
self.assertTrue(putfile == result)
self.assertEqual(result.the_file.read(), text)
self.assertEqual(result.the_file.content_type, content_type)
result.the_file.delete()
def test_file_fields_stream(self):
"""Ensure that file fields can be written to and their data retrieved
"""
class StreamFile(Document):
the_file = FileField()
StreamFile.drop_collection()
text = six.b('Hello, World!')
more_text = six.b('Foo Bar')
content_type = 'text/plain'
streamfile = StreamFile()
streamfile.the_file.new_file(content_type=content_type)
streamfile.the_file.write(text)
streamfile.the_file.write(more_text)
streamfile.the_file.close()
streamfile.save()
result = StreamFile.objects.first()
self.assertTrue(streamfile == result)
self.assertEqual(result.the_file.read(), text + more_text)
self.assertEqual(result.the_file.content_type, content_type)
result.the_file.seek(0)
self.assertEqual(result.the_file.tell(), 0)
self.assertEqual(result.the_file.read(len(text)), text)
self.assertEqual(result.the_file.tell(), len(text))
self.assertEqual(result.the_file.read(len(more_text)), more_text)
self.assertEqual(result.the_file.tell(), len(text + more_text))
result.the_file.delete()
# Ensure deleted file returns None
self.assertTrue(result.the_file.read() is None)
def test_file_fields_stream_after_none(self):
"""Ensure that a file field can be written to after it has been saved as
None
"""
class StreamFile(Document):
the_file = FileField()
StreamFile.drop_collection()
text = six.b('Hello, World!')
more_text = six.b('Foo Bar')
content_type = 'text/plain'
streamfile = StreamFile()
streamfile.save()
streamfile.the_file.new_file()
streamfile.the_file.write(text)
streamfile.the_file.write(more_text)
streamfile.the_file.close()
streamfile.save()
result = StreamFile.objects.first()
self.assertTrue(streamfile == result)
self.assertEqual(result.the_file.read(), text + more_text)
# self.assertEqual(result.the_file.content_type, content_type)
result.the_file.seek(0)
self.assertEqual(result.the_file.tell(), 0)
self.assertEqual(result.the_file.read(len(text)), text)
self.assertEqual(result.the_file.tell(), len(text))
self.assertEqual(result.the_file.read(len(more_text)), more_text)
self.assertEqual(result.the_file.tell(), len(text + more_text))
result.the_file.delete()
# Ensure deleted file returns None
self.assertTrue(result.the_file.read() is None)
def test_file_fields_set(self):
class SetFile(Document):
the_file = FileField()
text = six.b('Hello, World!')
more_text = six.b('Foo Bar')
SetFile.drop_collection()
setfile = SetFile()
setfile.the_file = text
setfile.save()
result = SetFile.objects.first()
self.assertTrue(setfile == result)
self.assertEqual(result.the_file.read(), text)
# Try replacing file with new one
result.the_file.replace(more_text)
result.save()
result = SetFile.objects.first()
self.assertTrue(setfile == result)
self.assertEqual(result.the_file.read(), more_text)
result.the_file.delete()
def test_file_field_no_default(self):
class GridDocument(Document):
the_file = FileField()
GridDocument.drop_collection()
with tempfile.TemporaryFile() as f:
f.write(six.b("Hello World!"))
f.flush()
# Test without default
doc_a = GridDocument()
doc_a.save()
doc_b = GridDocument.objects.with_id(doc_a.id)
doc_b.the_file.replace(f, filename='doc_b')
doc_b.save()
self.assertNotEqual(doc_b.the_file.grid_id, None)
# Test it matches
doc_c = GridDocument.objects.with_id(doc_b.id)
self.assertEqual(doc_b.the_file.grid_id, doc_c.the_file.grid_id)
# Test with default
doc_d = GridDocument(the_file=six.b(''))
doc_d.save()
doc_e = GridDocument.objects.with_id(doc_d.id)
self.assertEqual(doc_d.the_file.grid_id, doc_e.the_file.grid_id)
doc_e.the_file.replace(f, filename='doc_e')
doc_e.save()
doc_f = GridDocument.objects.with_id(doc_e.id)
self.assertEqual(doc_e.the_file.grid_id, doc_f.the_file.grid_id)
db = GridDocument._get_db()
grid_fs = gridfs.GridFS(db)
self.assertEqual(['doc_b', 'doc_e'], grid_fs.list())
def test_file_uniqueness(self):
"""Ensure that each instance of a FileField is unique
"""
class TestFile(Document):
name = StringField()
the_file = FileField()
# First instance
test_file = TestFile()
test_file.name = "Hello, World!"
test_file.the_file.put(six.b('Hello, World!'))
test_file.save()
# Second instance
test_file_dupe = TestFile()
data = test_file_dupe.the_file.read() # Should be None
self.assertTrue(test_file.name != test_file_dupe.name)
self.assertTrue(test_file.the_file.read() != data)
TestFile.drop_collection()
def test_file_saving(self):
"""Ensure you can add meta data to file"""
class Animal(Document):
genus = StringField()
family = StringField()
photo = FileField()
Animal.drop_collection()
marmot = Animal(genus='Marmota', family='Sciuridae')
marmot_photo = open(TEST_IMAGE_PATH, 'rb') # Retrieve a photo from disk
marmot.photo.put(marmot_photo, content_type='image/jpeg', foo='bar')
marmot.photo.close()
marmot.save()
marmot = Animal.objects.get()
self.assertEqual(marmot.photo.content_type, 'image/jpeg')
self.assertEqual(marmot.photo.foo, 'bar')
def test_file_reassigning(self):
class TestFile(Document):
the_file = FileField()
TestFile.drop_collection()
test_file = TestFile(the_file=open(TEST_IMAGE_PATH, 'rb')).save()
self.assertEqual(test_file.the_file.get().length, 8313)
test_file = TestFile.objects.first()
test_file.the_file = open(TEST_IMAGE2_PATH, 'rb')
test_file.save()
self.assertEqual(test_file.the_file.get().length, 4971)
def test_file_boolean(self):
"""Ensure that a boolean test of a FileField indicates its presence
"""
class TestFile(Document):
the_file = FileField()
TestFile.drop_collection()
test_file = TestFile()
self.assertFalse(bool(test_file.the_file))
test_file.the_file.put(six.b('Hello, World!'), content_type='text/plain')
test_file.save()
self.assertTrue(bool(test_file.the_file))
test_file = TestFile.objects.first()
self.assertEqual(test_file.the_file.content_type, "text/plain")
def test_file_cmp(self):
"""Test comparing against other types"""
class TestFile(Document):
the_file = FileField()
test_file = TestFile()
self.assertFalse(test_file.the_file in [{"test": 1}])
def test_file_disk_space(self):
""" Test disk space usage when we delete/replace a file """
class TestFile(Document):
the_file = FileField()
text = six.b('Hello, World!')
content_type = 'text/plain'
testfile = TestFile()
testfile.the_file.put(text, content_type=content_type, filename="hello")
testfile.save()
# Now check fs.files and fs.chunks
db = TestFile._get_db()
files = db.fs.files.find()
chunks = db.fs.chunks.find()
self.assertEquals(len(list(files)), 1)
self.assertEquals(len(list(chunks)), 1)
# Deleting the document should delete its files
testfile.delete()
files = db.fs.files.find()
chunks = db.fs.chunks.find()
self.assertEquals(len(list(files)), 0)
self.assertEquals(len(list(chunks)), 0)
# Test case where we don't store a file in the first place
testfile = TestFile()
testfile.save()
files = db.fs.files.find()
chunks = db.fs.chunks.find()
self.assertEquals(len(list(files)), 0)
self.assertEquals(len(list(chunks)), 0)
testfile.delete()
files = db.fs.files.find()
chunks = db.fs.chunks.find()
self.assertEquals(len(list(files)), 0)
self.assertEquals(len(list(chunks)), 0)
# Test case where we overwrite the file
testfile = TestFile()
testfile.the_file.put(text, content_type=content_type, filename="hello")
testfile.save()
text = six.b('Bonjour, World!')
testfile.the_file.replace(text, content_type=content_type, filename="hello")
testfile.save()
files = db.fs.files.find()
chunks = db.fs.chunks.find()
self.assertEquals(len(list(files)), 1)
self.assertEquals(len(list(chunks)), 1)
testfile.delete()
files = db.fs.files.find()
chunks = db.fs.chunks.find()
self.assertEquals(len(list(files)), 0)
self.assertEquals(len(list(chunks)), 0)
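# Editor's sketch (not part of the original test file): GridFS stores one
# metadata document in fs.files plus the content in fixed-size fs.chunks
# documents, so the paired counts above track each other for these tiny files.
# A small helper, using only calls already exercised in this test:
def _gridfs_usage(db):
    """Return (metadata docs, content chunks) currently stored in GridFS."""
    return len(list(db.fs.files.find())), len(list(db.fs.chunks.find()))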
def test_image_field(self):
if not HAS_PIL:
raise SkipTest('PIL not installed')
class TestImage(Document):
image = ImageField()
TestImage.drop_collection()
with tempfile.TemporaryFile() as f:
f.write(six.b("Hello World!"))
f.flush()
t = TestImage()
try:
t.image.put(f)
self.fail("Should have raised an invalidation error")
except ValidationError as e:
self.assertEqual("%s" % e, "Invalid image: cannot identify image file %s" % f)
t = TestImage()
t.image.put(open(TEST_IMAGE_PATH, 'rb'))
t.save()
t = TestImage.objects.first()
self.assertEqual(t.image.format, 'PNG')
w, h = t.image.size
self.assertEqual(w, 371)
self.assertEqual(h, 76)
t.image.delete()
def test_image_field_reassigning(self):
if not HAS_PIL:
raise SkipTest('PIL not installed')
class TestFile(Document):
the_file = ImageField()
TestFile.drop_collection()
test_file = TestFile(the_file=open(TEST_IMAGE_PATH, 'rb')).save()
self.assertEqual(test_file.the_file.size, (371, 76))
test_file = TestFile.objects.first()
test_file.the_file = open(TEST_IMAGE2_PATH, 'rb')
test_file.save()
self.assertEqual(test_file.the_file.size, (45, 101))
def test_image_field_resize(self):
if not HAS_PIL:
raise SkipTest('PIL not installed')
class TestImage(Document):
image = ImageField(size=(185, 37))
TestImage.drop_collection()
t = TestImage()
t.image.put(open(TEST_IMAGE_PATH, 'rb'))
t.save()
t = TestImage.objects.first()
self.assertEqual(t.image.format, 'PNG')
w, h = t.image.size
self.assertEqual(w, 185)
self.assertEqual(h, 37)
t.image.delete()
def test_image_field_resize_force(self):
if not HAS_PIL:
raise SkipTest('PIL not installed')
class TestImage(Document):
image = ImageField(size=(185, 37, True))
TestImage.drop_collection()
t = TestImage()
t.image.put(open(TEST_IMAGE_PATH, 'rb'))
t.save()
t = TestImage.objects.first()
self.assertEqual(t.image.format, 'PNG')
w, h = t.image.size
self.assertEqual(w, 185)
self.assertEqual(h, 37)
t.image.delete()
def test_image_field_thumbnail(self):
if not HAS_PIL:
raise SkipTest('PIL not installed')
class TestImage(Document):
image = ImageField(thumbnail_size=(92, 18))
TestImage.drop_collection()
t = TestImage()
t.image.put(open(TEST_IMAGE_PATH, 'rb'))
t.save()
t = TestImage.objects.first()
self.assertEqual(t.image.thumbnail.format, 'PNG')
self.assertEqual(t.image.thumbnail.width, 92)
self.assertEqual(t.image.thumbnail.height, 18)
t.image.delete()
def test_file_multidb(self):
register_connection('test_files', 'test_files')
class TestFile(Document):
name = StringField()
the_file = FileField(db_alias="test_files",
collection_name="macumba")
TestFile.drop_collection()
# delete old filesystem
get_db("test_files").macumba.files.drop()
get_db("test_files").macumba.chunks.drop()
# First instance
test_file = TestFile()
test_file.name = "Hello, World!"
test_file.the_file.put(six.b('Hello, World!'),
name="hello.txt")
test_file.save()
data = get_db("test_files").macumba.files.find_one()
self.assertEqual(data.get('name'), 'hello.txt')
test_file = TestFile.objects.first()
self.assertEqual(test_file.the_file.read(), six.b('Hello, World!'))
test_file = TestFile.objects.first()
test_file.the_file = six.b('HELLO, WORLD!')
test_file.save()
test_file = TestFile.objects.first()
self.assertEqual(test_file.the_file.read(),
six.b('HELLO, WORLD!'))
def test_copyable(self):
class PutFile(Document):
the_file = FileField()
PutFile.drop_collection()
text = six.b('Hello, World!')
content_type = 'text/plain'
putfile = PutFile()
putfile.the_file.put(text, content_type=content_type)
putfile.save()
class TestFile(Document):
name = StringField()
self.assertEqual(putfile, copy.copy(putfile))
self.assertEqual(putfile, copy.deepcopy(putfile))
def test_get_image_by_grid_id(self):
if not HAS_PIL:
raise SkipTest('PIL not installed')
class TestImage(Document):
image1 = ImageField()
image2 = ImageField()
TestImage.drop_collection()
t = TestImage()
t.image1.put(open(TEST_IMAGE_PATH, 'rb'))
t.image2.put(open(TEST_IMAGE2_PATH, 'rb'))
t.save()
test = TestImage.objects.first()
grid_id = test.image1.grid_id
self.assertEqual(1, TestImage.objects(
    Q(image1=grid_id) | Q(image2=grid_id)).count())
def test_complex_field_filefield(self):
"""Ensure you can add meta data to file"""
class Animal(Document):
genus = StringField()
family = StringField()
photos = ListField(FileField())
Animal.drop_collection()
marmot = Animal(genus='Marmota', family='Sciuridae')
marmot_photo = open(TEST_IMAGE_PATH, 'rb') # Retrieve a photo from disk
photos_field = marmot._fields['photos'].field
new_proxy = photos_field.get_proxy_obj('photos', marmot)
new_proxy.put(marmot_photo, content_type='image/jpeg', foo='bar')
marmot_photo.close()
marmot.photos.append(new_proxy)
marmot.save()
marmot = Animal.objects.get()
self.assertEqual(marmot.photos[0].content_type, 'image/jpeg')
self.assertEqual(marmot.photos[0].foo, 'bar')
self.assertEqual(marmot.photos[0].get().length, 8313)
if __name__ == '__main__':
unittest.main()

View File

@@ -1,387 +0,0 @@
# -*- coding: utf-8 -*-
import unittest
from mongoengine import *
from mongoengine.connection import get_db
__all__ = ("GeoFieldTest", )
class GeoFieldTest(unittest.TestCase):
def setUp(self):
connect(db='mongoenginetest')
self.db = get_db()
def _test_for_expected_error(self, Cls, loc, expected):
try:
Cls(loc=loc).validate()
self.fail('Should not validate the location {0}'.format(loc))
except ValidationError as e:
self.assertEqual(expected, e.to_dict()['loc'])
def test_geopoint_validation(self):
class Location(Document):
loc = GeoPointField()
invalid_coords = [{"x": 1, "y": 2}, 5, "a"]
expected = 'GeoPointField can only accept tuples or lists of (x, y)'
for coord in invalid_coords:
self._test_for_expected_error(Location, coord, expected)
invalid_coords = [[], [1], [1, 2, 3]]
for coord in invalid_coords:
expected = "Value (%s) must be a two-dimensional point" % repr(coord)
self._test_for_expected_error(Location, coord, expected)
invalid_coords = [[{}, {}], ("a", "b")]
for coord in invalid_coords:
expected = "Both values (%s) in point must be float or int" % repr(coord)
self._test_for_expected_error(Location, coord, expected)
def test_point_validation(self):
class Location(Document):
loc = PointField()
invalid_coords = {"x": 1, "y": 2}
expected = 'PointField can only accept a valid GeoJson dictionary or lists of (x, y)'
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = {"type": "MadeUp", "coordinates": []}
expected = 'PointField type must be "Point"'
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = {"type": "Point", "coordinates": [1, 2, 3]}
expected = "Value ([1, 2, 3]) must be a two-dimensional point"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [5, "a"]
expected = "PointField can only accept lists of [x, y]"
for coord in invalid_coords:
self._test_for_expected_error(Location, coord, expected)
invalid_coords = [[], [1], [1, 2, 3]]
for coord in invalid_coords:
expected = "Value (%s) must be a two-dimensional point" % repr(coord)
self._test_for_expected_error(Location, coord, expected)
invalid_coords = [[{}, {}], ("a", "b")]
for coord in invalid_coords:
expected = "Both values (%s) in point must be float or int" % repr(coord)
self._test_for_expected_error(Location, coord, expected)
Location(loc=[1, 2]).validate()
Location(loc={
"type": "Point",
"coordinates": [
81.4471435546875,
23.61432859499169
]}).validate()
def test_linestring_validation(self):
class Location(Document):
loc = LineStringField()
invalid_coords = {"x": 1, "y": 2}
expected = 'LineStringField can only accept a valid GeoJson dictionary or lists of (x, y)'
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
expected = 'LineStringField type must be "LineString"'
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = {"type": "LineString", "coordinates": [[1, 2, 3]]}
expected = "Invalid LineString:\nValue ([1, 2, 3]) must be a two-dimensional point"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [5, "a"]
expected = "Invalid LineString must contain at least one valid point"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[1]]
expected = "Invalid LineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0])
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[1, 2, 3]]
expected = "Invalid LineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0])
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[{}, {}]], [("a", "b")]]
for coord in invalid_coords:
expected = "Invalid LineString:\nBoth values (%s) in point must be float or int" % repr(coord[0])
self._test_for_expected_error(Location, coord, expected)
Location(loc=[[1, 2], [3, 4], [5, 6], [1, 2]]).validate()
def test_polygon_validation(self):
class Location(Document):
loc = PolygonField()
invalid_coords = {"x": 1, "y": 2}
expected = 'PolygonField can only accept a valid GeoJson dictionary or lists of (x, y)'
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
expected = 'PolygonField type must be "Polygon"'
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = {"type": "Polygon", "coordinates": [[[1, 2, 3]]]}
expected = "Invalid Polygon:\nValue ([1, 2, 3]) must be a two-dimensional point"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[5, "a"]]]
expected = "Invalid Polygon:\nBoth values ([5, 'a']) in point must be float or int"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[]]]
expected = "Invalid Polygon must contain at least one valid linestring"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[1, 2, 3]]]
expected = "Invalid Polygon:\nValue ([1, 2, 3]) must be a two-dimensional point"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[{}, {}]], [("a", "b")]]
expected = "Invalid Polygon:\nBoth values ([{}, {}]) in point must be float or int, Both values (('a', 'b')) in point must be float or int"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[1, 2], [3, 4]]]
expected = "Invalid Polygon:\nLineStrings must start and end at the same point"
self._test_for_expected_error(Location, invalid_coords, expected)
Location(loc=[[[1, 2], [3, 4], [5, 6], [1, 2]]]).validate()
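# Editor's sketch (not part of the original test file): a GeoJSON Polygon is a
# list of linear rings, and each ring must close -- its first and last points
# must coincide, which is exactly what the preceding assertion enforces:
closed_ring = [[1, 2], [3, 4], [5, 6], [1, 2]]  # first point == last point
Location(loc=[closed_ring]).validate()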
def test_multipoint_validation(self):
class Location(Document):
loc = MultiPointField()
invalid_coords = {"x": 1, "y": 2}
expected = 'MultiPointField can only accept a valid GeoJson dictionary or lists of (x, y)'
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
expected = 'MultiPointField type must be "MultiPoint"'
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = {"type": "MultiPoint", "coordinates": [[1, 2, 3]]}
expected = "Value ([1, 2, 3]) must be a two-dimensional point"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[]]
expected = "Invalid MultiPoint must contain at least one valid point"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[1]], [[1, 2, 3]]]
for coord in invalid_coords:
expected = "Value (%s) must be a two-dimensional point" % repr(coord[0])
self._test_for_expected_error(Location, coord, expected)
invalid_coords = [[[{}, {}]], [("a", "b")]]
for coord in invalid_coords:
expected = "Both values (%s) in point must be float or int" % repr(coord[0])
self._test_for_expected_error(Location, coord, expected)
Location(loc=[[1, 2]]).validate()
Location(loc={
"type": "MultiPoint",
"coordinates": [
[1, 2],
[81.4471435546875, 23.61432859499169]
]}).validate()
def test_multilinestring_validation(self):
class Location(Document):
loc = MultiLineStringField()
invalid_coords = {"x": 1, "y": 2}
expected = 'MultiLineStringField can only accept a valid GeoJson dictionary or lists of (x, y)'
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
expected = 'MultiLineStringField type must be "MultiLineString"'
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = {"type": "MultiLineString", "coordinates": [[[1, 2, 3]]]}
expected = "Invalid MultiLineString:\nValue ([1, 2, 3]) must be a two-dimensional point"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [5, "a"]
expected = "Invalid MultiLineString must contain at least one valid linestring"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[1]]]
expected = "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0][0])
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[1, 2, 3]]]
expected = "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0][0])
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[[{}, {}]]], [[("a", "b")]]]
for coord in invalid_coords:
expected = "Invalid MultiLineString:\nBoth values (%s) in point must be float or int" % repr(coord[0][0])
self._test_for_expected_error(Location, coord, expected)
Location(loc=[[[1, 2], [3, 4], [5, 6], [1, 2]]]).validate()
def test_multipolygon_validation(self):
class Location(Document):
loc = MultiPolygonField()
invalid_coords = {"x": 1, "y": 2}
expected = 'MultiPolygonField can only accept a valid GeoJson dictionary or lists of (x, y)'
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
expected = 'MultiPolygonField type must be "MultiPolygon"'
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = {"type": "MultiPolygon", "coordinates": [[[[1, 2, 3]]]]}
expected = "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[[5, "a"]]]]
expected = "Invalid MultiPolygon:\nBoth values ([5, 'a']) in point must be float or int"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[[]]]]
expected = "Invalid MultiPolygon must contain at least one valid Polygon"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[[1, 2, 3]]]]
expected = "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[[{}, {}]]], [[("a", "b")]]]
expected = "Invalid MultiPolygon:\nBoth values ([{}, {}]) in point must be float or int, Both values (('a', 'b')) in point must be float or int"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[[1, 2], [3, 4]]]]
expected = "Invalid MultiPolygon:\nLineStrings must start and end at the same point"
self._test_for_expected_error(Location, invalid_coords, expected)
Location(loc=[[[[1, 2], [3, 4], [5, 6], [1, 2]]]]).validate()
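# Quick reference for the nesting depth each geo field above expects
# (illustrative coordinates, not taken from the tests):
#
#   PointField:        [1, 2]
#   LineStringField:   [[1, 2], [3, 4]]
#   PolygonField:      [[[1, 2], [3, 4], [5, 6], [1, 2]]]   (closed ring)
#   MultiPolygonField: [[[[1, 2], [3, 4], [5, 6], [1, 2]]]]
#
# Each field also accepts the equivalent GeoJSON dictionary form, e.g.
# {"type": "Polygon", "coordinates": [[[1, 2], [3, 4], [5, 6], [1, 2]]]}.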
def test_indexes_geopoint(self):
"""Ensure that indexes are created automatically for GeoPointFields.
"""
class Event(Document):
title = StringField()
location = GeoPointField()
geo_indices = Event._geo_indices()
self.assertEqual(geo_indices, [{'fields': [('location', '2d')]}])
def test_geopoint_embedded_indexes(self):
"""Ensure that indexes are created automatically for GeoPointFields on
embedded documents.
"""
class Venue(EmbeddedDocument):
location = GeoPointField()
name = StringField()
class Event(Document):
title = StringField()
venue = EmbeddedDocumentField(Venue)
geo_indices = Event._geo_indices()
self.assertEqual(geo_indices, [{'fields': [('venue.location', '2d')]}])
def test_indexes_2dsphere(self):
"""Ensure that indexes are created automatically for GeoPointFields.
"""
class Event(Document):
title = StringField()
point = PointField()
line = LineStringField()
polygon = PolygonField()
geo_indices = Event._geo_indices()
self.assertTrue({'fields': [('line', '2dsphere')]} in geo_indices)
self.assertTrue({'fields': [('polygon', '2dsphere')]} in geo_indices)
self.assertTrue({'fields': [('point', '2dsphere')]} in geo_indices)
def test_indexes_2dsphere_embedded(self):
"""Ensure that indexes are created automatically for GeoPointFields.
"""
class Venue(EmbeddedDocument):
name = StringField()
point = PointField()
line = LineStringField()
polygon = PolygonField()
class Event(Document):
title = StringField()
venue = EmbeddedDocumentField(Venue)
geo_indices = Event._geo_indices()
self.assertTrue({'fields': [('venue.line', '2dsphere')]} in geo_indices)
self.assertTrue({'fields': [('venue.polygon', '2dsphere')]} in geo_indices)
self.assertTrue({'fields': [('venue.point', '2dsphere')]} in geo_indices)
def test_geo_indexes_recursion(self):
class Location(Document):
name = StringField()
location = GeoPointField()
class Parent(Document):
name = StringField()
location = ReferenceField(Location)
Location.drop_collection()
Parent.drop_collection()
Parent(name='Berlin').save()
info = Parent._get_collection().index_information()
self.assertFalse('location_2d' in info)
info = Location._get_collection().index_information()
self.assertTrue('location_2d' in info)
self.assertEqual(len(Parent._geo_indices()), 0)
self.assertEqual(len(Location._geo_indices()), 1)
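# As the assertions above show, geo indexes are created only on the
# collection owning the GeoPointField; they are not followed across a
# ReferenceField, so Parent ends up without a 'location_2d' index.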
def test_geo_indexes_auto_index(self):
# Test just listing the fields
class Log(Document):
location = PointField(auto_index=False)
datetime = DateTimeField()
meta = {
'indexes': [[("location", "2dsphere"), ("datetime", 1)]]
}
self.assertEqual([], Log._geo_indices())
Log.drop_collection()
Log.ensure_indexes()
info = Log._get_collection().index_information()
self.assertEqual(info["location_2dsphere_datetime_1"]["key"],
[('location', '2dsphere'), ('datetime', 1)])
# Test listing explicitly
class Log(Document):
location = PointField(auto_index=False)
datetime = DateTimeField()
meta = {
'indexes': [
{'fields': [("location", "2dsphere"), ("datetime", 1)]}
]
}
self.assertEqual([], Log._geo_indices())
Log.drop_collection()
Log.ensure_indexes()
info = Log._get_collection().index_information()
self.assertEqual(info["location_2dsphere_datetime_1"]["key"],
[('location', '2dsphere'), ('datetime', 1)])
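# Both meta styles above, the bare [(field, type), ...] list and the
# explicit {'fields': [...]} dictionary, produce the same compound
# 2dsphere index, as the identical index_information() checks confirm.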
if __name__ == '__main__':
unittest.main()


@@ -1,60 +0,0 @@
import pickle
from datetime import datetime
from mongoengine import *
from mongoengine import signals
class PickleEmbedded(EmbeddedDocument):
date = DateTimeField(default=datetime.now)
class PickleTest(Document):
number = IntField()
string = StringField(choices=(('One', '1'), ('Two', '2')))
embedded = EmbeddedDocumentField(PickleEmbedded)
lists = ListField(StringField())
photo = FileField()
class NewDocumentPickleTest(Document):
number = IntField()
string = StringField(choices=(('One', '1'), ('Two', '2')))
embedded = EmbeddedDocumentField(PickleEmbedded)
lists = ListField(StringField())
photo = FileField()
new_field = StringField()
class PickleDynamicEmbedded(DynamicEmbeddedDocument):
date = DateTimeField(default=datetime.now)
class PickleDynamicTest(DynamicDocument):
number = IntField()
class PickleSignalsTest(Document):
number = IntField()
string = StringField(choices=(('One', '1'), ('Two', '2')))
embedded = EmbeddedDocumentField(PickleEmbedded)
lists = ListField(StringField())
@classmethod
def post_save(self, sender, document, created, **kwargs):
pickled = pickle.dumps(document)
@classmethod
def post_delete(self, sender, document, **kwargs):
pickled = pickle.dumps(document)
signals.post_save.connect(PickleSignalsTest.post_save, sender=PickleSignalsTest)
signals.post_delete.connect(PickleSignalsTest.post_delete, sender=PickleSignalsTest)
class Mixin(object):
name = StringField()
class Base(Document):
meta = {'allow_inheritance': True}

1521 tests/queryset.py (new file)

File diff suppressed because it is too large


@@ -1,6 +0,0 @@
from transform import *
from field_list import *
from queryset import *
from visitor import *
from geo import *
from modify import *


@@ -1,440 +0,0 @@
import unittest
from mongoengine import *
from mongoengine.queryset import QueryFieldList
__all__ = ("QueryFieldListTest", "OnlyExcludeAllTest")
class QueryFieldListTest(unittest.TestCase):
def test_empty(self):
q = QueryFieldList()
self.assertFalse(q)
q = QueryFieldList(always_include=['_cls'])
self.assertFalse(q)
def test_include_include(self):
q = QueryFieldList()
q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.ONLY, _only_called=True)
self.assertEqual(q.as_dict(), {'a': 1, 'b': 1})
q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY)
self.assertEqual(q.as_dict(), {'a': 1, 'b': 1, 'c': 1})
def test_include_exclude(self):
q = QueryFieldList()
q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.ONLY)
self.assertEqual(q.as_dict(), {'a': 1, 'b': 1})
q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.EXCLUDE)
self.assertEqual(q.as_dict(), {'a': 1})
def test_exclude_exclude(self):
q = QueryFieldList()
q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.EXCLUDE)
self.assertEqual(q.as_dict(), {'a': 0, 'b': 0})
q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.EXCLUDE)
self.assertEqual(q.as_dict(), {'a': 0, 'b': 0, 'c': 0})
def test_exclude_include(self):
q = QueryFieldList()
q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.EXCLUDE)
self.assertEqual(q.as_dict(), {'a': 0, 'b': 0})
q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY)
self.assertEqual(q.as_dict(), {'c': 1})
def test_always_include(self):
q = QueryFieldList(always_include=['x', 'y'])
q += QueryFieldList(fields=['a', 'b', 'x'], value=QueryFieldList.EXCLUDE)
q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY)
self.assertEqual(q.as_dict(), {'x': 1, 'y': 1, 'c': 1})
def test_reset(self):
q = QueryFieldList(always_include=['x', 'y'])
q += QueryFieldList(fields=['a', 'b', 'x'], value=QueryFieldList.EXCLUDE)
q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY)
self.assertEqual(q.as_dict(), {'x': 1, 'y': 1, 'c': 1})
q.reset()
self.assertFalse(q)
q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY)
self.assertEqual(q.as_dict(), {'x': 1, 'y': 1, 'b': 1, 'c': 1})
def test_using_a_slice(self):
q = QueryFieldList()
q += QueryFieldList(fields=['a'], value={"$slice": 5})
self.assertEqual(q.as_dict(), {'a': {"$slice": 5}})
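# A minimal sketch of how a merged field list could be applied, assuming
# a hypothetical pymongo collection handle named `coll`: the dictionary
# returned by as_dict() has the shape of a find() projection, e.g.
#
#   coll.find({}, q.as_dict())   # projects {'a': {'$slice': 5}}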
class OnlyExcludeAllTest(unittest.TestCase):
def setUp(self):
connect(db='mongoenginetest')
class Person(Document):
name = StringField()
age = IntField()
meta = {'allow_inheritance': True}
Person.drop_collection()
self.Person = Person
def test_mixing_only_exclude(self):
class MyDoc(Document):
a = StringField()
b = StringField()
c = StringField()
d = StringField()
e = StringField()
f = StringField()
include = ['a', 'b', 'c', 'd', 'e']
exclude = ['d', 'e']
only = ['b', 'c']
qs = MyDoc.objects.fields(**{i: 1 for i in include})
self.assertEqual(qs._loaded_fields.as_dict(),
{'a': 1, 'b': 1, 'c': 1, 'd': 1, 'e': 1})
qs = qs.only(*only)
self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1})
qs = qs.exclude(*exclude)
self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1})
qs = MyDoc.objects.fields(**{i: 1 for i in include})
qs = qs.exclude(*exclude)
self.assertEqual(qs._loaded_fields.as_dict(), {'a': 1, 'b': 1, 'c': 1})
qs = qs.only(*only)
self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1})
qs = MyDoc.objects.exclude(*exclude)
qs = qs.fields(**{i: 1 for i in include})
self.assertEqual(qs._loaded_fields.as_dict(), {'a': 1, 'b': 1, 'c': 1})
qs = qs.only(*only)
self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1})
def test_slicing(self):
class MyDoc(Document):
a = ListField()
b = ListField()
c = ListField()
d = ListField()
e = ListField()
f = ListField()
include = ['a', 'b', 'c', 'd', 'e']
exclude = ['d', 'e']
only = ['b', 'c']
qs = MyDoc.objects.fields(**{i: 1 for i in include})
qs = qs.exclude(*exclude)
qs = qs.only(*only)
qs = qs.fields(slice__b=5)
self.assertEqual(qs._loaded_fields.as_dict(),
{'b': {'$slice': 5}, 'c': 1})
qs = qs.fields(slice__c=[5, 1])
self.assertEqual(qs._loaded_fields.as_dict(),
{'b': {'$slice': 5}, 'c': {'$slice': [5, 1]}})
qs = qs.exclude('c')
self.assertEqual(qs._loaded_fields.as_dict(),
{'b': {'$slice': 5}})
def test_mix_slice_with_other_fields(self):
class MyDoc(Document):
a = ListField()
b = ListField()
c = ListField()
qs = MyDoc.objects.fields(a=1, b=0, slice__c=2)
self.assertEqual(qs._loaded_fields.as_dict(),
{'c': {'$slice': 2}, 'a': 1})
def test_only(self):
"""Ensure that QuerySet.only only returns the requested fields.
"""
person = self.Person(name='test', age=25)
person.save()
obj = self.Person.objects.only('name').get()
self.assertEqual(obj.name, person.name)
self.assertEqual(obj.age, None)
obj = self.Person.objects.only('age').get()
self.assertEqual(obj.name, None)
self.assertEqual(obj.age, person.age)
obj = self.Person.objects.only('name', 'age').get()
self.assertEqual(obj.name, person.name)
self.assertEqual(obj.age, person.age)
obj = self.Person.objects.only(*('id', 'name',)).get()
self.assertEqual(obj.name, person.name)
self.assertEqual(obj.age, None)
# Check polymorphism still works
class Employee(self.Person):
salary = IntField(db_field='wage')
employee = Employee(name='test employee', age=40, salary=30000)
employee.save()
obj = self.Person.objects(id=employee.id).only('age').get()
self.assertTrue(isinstance(obj, Employee))
# Check field names are looked up properly
obj = Employee.objects(id=employee.id).only('salary').get()
self.assertEqual(obj.salary, employee.salary)
self.assertEqual(obj.name, None)
def test_only_with_subfields(self):
class User(EmbeddedDocument):
name = StringField()
email = StringField()
class Comment(EmbeddedDocument):
title = StringField()
text = StringField()
class VariousData(EmbeddedDocument):
some = BooleanField()
class BlogPost(Document):
content = StringField()
author = EmbeddedDocumentField(User)
comments = ListField(EmbeddedDocumentField(Comment))
various = MapField(field=EmbeddedDocumentField(VariousData))
BlogPost.drop_collection()
post = BlogPost(content='Had a good coffee today...', various={'test_dynamic': {'some': True}})
post.author = User(name='Test User')
post.comments = [Comment(title='I agree', text='Great post!'), Comment(title='Coffee', text='I hate coffee')]
post.save()
obj = BlogPost.objects.only('author.name',).get()
self.assertEqual(obj.content, None)
self.assertEqual(obj.author.email, None)
self.assertEqual(obj.author.name, 'Test User')
self.assertEqual(obj.comments, [])
obj = BlogPost.objects.only('various.test_dynamic.some').get()
self.assertEqual(obj.various["test_dynamic"].some, True)
obj = BlogPost.objects.only('content', 'comments.title',).get()
self.assertEqual(obj.content, 'Had a good coffee today...')
self.assertEqual(obj.author, None)
self.assertEqual(obj.comments[0].title, 'I agree')
self.assertEqual(obj.comments[1].title, 'Coffee')
self.assertEqual(obj.comments[0].text, None)
self.assertEqual(obj.comments[1].text, None)
obj = BlogPost.objects.only('comments',).get()
self.assertEqual(obj.content, None)
self.assertEqual(obj.author, None)
self.assertEqual(obj.comments[0].title, 'I agree')
self.assertEqual(obj.comments[1].title, 'Coffee')
self.assertEqual(obj.comments[0].text, 'Great post!')
self.assertEqual(obj.comments[1].text, 'I hate coffee')
BlogPost.drop_collection()
def test_exclude(self):
class User(EmbeddedDocument):
name = StringField()
email = StringField()
class Comment(EmbeddedDocument):
title = StringField()
text = StringField()
class BlogPost(Document):
content = StringField()
author = EmbeddedDocumentField(User)
comments = ListField(EmbeddedDocumentField(Comment))
BlogPost.drop_collection()
post = BlogPost(content='Had a good coffee today...')
post.author = User(name='Test User')
post.comments = [Comment(title='I agree', text='Great post!'), Comment(title='Coffee', text='I hate coffee')]
post.save()
obj = BlogPost.objects.exclude('author', 'comments.text').get()
self.assertEqual(obj.author, None)
self.assertEqual(obj.content, 'Had a good coffee today...')
self.assertEqual(obj.comments[0].title, 'I agree')
self.assertEqual(obj.comments[0].text, None)
BlogPost.drop_collection()
def test_exclude_only_combining(self):
class Attachment(EmbeddedDocument):
name = StringField()
content = StringField()
class Email(Document):
sender = StringField()
to = StringField()
subject = StringField()
body = StringField()
content_type = StringField()
attachments = ListField(EmbeddedDocumentField(Attachment))
Email.drop_collection()
email = Email(sender='me', to='you', subject='From Russia with Love', body='Hello!', content_type='text/plain')
email.attachments = [
Attachment(name='file1.doc', content='ABC'),
Attachment(name='file2.doc', content='XYZ'),
]
email.save()
obj = Email.objects.exclude('content_type').exclude('body').get()
self.assertEqual(obj.sender, 'me')
self.assertEqual(obj.to, 'you')
self.assertEqual(obj.subject, 'From Russia with Love')
self.assertEqual(obj.body, None)
self.assertEqual(obj.content_type, None)
obj = Email.objects.only('sender', 'to').exclude('body', 'sender').get()
self.assertEqual(obj.sender, None)
self.assertEqual(obj.to, 'you')
self.assertEqual(obj.subject, None)
self.assertEqual(obj.body, None)
self.assertEqual(obj.content_type, None)
obj = Email.objects.exclude('attachments.content').exclude('body').only('to', 'attachments.name').get()
self.assertEqual(obj.attachments[0].name, 'file1.doc')
self.assertEqual(obj.attachments[0].content, None)
self.assertEqual(obj.sender, None)
self.assertEqual(obj.to, 'you')
self.assertEqual(obj.subject, None)
self.assertEqual(obj.body, None)
self.assertEqual(obj.content_type, None)
Email.drop_collection()
def test_all_fields(self):
class Email(Document):
sender = StringField()
to = StringField()
subject = StringField()
body = StringField()
content_type = StringField()
Email.drop_collection()
email = Email(sender='me', to='you', subject='From Russia with Love', body='Hello!', content_type='text/plain')
email.save()
obj = Email.objects.exclude('content_type', 'body').only('to', 'body').all_fields().get()
self.assertEqual(obj.sender, 'me')
self.assertEqual(obj.to, 'you')
self.assertEqual(obj.subject, 'From Russia with Love')
self.assertEqual(obj.body, 'Hello!')
self.assertEqual(obj.content_type, 'text/plain')
Email.drop_collection()
def test_slicing_fields(self):
"""Ensure that query slicing an array works.
"""
class Numbers(Document):
n = ListField(IntField())
Numbers.drop_collection()
numbers = Numbers(n=[0, 1, 2, 3, 4, 5, -5, -4, -3, -2, -1])
numbers.save()
# first three
numbers = Numbers.objects.fields(slice__n=3).get()
self.assertEqual(numbers.n, [0, 1, 2])
# last three
numbers = Numbers.objects.fields(slice__n=-3).get()
self.assertEqual(numbers.n, [-3, -2, -1])
# skip 2, limit 3
numbers = Numbers.objects.fields(slice__n=[2, 3]).get()
self.assertEqual(numbers.n, [2, 3, 4])
# skip to fifth from last, limit 4
numbers = Numbers.objects.fields(slice__n=[-5, 4]).get()
self.assertEqual(numbers.n, [-5, -4, -3, -2])
# skip to fifth from last, limit 10
numbers = Numbers.objects.fields(slice__n=[-5, 10]).get()
self.assertEqual(numbers.n, [-5, -4, -3, -2, -1])
# skip to fifth from last, limit 10 dict method
numbers = Numbers.objects.fields(n={"$slice": [-5, 10]}).get()
self.assertEqual(numbers.n, [-5, -4, -3, -2, -1])
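# The slice__n syntax above is sugar for a raw $slice projection; a rough
# pymongo equivalent (assuming a hypothetical collection handle `coll`):
#
#   coll.find_one({}, {"n": {"$slice": [2, 3]}})   # skip 2, limit 3
#
# which is why the dict form fields(n={"$slice": [-5, 10]}) behaves the same.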
def test_slicing_nested_fields(self):
"""Ensure that query slicing an embedded array works.
"""
class EmbeddedNumber(EmbeddedDocument):
n = ListField(IntField())
class Numbers(Document):
embedded = EmbeddedDocumentField(EmbeddedNumber)
Numbers.drop_collection()
numbers = Numbers()
numbers.embedded = EmbeddedNumber(n=[0, 1, 2, 3, 4, 5, -5, -4, -3, -2, -1])
numbers.save()
# first three
numbers = Numbers.objects.fields(slice__embedded__n=3).get()
self.assertEqual(numbers.embedded.n, [0, 1, 2])
# last three
numbers = Numbers.objects.fields(slice__embedded__n=-3).get()
self.assertEqual(numbers.embedded.n, [-3, -2, -1])
# skip 2, limit 3
numbers = Numbers.objects.fields(slice__embedded__n=[2, 3]).get()
self.assertEqual(numbers.embedded.n, [2, 3, 4])
# skip to fifth from last, limit 4
numbers = Numbers.objects.fields(slice__embedded__n=[-5, 4]).get()
self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2])
# skip to fifth from last, limit 10
numbers = Numbers.objects.fields(slice__embedded__n=[-5, 10]).get()
self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1])
# skip to fifth from last, limit 10 dict method
numbers = Numbers.objects.fields(embedded__n={"$slice": [-5, 10]}).get()
self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1])
def test_exclude_from_subclasses_docs(self):
class Base(Document):
username = StringField()
meta = {'allow_inheritance': True}
class Anon(Base):
anon = BooleanField()
class User(Base):
password = StringField()
wibble = StringField()
Base.drop_collection()
User(username="mongodb", password="secret").save()
user = Base.objects().exclude("password", "wibble").first()
self.assertEqual(user.password, None)
self.assertRaises(LookUpError, Base.objects.exclude, "made_up")
if __name__ == '__main__':
unittest.main()


@@ -1,575 +0,0 @@
import datetime
import unittest
from mongoengine import *
from tests.utils import MongoDBTestCase, needs_mongodb_v3
__all__ = ("GeoQueriesTest",)
class GeoQueriesTest(MongoDBTestCase):
def _create_event_data(self, point_field_class=GeoPointField):
"""Create some sample data re-used in many of the tests below."""
class Event(Document):
title = StringField()
date = DateTimeField()
location = point_field_class()
def __unicode__(self):
return self.title
self.Event = Event
Event.drop_collection()
event1 = Event.objects.create(
title="Coltrane Motion @ Double Door",
date=datetime.datetime.now() - datetime.timedelta(days=1),
location=[-87.677137, 41.909889])
event2 = Event.objects.create(
title="Coltrane Motion @ Bottom of the Hill",
date=datetime.datetime.now() - datetime.timedelta(days=10),
location=[-122.4194155, 37.7749295])
event3 = Event.objects.create(
title="Coltrane Motion @ Empty Bottle",
date=datetime.datetime.now(),
location=[-87.686638, 41.900474])
return event1, event2, event3
def test_near(self):
"""Make sure the "near" operator works."""
event1, event2, event3 = self._create_event_data()
# find all events "near" pitchfork office, chicago.
# note that "near" will show the san francisco event, too,
# although it sorts to last.
events = self.Event.objects(location__near=[-87.67892, 41.9120459])
self.assertEqual(events.count(), 3)
self.assertEqual(list(events), [event1, event3, event2])
# ensure ordering is respected by "near"
events = self.Event.objects(location__near=[-87.67892, 41.9120459])
events = events.order_by("-date")
self.assertEqual(events.count(), 3)
self.assertEqual(list(events), [event3, event1, event2])
def test_near_and_max_distance(self):
"""Ensure the "max_distance" operator works alongside the "near"
operator.
"""
event1, event2, event3 = self._create_event_data()
# find events within 10 degrees of san francisco
point = [-122.415579, 37.7566023]
events = self.Event.objects(location__near=point,
location__max_distance=10)
self.assertEqual(events.count(), 1)
self.assertEqual(events[0], event2)
# $minDistance was added in MongoDB v2.6, but continued being buggy
# until v3.0; skip for older versions
@needs_mongodb_v3
def test_near_and_min_distance(self):
"""Ensure the "min_distance" operator works alongside the "near"
operator.
"""
event1, event2, event3 = self._create_event_data()
# find events at least 10 degrees away of san francisco
point = [-122.415579, 37.7566023]
events = self.Event.objects(location__near=point,
location__min_distance=10)
self.assertEqual(events.count(), 2)
def test_within_distance(self):
"""Make sure the "within_distance" operator works."""
event1, event2, event3 = self._create_event_data()
# find events within 5 degrees of pitchfork office, chicago
point_and_distance = [[-87.67892, 41.9120459], 5]
events = self.Event.objects(
location__within_distance=point_and_distance)
self.assertEqual(events.count(), 2)
events = list(events)
self.assertTrue(event2 not in events)
self.assertTrue(event1 in events)
self.assertTrue(event3 in events)
# find events within 10 degrees of san francisco
point_and_distance = [[-122.415579, 37.7566023], 10]
events = self.Event.objects(
location__within_distance=point_and_distance)
self.assertEqual(events.count(), 1)
self.assertEqual(events[0], event2)
# find events within 1 degree of greenpoint, brooklyn, nyc, ny
point_and_distance = [[-73.9509714, 40.7237134], 1]
events = self.Event.objects(
location__within_distance=point_and_distance)
self.assertEqual(events.count(), 0)
# ensure ordering is respected by "within_distance"
point_and_distance = [[-87.67892, 41.9120459], 10]
events = self.Event.objects(
location__within_distance=point_and_distance)
events = events.order_by("-date")
self.assertEqual(events.count(), 2)
self.assertEqual(events[0], event3)
def test_within_box(self):
"""Ensure the "within_box" operator works."""
event1, event2, event3 = self._create_event_data()
# check that within_box works
box = [(-125.0, 35.0), (-100.0, 40.0)]
events = self.Event.objects(location__within_box=box)
self.assertEqual(events.count(), 1)
self.assertEqual(events[0].id, event2.id)
def test_within_polygon(self):
"""Ensure the "within_polygon" operator works."""
event1, event2, event3 = self._create_event_data()
polygon = [
(-87.694445, 41.912114),
(-87.69084, 41.919395),
(-87.681742, 41.927186),
(-87.654276, 41.911731),
(-87.656164, 41.898061),
]
events = self.Event.objects(location__within_polygon=polygon)
self.assertEqual(events.count(), 1)
self.assertEqual(events[0].id, event1.id)
polygon2 = [
(-1.742249, 54.033586),
(-1.225891, 52.792797),
(-4.40094, 53.389881)
]
events = self.Event.objects(location__within_polygon=polygon2)
self.assertEqual(events.count(), 0)
def test_2dsphere_near(self):
"""Make sure the "near" operator works with a PointField, which
corresponds to a 2dsphere index.
"""
event1, event2, event3 = self._create_event_data(
point_field_class=PointField
)
# find all events "near" pitchfork office, chicago.
# note that "near" will show the san francisco event, too,
# although it sorts to last.
events = self.Event.objects(location__near=[-87.67892, 41.9120459])
self.assertEqual(events.count(), 3)
self.assertEqual(list(events), [event1, event3, event2])
# ensure ordering is respected by "near"
events = self.Event.objects(location__near=[-87.67892, 41.9120459])
events = events.order_by("-date")
self.assertEqual(events.count(), 3)
self.assertEqual(list(events), [event3, event1, event2])
def test_2dsphere_near_and_max_distance(self):
"""Ensure the "max_distance" operator works alongside the "near"
operator with a 2dsphere index.
"""
event1, event2, event3 = self._create_event_data(
point_field_class=PointField
)
# find events within 10km of san francisco
point = [-122.415579, 37.7566023]
events = self.Event.objects(location__near=point,
location__max_distance=10000)
self.assertEqual(events.count(), 1)
self.assertEqual(events[0], event2)
# find events within 1km of greenpoint, brooklyn, nyc, ny
events = self.Event.objects(location__near=[-73.9509714, 40.7237134],
location__max_distance=1000)
self.assertEqual(events.count(), 0)
# ensure ordering is respected by "near"
events = self.Event.objects(
location__near=[-87.67892, 41.9120459],
location__max_distance=10000
).order_by("-date")
self.assertEqual(events.count(), 2)
self.assertEqual(events[0], event3)
def test_2dsphere_geo_within_box(self):
"""Ensure the "geo_within_box" operator works with a 2dsphere
index.
"""
event1, event2, event3 = self._create_event_data(
point_field_class=PointField
)
# check that within_box works
box = [(-125.0, 35.0), (-100.0, 40.0)]
events = self.Event.objects(location__geo_within_box=box)
self.assertEqual(events.count(), 1)
self.assertEqual(events[0].id, event2.id)
def test_2dsphere_geo_within_polygon(self):
"""Ensure the "geo_within_polygon" operator works with a
2dsphere index.
"""
event1, event2, event3 = self._create_event_data(
point_field_class=PointField
)
polygon = [
(-87.694445, 41.912114),
(-87.69084, 41.919395),
(-87.681742, 41.927186),
(-87.654276, 41.911731),
(-87.656164, 41.898061),
]
events = self.Event.objects(location__geo_within_polygon=polygon)
self.assertEqual(events.count(), 1)
self.assertEqual(events[0].id, event1.id)
polygon2 = [
(-1.742249, 54.033586),
(-1.225891, 52.792797),
(-4.40094, 53.389881)
]
events = self.Event.objects(location__geo_within_polygon=polygon2)
self.assertEqual(events.count(), 0)
# $minDistance was added in MongoDB v2.6, but continued being buggy
# until v3.0; skip for older versions
@needs_mongodb_v3
def test_2dsphere_near_and_min_max_distance(self):
"""Ensure "min_distace" and "max_distance" operators work well
together with the "near" operator in a 2dsphere index.
"""
event1, event2, event3 = self._create_event_data(
point_field_class=PointField
)
# ensure min_distance and max_distance combine well
events = self.Event.objects(
location__near=[-87.67892, 41.9120459],
location__min_distance=1000,
location__max_distance=10000
).order_by("-date")
self.assertEqual(events.count(), 1)
self.assertEqual(events[0], event3)
# ensure ordering is respected by "near" with "min_distance"
events = self.Event.objects(
location__near=[-87.67892, 41.9120459],
location__min_distance=10000
).order_by("-date")
self.assertEqual(events.count(), 1)
self.assertEqual(events[0], event2)
def test_2dsphere_geo_within_center(self):
"""Make sure the "geo_within_center" operator works with a
2dsphere index.
"""
event1, event2, event3 = self._create_event_data(
point_field_class=PointField
)
# find events within 2 degrees of pitchfork office, chicago
point_and_distance = [[-87.67892, 41.9120459], 2]
events = self.Event.objects(
location__geo_within_center=point_and_distance)
self.assertEqual(events.count(), 2)
events = list(events)
self.assertTrue(event2 not in events)
self.assertTrue(event1 in events)
self.assertTrue(event3 in events)
def _test_embedded(self, point_field_class):
"""Helper test method ensuring given point field class works
well in an embedded document.
"""
class Venue(EmbeddedDocument):
location = point_field_class()
name = StringField()
class Event(Document):
title = StringField()
venue = EmbeddedDocumentField(Venue)
Event.drop_collection()
venue1 = Venue(name="The Rock", location=[-87.677137, 41.909889])
venue2 = Venue(name="The Bridge", location=[-122.4194155, 37.7749295])
event1 = Event(title="Coltrane Motion @ Double Door",
venue=venue1).save()
event2 = Event(title="Coltrane Motion @ Bottom of the Hill",
venue=venue2).save()
event3 = Event(title="Coltrane Motion @ Empty Bottle",
venue=venue1).save()
# find all events "near" pitchfork office, chicago.
# note that "near" will show the san francisco event, too,
# although it sorts to last.
events = Event.objects(venue__location__near=[-87.67892, 41.9120459])
self.assertEqual(events.count(), 3)
self.assertEqual(list(events), [event1, event3, event2])
def test_geo_spatial_embedded(self):
"""Make sure GeoPointField works properly in an embedded document."""
self._test_embedded(point_field_class=GeoPointField)
def test_2dsphere_point_embedded(self):
"""Make sure PointField works properly in an embedded document."""
self._test_embedded(point_field_class=PointField)
# Needs MongoDB > 2.6.4 https://jira.mongodb.org/browse/SERVER-14039
@needs_mongodb_v3
def test_spherical_geospatial_operators(self):
"""Ensure that spherical geospatial queries are working."""
class Point(Document):
location = GeoPointField()
Point.drop_collection()
# These points are one degree apart, which (according to Google Maps)
# is about 110 km apart at this place on the Earth.
north_point = Point(location=[-122, 38]).save() # Near Concord, CA
south_point = Point(location=[-122, 37]).save() # Near Santa Cruz, CA
earth_radius = 6378.009 # in km (needs to be a float for dividing by)
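# Dividing a distance in km by the Earth's radius converts it to radians,
# the unit that the legacy spherical operators ($nearSphere and
# $centerSphere) expect for distances.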
# Finds both points because they are within 60 km of the reference
# point equidistant between them.
points = Point.objects(location__near_sphere=[-122, 37.5])
self.assertEqual(points.count(), 2)
# Same behavior for _within_spherical_distance
points = Point.objects(
location__within_spherical_distance=[
[-122, 37.5],
60 / earth_radius
]
)
self.assertEqual(points.count(), 2)
points = Point.objects(location__near_sphere=[-122, 37.5],
location__max_distance=60 / earth_radius)
self.assertEqual(points.count(), 2)
# With max_distance, only the point closer to the reference matches
points = Point.objects(location__near_sphere=[-122, 37.8],
location__max_distance=60 / earth_radius)
close_point = points.first()
self.assertEqual(points.count(), 1)
# With min_distance, only the point farther from the reference matches
points = Point.objects(location__near_sphere=[-122, 37.8],
location__min_distance=60 / earth_radius)
self.assertEqual(points.count(), 1)
far_point = points.first()
self.assertNotEqual(close_point, far_point)
# Finds both points, but orders the north point first because it's
# closer to the reference point to the north.
points = Point.objects(location__near_sphere=[-122, 38.5])
self.assertEqual(points.count(), 2)
self.assertEqual(points[0].id, north_point.id)
self.assertEqual(points[1].id, south_point.id)
# Finds both points, but orders the south point first because it's
# closer to the reference point to the south.
points = Point.objects(location__near_sphere=[-122, 36.5])
self.assertEqual(points.count(), 2)
self.assertEqual(points[0].id, south_point.id)
self.assertEqual(points[1].id, north_point.id)
# Finds only one point because only the first point is within 60km of
# the reference point to the south.
points = Point.objects(
location__within_spherical_distance=[
[-122, 36.5],
60 / earth_radius
]
)
self.assertEqual(points.count(), 1)
self.assertEqual(points[0].id, south_point.id)
def test_linestring(self):
class Road(Document):
name = StringField()
line = LineStringField()
Road.drop_collection()
Road(name="66", line=[[40, 5], [41, 6]]).save()
# near
point = {"type": "Point", "coordinates": [40, 5]}
roads = Road.objects.filter(line__near=point["coordinates"]).count()
self.assertEqual(1, roads)
roads = Road.objects.filter(line__near=point).count()
self.assertEqual(1, roads)
roads = Road.objects.filter(line__near={"$geometry": point}).count()
self.assertEqual(1, roads)
# Within
polygon = {"type": "Polygon",
"coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]}
roads = Road.objects.filter(line__geo_within=polygon["coordinates"]).count()
self.assertEqual(1, roads)
roads = Road.objects.filter(line__geo_within=polygon).count()
self.assertEqual(1, roads)
roads = Road.objects.filter(line__geo_within={"$geometry": polygon}).count()
self.assertEqual(1, roads)
# Intersects
line = {"type": "LineString",
"coordinates": [[40, 5], [40, 6]]}
roads = Road.objects.filter(line__geo_intersects=line["coordinates"]).count()
self.assertEqual(1, roads)
roads = Road.objects.filter(line__geo_intersects=line).count()
self.assertEqual(1, roads)
roads = Road.objects.filter(line__geo_intersects={"$geometry": line}).count()
self.assertEqual(1, roads)
polygon = {"type": "Polygon",
"coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]}
roads = Road.objects.filter(line__geo_intersects=polygon["coordinates"]).count()
self.assertEqual(1, roads)
roads = Road.objects.filter(line__geo_intersects=polygon).count()
self.assertEqual(1, roads)
roads = Road.objects.filter(line__geo_intersects={"$geometry": polygon}).count()
self.assertEqual(1, roads)
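# All three query forms used above (bare coordinates, a GeoJSON dict, and
# an explicit {"$geometry": ...} wrapper) presumably normalize to the same
# MongoDB filter, e.g. for the intersects case roughly:
#
#   {"line": {"$geoIntersects": {"$geometry": line}}}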
def test_polygon(self):
class Road(Document):
name = StringField()
poly = PolygonField()
Road.drop_collection()
Road(name="66", poly=[[[40, 5], [40, 6], [41, 6], [40, 5]]]).save()
# near
point = {"type": "Point", "coordinates": [40, 5]}
roads = Road.objects.filter(poly__near=point["coordinates"]).count()
self.assertEqual(1, roads)
roads = Road.objects.filter(poly__near=point).count()
self.assertEqual(1, roads)
roads = Road.objects.filter(poly__near={"$geometry": point}).count()
self.assertEqual(1, roads)
# Within
polygon = {"type": "Polygon",
"coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]}
roads = Road.objects.filter(poly__geo_within=polygon["coordinates"]).count()
self.assertEqual(1, roads)
roads = Road.objects.filter(poly__geo_within=polygon).count()
self.assertEqual(1, roads)
roads = Road.objects.filter(poly__geo_within={"$geometry": polygon}).count()
self.assertEqual(1, roads)
# Intersects
line = {"type": "LineString",
"coordinates": [[40, 5], [41, 6]]}
roads = Road.objects.filter(poly__geo_intersects=line["coordinates"]).count()
self.assertEqual(1, roads)
roads = Road.objects.filter(poly__geo_intersects=line).count()
self.assertEqual(1, roads)
roads = Road.objects.filter(poly__geo_intersects={"$geometry": line}).count()
self.assertEqual(1, roads)
polygon = {"type": "Polygon",
"coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]}
roads = Road.objects.filter(poly__geo_intersects=polygon["coordinates"]).count()
self.assertEqual(1, roads)
roads = Road.objects.filter(poly__geo_intersects=polygon).count()
self.assertEqual(1, roads)
roads = Road.objects.filter(poly__geo_intersects={"$geometry": polygon}).count()
self.assertEqual(1, roads)
def test_as_pymongo_with_only(self):
"""Ensure as_pymongo() works with only()."""
class Place(Document):
location = PointField()
Place.drop_collection()
p = Place(location=[24.946861267089844, 60.16311983618494])
p.save()
qs = Place.objects().only('location')
self.assertDictEqual(
qs.as_pymongo()[0]['location'],
{u'type': u'Point',
u'coordinates': [
24.946861267089844,
60.16311983618494]
}
)
def test_2dsphere_point_sets_correctly(self):
class Location(Document):
loc = PointField()
Location.drop_collection()
Location(loc=[1, 2]).save()
loc = Location.objects.as_pymongo()[0]
self.assertEqual(loc["loc"], {"type": "Point", "coordinates": [1, 2]})
Location.objects.update(set__loc=[2, 1])
loc = Location.objects.as_pymongo()[0]
self.assertEqual(loc["loc"], {"type": "Point", "coordinates": [2, 1]})
def test_2dsphere_linestring_sets_correctly(self):
class Location(Document):
line = LineStringField()
Location.drop_collection()
Location(line=[[1, 2], [2, 2]]).save()
loc = Location.objects.as_pymongo()[0]
self.assertEqual(loc["line"], {"type": "LineString", "coordinates": [[1, 2], [2, 2]]})
Location.objects.update(set__line=[[2, 1], [1, 2]])
loc = Location.objects.as_pymongo()[0]
self.assertEqual(loc["line"], {"type": "LineString", "coordinates": [[2, 1], [1, 2]]})
def test_geojson_PolygonField(self):
class Location(Document):
poly = PolygonField()
Location.drop_collection()
Location(poly=[[[40, 5], [40, 6], [41, 6], [40, 5]]]).save()
loc = Location.objects.as_pymongo()[0]
self.assertEqual(loc["poly"], {"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]})
Location.objects.update(set__poly=[[[40, 4], [40, 6], [41, 6], [40, 4]]])
loc = Location.objects.as_pymongo()[0]
self.assertEqual(loc["poly"], {"type": "Polygon", "coordinates": [[[40, 4], [40, 6], [41, 6], [40, 4]]]})
if __name__ == '__main__':
unittest.main()


@@ -1,132 +0,0 @@
import unittest
from mongoengine import connect, Document, IntField, StringField, ListField
from tests.utils import needs_mongodb_v26
__all__ = ("FindAndModifyTest",)
class Doc(Document):
id = IntField(primary_key=True)
value = IntField()
class FindAndModifyTest(unittest.TestCase):
def setUp(self):
connect(db="mongoenginetest")
Doc.drop_collection()
def assertDbEqual(self, docs):
self.assertEqual(list(Doc._collection.find().sort("id")), docs)
def test_modify(self):
Doc(id=0, value=0).save()
doc = Doc(id=1, value=1).save()
old_doc = Doc.objects(id=1).modify(set__value=-1)
self.assertEqual(old_doc.to_json(), doc.to_json())
self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}])
def test_modify_with_new(self):
Doc(id=0, value=0).save()
doc = Doc(id=1, value=1).save()
new_doc = Doc.objects(id=1).modify(set__value=-1, new=True)
doc.value = -1
self.assertEqual(new_doc.to_json(), doc.to_json())
self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}])
def test_modify_not_existing(self):
Doc(id=0, value=0).save()
self.assertEqual(Doc.objects(id=1).modify(set__value=-1), None)
self.assertDbEqual([{"_id": 0, "value": 0}])
def test_modify_with_upsert(self):
Doc(id=0, value=0).save()
old_doc = Doc.objects(id=1).modify(set__value=1, upsert=True)
self.assertEqual(old_doc, None)
self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": 1}])
def test_modify_with_upsert_existing(self):
Doc(id=0, value=0).save()
doc = Doc(id=1, value=1).save()
old_doc = Doc.objects(id=1).modify(set__value=-1, upsert=True)
self.assertEqual(old_doc.to_json(), doc.to_json())
self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}])
def test_modify_with_upsert_with_new(self):
Doc(id=0, value=0).save()
new_doc = Doc.objects(id=1).modify(upsert=True, new=True, set__value=1)
self.assertEqual(new_doc.to_mongo(), {"_id": 1, "value": 1})
self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": 1}])
def test_modify_with_remove(self):
Doc(id=0, value=0).save()
doc = Doc(id=1, value=1).save()
old_doc = Doc.objects(id=1).modify(remove=True)
self.assertEqual(old_doc.to_json(), doc.to_json())
self.assertDbEqual([{"_id": 0, "value": 0}])
def test_find_and_modify_with_remove_not_existing(self):
Doc(id=0, value=0).save()
self.assertEqual(Doc.objects(id=1).modify(remove=True), None)
self.assertDbEqual([{"_id": 0, "value": 0}])
def test_modify_with_order_by(self):
Doc(id=0, value=3).save()
Doc(id=1, value=2).save()
Doc(id=2, value=1).save()
doc = Doc(id=3, value=0).save()
old_doc = Doc.objects().order_by("-id").modify(set__value=-1)
self.assertEqual(old_doc.to_json(), doc.to_json())
self.assertDbEqual([
{"_id": 0, "value": 3}, {"_id": 1, "value": 2},
{"_id": 2, "value": 1}, {"_id": 3, "value": -1}])
def test_modify_with_fields(self):
Doc(id=0, value=0).save()
Doc(id=1, value=1).save()
old_doc = Doc.objects(id=1).only("id").modify(set__value=-1)
self.assertEqual(old_doc.to_mongo(), {"_id": 1})
self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}])
@needs_mongodb_v26
def test_modify_with_push(self):
class BlogPost(Document):
tags = ListField(StringField())
BlogPost.drop_collection()
blog = BlogPost.objects.create()
# Push a new tag via modify with new=False (default).
BlogPost(id=blog.id).modify(push__tags='code')
self.assertEqual(blog.tags, [])
blog.reload()
self.assertEqual(blog.tags, ['code'])
# Push a new tag via modify with new=True.
blog = BlogPost.objects(id=blog.id).modify(push__tags='java', new=True)
self.assertEqual(blog.tags, ['code', 'java'])
# Push a new tag with a positional argument.
blog = BlogPost.objects(id=blog.id).modify(
push__tags__0='python',
new=True)
self.assertEqual(blog.tags, ['python', 'code', 'java'])
# Push multiple new tags with a positional argument.
blog = BlogPost.objects(id=blog.id).modify(
push__tags__1=['go', 'rust'],
new=True)
self.assertEqual(blog.tags, ['python', 'go', 'rust', 'code', 'java'])
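# A sketch of the raw update the positional push above presumably maps to
# (hence the MongoDB v2.6 requirement, which introduced $position):
#
#   {"$push": {"tags": {"$each": ["go", "rust"], "$position": 1}}}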
if __name__ == '__main__':
unittest.main()


@@ -1,78 +0,0 @@
import pickle
import unittest
from pymongo.mongo_client import MongoClient
from mongoengine import Document, StringField, IntField
from mongoengine.connection import connect
__author__ = 'stas'
class Person(Document):
name = StringField()
age = IntField()
class TestQuerysetPickable(unittest.TestCase):
"""
Test for adding pickling support for QuerySet instances
See issue https://github.com/MongoEngine/mongoengine/issues/442
"""
def setUp(self):
super(TestQuerysetPickable, self).setUp()
connection = connect(db="test")  # type: MongoClient
connection.drop_database("test")
self.john = Person.objects.create(
name="John",
age=21
)
def test_pickle_simple_qs(self):
qs = Person.objects.all()
pickle.dumps(qs)
def _get_loaded(self, qs):
s = pickle.dumps(qs)
return pickle.loads(s)
def test_unpickle(self):
qs = Person.objects.all()
loaded_qs = self._get_loaded(qs)
self.assertEqual(qs.count(), loaded_qs.count())
# the unpickled queryset can still issue updates
loaded_qs.update(age=23)
# and the change is visible through a fresh query
self.assertEqual(Person.objects.first().age, 23)
def test_pickle_support_filtration(self):
Person.objects.create(
name="Alice",
age=22
)
Person.objects.create(
name="Bob",
age=23
)
qs = Person.objects.filter(age__gte=22)
self.assertEqual(qs.count(), 2)
loaded = self._get_loaded(qs)
self.assertEqual(loaded.count(), 2)
self.assertEqual(loaded.filter(name="Bob").first().age, 23)
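# Note: pickling a QuerySet captures its query definition rather than any
# fetched results, which is why the unpickled copies above can still be
# filtered further and can issue fresh updates against the live collection.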

File diff suppressed because it is too large


@@ -1,246 +0,0 @@
import unittest
from mongoengine import *
from mongoengine.queryset import Q, transform
__all__ = ("TransformTest",)
class TransformTest(unittest.TestCase):
def setUp(self):
connect(db='mongoenginetest')
def test_transform_query(self):
"""Ensure that the _transform_query function operates correctly.
"""
self.assertEqual(transform.query(name='test', age=30),
{'name': 'test', 'age': 30})
self.assertEqual(transform.query(age__lt=30),
{'age': {'$lt': 30}})
self.assertEqual(transform.query(age__gt=20, age__lt=50),
{'age': {'$gt': 20, '$lt': 50}})
self.assertEqual(transform.query(age=20, age__gt=50),
{'$and': [{'age': {'$gt': 50}}, {'age': 20}]})
self.assertEqual(transform.query(friend__age__gte=30),
{'friend.age': {'$gte': 30}})
self.assertEqual(transform.query(name__exists=True),
{'name': {'$exists': True}})
def test_transform_update(self):
class DicDoc(Document):
dictField = DictField()
class Doc(Document):
pass
DicDoc.drop_collection()
Doc.drop_collection()
DicDoc().save()
doc = Doc().save()
for k, v in (("set", "$set"), ("set_on_insert", "$setOnInsert"), ("push", "$push")):
update = transform.update(DicDoc, **{"%s__dictField__test" % k: doc})
self.assertTrue(isinstance(update[v]["dictField.test"], dict))
# Update special cases
update = transform.update(DicDoc, unset__dictField__test=doc)
self.assertEqual(update["$unset"]["dictField.test"], 1)
update = transform.update(DicDoc, pull__dictField__test=doc)
self.assertTrue(isinstance(update["$pull"]["dictField"]["test"], dict))
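# As the assertions above show, each modifier keyword (set__, push__,
# set_on_insert__, unset__, pull__) maps onto the corresponding MongoDB
# update operator, e.g. roughly:
#
#   transform.update(DicDoc, push__dictField__test=doc)
#   # -> {"$push": {"dictField.test": <doc serialized to a dict>}}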
def test_query_field_name(self):
"""Ensure that the correct field name is used when querying.
"""
class Comment(EmbeddedDocument):
content = StringField(db_field='commentContent')
class BlogPost(Document):
title = StringField(db_field='postTitle')
comments = ListField(EmbeddedDocumentField(Comment),
db_field='postComments')
BlogPost.drop_collection()
data = {'title': 'Post 1', 'comments': [Comment(content='test')]}
post = BlogPost(**data)
post.save()
self.assertTrue('postTitle' in
BlogPost.objects(title=data['title'])._query)
self.assertFalse('title' in
BlogPost.objects(title=data['title'])._query)
self.assertEqual(BlogPost.objects(title=data['title']).count(), 1)
self.assertTrue('_id' in BlogPost.objects(pk=post.id)._query)
self.assertEqual(BlogPost.objects(pk=post.id).count(), 1)
self.assertTrue('postComments.commentContent' in
BlogPost.objects(comments__content='test')._query)
self.assertEqual(BlogPost.objects(comments__content='test').count(), 1)
BlogPost.drop_collection()
def test_query_pk_field_name(self):
"""Ensure that the correct "primary key" field name is used when
querying
"""
class BlogPost(Document):
title = StringField(primary_key=True, db_field='postTitle')
BlogPost.drop_collection()
data = {'title': 'Post 1'}
post = BlogPost(**data)
post.save()
self.assertTrue('_id' in BlogPost.objects(pk=data['title'])._query)
self.assertTrue('_id' in BlogPost.objects(title=data['title'])._query)
self.assertEqual(BlogPost.objects(pk=data['title']).count(), 1)
BlogPost.drop_collection()
def test_chaining(self):
class A(Document):
pass
class B(Document):
a = ReferenceField(A)
A.drop_collection()
B.drop_collection()
a1 = A().save()
a2 = A().save()
B(a=a1).save()
# Works
q1 = B.objects.filter(a__in=[a1, a2], a=a1)._query
# Doesn't work
q2 = B.objects.filter(a__in=[a1, a2])
q2 = q2.filter(a=a1)._query
self.assertEqual(q1, q2)
def test_raw_query_and_Q_objects(self):
"""
Test raw plays nicely
"""
class Foo(Document):
name = StringField()
a = StringField()
b = StringField()
c = StringField()
meta = {
'allow_inheritance': False
}
query = Foo.objects(__raw__={'$nor': [{'name': 'bar'}]})._query
self.assertEqual(query, {'$nor': [{'name': 'bar'}]})
q1 = {'$or': [{'a': 1}, {'b': 1}]}
query = Foo.objects(Q(__raw__=q1) & Q(c=1))._query
self.assertEqual(query, {'$or': [{'a': 1}, {'b': 1}], 'c': 1})
def test_raw_and_merging(self):
class Doc(Document):
meta = {'allow_inheritance': False}
raw_query = Doc.objects(__raw__={
'deleted': False,
'scraped': 'yes',
'$nor': [
{'views.extracted': 'no'},
{'attachments.views.extracted': 'no'}
]
})._query
self.assertEqual(raw_query, {
'deleted': False,
'scraped': 'yes',
'$nor': [
{'views.extracted': 'no'},
{'attachments.views.extracted': 'no'}
]
})
def test_geojson_PointField(self):
class Location(Document):
loc = PointField()
update = transform.update(Location, set__loc=[1, 2])
self.assertEqual(update, {'$set': {'loc': {"type": "Point", "coordinates": [1, 2]}}})
update = transform.update(Location, set__loc={"type": "Point", "coordinates": [1, 2]})
self.assertEqual(update, {'$set': {'loc': {"type": "Point", "coordinates": [1, 2]}}})
def test_geojson_LineStringField(self):
class Location(Document):
line = LineStringField()
update = transform.update(Location, set__line=[[1, 2], [2, 2]])
self.assertEqual(update, {'$set': {'line': {"type": "LineString", "coordinates": [[1, 2], [2, 2]]}}})
update = transform.update(Location, set__line={"type": "LineString", "coordinates": [[1, 2], [2, 2]]})
self.assertEqual(update, {'$set': {'line': {"type": "LineString", "coordinates": [[1, 2], [2, 2]]}}})
def test_geojson_PolygonField(self):
class Location(Document):
poly = PolygonField()
update = transform.update(Location, set__poly=[[[40, 5], [40, 6], [41, 6], [40, 5]]])
self.assertEqual(update, {'$set': {'poly': {"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}}})
update = transform.update(Location, set__poly={"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]})
self.assertEqual(update, {'$set': {'poly': {"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}}})
def test_type(self):
class Doc(Document):
df = DynamicField()
Doc(df=True).save()
Doc(df=7).save()
Doc(df="df").save()
self.assertEqual(Doc.objects(df__type=1).count(), 0) # double
self.assertEqual(Doc.objects(df__type=8).count(), 1) # bool
self.assertEqual(Doc.objects(df__type=2).count(), 1) # str
self.assertEqual(Doc.objects(df__type=16).count(), 1) # int
def test_last_field_name_like_operator(self):
class EmbeddedItem(EmbeddedDocument):
type = StringField()
name = StringField()
class Doc(Document):
item = EmbeddedDocumentField(EmbeddedItem)
Doc.drop_collection()
doc = Doc(item=EmbeddedItem(type="axe", name="Heroic axe"))
doc.save()
self.assertEqual(1, Doc.objects(item__type__="axe").count())
self.assertEqual(1, Doc.objects(item__name__="Heroic axe").count())
Doc.objects(id=doc.id).update(set__item__type__='sword')
self.assertEqual(1, Doc.objects(item__type__="sword").count())
self.assertEqual(0, Doc.objects(item__type__="axe").count())
def test_understandable_error_raised(self):
class Event(Document):
title = StringField()
location = GeoPointField()
box = [(35.0, -125.0), (40.0, -100.0)]
# I *meant* to execute location__within_box=box
events = Event.objects(location__within=box)
with self.assertRaises(InvalidQueryError):
events.count()
if __name__ == '__main__':
unittest.main()


@@ -1,347 +0,0 @@
import datetime
import re
import unittest
from bson import ObjectId
from mongoengine import *
from mongoengine.errors import InvalidQueryError
from mongoengine.queryset import Q
__all__ = ("QTest",)
class QTest(unittest.TestCase):
def setUp(self):
connect(db='mongoenginetest')
class Person(Document):
name = StringField()
age = IntField()
meta = {'allow_inheritance': True}
Person.drop_collection()
self.Person = Person
def test_empty_q(self):
"""Ensure that empty Q objects won't hurt.
"""
q1 = Q()
q2 = Q(age__gte=18)
q3 = Q()
q4 = Q(name='test')
q5 = Q()
class Person(Document):
name = StringField()
age = IntField()
query = {'$or': [{'age': {'$gte': 18}}, {'name': 'test'}]}
self.assertEqual((q1 | q2 | q3 | q4 | q5).to_query(Person), query)
query = {'age': {'$gte': 18}, 'name': 'test'}
self.assertEqual((q1 & q2 & q3 & q4 & q5).to_query(Person), query)
def test_q_with_dbref(self):
"""Ensure Q objects handle DBRefs correctly"""
connect(db='mongoenginetest')
class User(Document):
pass
class Post(Document):
created_user = ReferenceField(User)
user = User.objects.create()
Post.objects.create(created_user=user)
self.assertEqual(Post.objects.filter(created_user=user).count(), 1)
self.assertEqual(Post.objects.filter(Q(created_user=user)).count(), 1)
def test_and_combination(self):
"""Ensure that Q-objects correctly AND together.
"""
class TestDoc(Document):
x = IntField()
y = StringField()
query = (Q(x__lt=7) & Q(x__lt=3)).to_query(TestDoc)
self.assertEqual(query, {'$and': [{'x': {'$lt': 7}}, {'x': {'$lt': 3}}]})
query = (Q(y="a") & Q(x__lt=7) & Q(x__lt=3)).to_query(TestDoc)
self.assertEqual(query, {'$and': [{'y': "a"}, {'x': {'$lt': 7}}, {'x': {'$lt': 3}}]})
# Check normal cases work without an error
query = Q(x__lt=7) & Q(x__gt=3)
q1 = Q(x__lt=7)
q2 = Q(x__gt=3)
query = (q1 & q2).to_query(TestDoc)
self.assertEqual(query, {'x': {'$lt': 7, '$gt': 3}})
# More complex nested example
query = Q(x__lt=100) & Q(y__ne='NotMyString')
query &= Q(y__in=['a', 'b', 'c']) & Q(x__gt=-100)
mongo_query = {
'x': {'$lt': 100, '$gt': -100},
'y': {'$ne': 'NotMyString', '$in': ['a', 'b', 'c']},
}
self.assertEqual(query.to_query(TestDoc), mongo_query)
def test_or_combination(self):
"""Ensure that Q-objects correctly OR together.
"""
class TestDoc(Document):
x = IntField()
q1 = Q(x__lt=3)
q2 = Q(x__gt=7)
query = (q1 | q2).to_query(TestDoc)
self.assertEqual(query, {
'$or': [
{'x': {'$lt': 3}},
{'x': {'$gt': 7}},
]
})
def test_and_or_combination(self):
"""Ensure that Q-objects handle ANDing ORed components.
"""
class TestDoc(Document):
x = IntField()
y = BooleanField()
TestDoc.drop_collection()
query = (Q(x__gt=0) | Q(x__exists=False))
query &= Q(x__lt=100)
self.assertEqual(query.to_query(TestDoc), {'$and': [
{'$or': [{'x': {'$gt': 0}},
{'x': {'$exists': False}}]},
{'x': {'$lt': 100}}]
})
q1 = (Q(x__gt=0) | Q(x__exists=False))
q2 = (Q(x__lt=100) | Q(y=True))
query = (q1 & q2).to_query(TestDoc)
TestDoc(x=101).save()
TestDoc(x=10).save()
TestDoc(y=True).save()
self.assertEqual(query, {
'$and': [
{'$or': [{'x': {'$gt': 0}}, {'x': {'$exists': False}}]},
{'$or': [{'x': {'$lt': 100}}, {'y': True}]}
]
})
self.assertEqual(2, TestDoc.objects(q1 & q2).count())
def test_or_and_or_combination(self):
"""Ensure that Q-objects handle ORing ANDed ORed components. :)
"""
class TestDoc(Document):
x = IntField()
y = BooleanField()
TestDoc.drop_collection()
TestDoc(x=-1, y=True).save()
TestDoc(x=101, y=True).save()
TestDoc(x=99, y=False).save()
TestDoc(x=101, y=False).save()
q1 = (Q(x__gt=0) & (Q(y=True) | Q(y__exists=False)))
q2 = (Q(x__lt=100) & (Q(y=False) | Q(y__exists=False)))
query = (q1 | q2).to_query(TestDoc)
self.assertEqual(query, {
'$or': [
{'$and': [{'x': {'$gt': 0}},
{'$or': [{'y': True}, {'y': {'$exists': False}}]}]},
{'$and': [{'x': {'$lt': 100}},
{'$or': [{'y': False}, {'y': {'$exists': False}}]}]}
]
})
self.assertEqual(2, TestDoc.objects(q1 | q2).count())
def test_multiple_occurrence_in_field(self):
class Test(Document):
name = StringField(max_length=40)
title = StringField(max_length=40)
q1 = Q(name__contains='te') | Q(title__contains='te')
q2 = Q(name__contains='12') | Q(title__contains='12')
q3 = q1 & q2
query = q3.to_query(Test)
self.assertEqual(query["$and"][0], q1.to_query(Test))
self.assertEqual(query["$and"][1], q2.to_query(Test))
def test_q_clone(self):
class TestDoc(Document):
x = IntField()
TestDoc.drop_collection()
for i in range(1, 101):
t = TestDoc(x=i)
t.save()
# Check normal cases work without an error
test = TestDoc.objects(Q(x__lt=7) & Q(x__gt=3))
self.assertEqual(test.count(), 3)
test2 = test.clone()
self.assertEqual(test2.count(), 3)
self.assertFalse(test2 == test)
test3 = test2.filter(x=6)
self.assertEqual(test3.count(), 1)
self.assertEqual(test.count(), 3)
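# Illustrative sketch (reusing the TestDoc defined above): clone() yields an
# independent queryset, so narrowing the clone never affects the original.
qs = TestDoc.objects(Q(x__gt=3) & Q(x__lt=7))   # matches x in (4, 5, 6)
narrowed = qs.clone().filter(x=6)
assert (qs.count(), narrowed.count()) == (3, 1)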
def test_q(self):
"""Ensure that Q objects may be used to query for documents.
"""
class BlogPost(Document):
title = StringField()
publish_date = DateTimeField()
published = BooleanField()
BlogPost.drop_collection()
post1 = BlogPost(title='Test 1', publish_date=datetime.datetime(2010, 1, 8), published=False)
post1.save()
post2 = BlogPost(title='Test 2', publish_date=datetime.datetime(2010, 1, 15), published=True)
post2.save()
post3 = BlogPost(title='Test 3', published=True)
post3.save()
post4 = BlogPost(title='Test 4', publish_date=datetime.datetime(2010, 1, 8))
post4.save()
post5 = BlogPost(title='Test 1', publish_date=datetime.datetime(2010, 1, 15))
post5.save()
post6 = BlogPost(title='Test 1', published=False)
post6.save()
# Check ObjectId lookup works
obj = BlogPost.objects(id=post1.id).first()
self.assertEqual(obj, post1)
# Check Q object combination when one clause matches no documents
q = BlogPost.objects(Q(title='Test 5') | Q(published=True))
posts = [post.id for post in q]
published_posts = (post2, post3)
self.assertTrue(all(obj.id in posts for obj in published_posts))
q = BlogPost.objects(Q(title='Test 1') | Q(published=True))
posts = [post.id for post in q]
published_posts = (post1, post2, post3, post5, post6)
self.assertTrue(all(obj.id in posts for obj in published_posts))
# Check Q object combination
date = datetime.datetime(2010, 1, 10)
q = BlogPost.objects(Q(publish_date__lte=date) | Q(published=True))
posts = [post.id for post in q]
published_posts = (post1, post2, post3, post4)
self.assertTrue(all(obj.id in posts for obj in published_posts))
self.assertFalse(any(obj.id in posts for obj in [post5, post6]))
BlogPost.drop_collection()
# Check the 'in' operator
self.Person(name='user1', age=20).save()
self.Person(name='user2', age=20).save()
self.Person(name='user3', age=30).save()
self.Person(name='user4', age=40).save()
self.assertEqual(self.Person.objects(Q(age__in=[20])).count(), 2)
self.assertEqual(self.Person.objects(Q(age__in=[20, 30])).count(), 3)
# Test invalid query objs
with self.assertRaises(InvalidQueryError):
self.Person.objects('user1')
# filter should fail, too
with self.assertRaises(InvalidQueryError):
self.Person.objects.filter('user1')
def test_q_regex(self):
"""Ensure that Q objects can be queried using regexes.
"""
person = self.Person(name='Guido van Rossum')
person.save()
obj = self.Person.objects(Q(name=re.compile('^Gui'))).first()
self.assertEqual(obj, person)
obj = self.Person.objects(Q(name=re.compile('^gui'))).first()
self.assertEqual(obj, None)
obj = self.Person.objects(Q(name=re.compile('^gui', re.I))).first()
self.assertEqual(obj, person)
obj = self.Person.objects(Q(name__not=re.compile('^bob'))).first()
self.assertEqual(obj, person)
obj = self.Person.objects(Q(name__not=re.compile('^Gui'))).first()
self.assertEqual(obj, None)
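# Illustrative sketch (inside a test like the one above): a compiled pattern
# is passed through to PyMongo unchanged, so re.I gives a case-insensitive
# match and `__not` wraps the pattern in $not.
assert self.Person.objects(Q(name=re.compile('^gui', re.I))).first() == person
assert self.Person.objects(Q(name__not=re.compile('^bob'))).first() == person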
def test_q_lists(self):
"""Ensure that Q objects query ListFields correctly.
"""
class BlogPost(Document):
tags = ListField(StringField())
BlogPost.drop_collection()
BlogPost(tags=['python', 'mongo']).save()
BlogPost(tags=['python']).save()
self.assertEqual(BlogPost.objects(Q(tags='mongo')).count(), 1)
self.assertEqual(BlogPost.objects(Q(tags='python')).count(), 2)
BlogPost.drop_collection()
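# Illustrative note (BlogPost as defined above): filtering a ListField on a
# scalar relies on MongoDB's array-membership semantics, so the compiled
# query document stays flat:
assert Q(tags='mongo').to_query(BlogPost) == {'tags': 'mongo'}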
def test_q_merge_queries_edge_case(self):
class User(Document):
email = EmailField(required=False)
name = StringField()
User.drop_collection()
pk = ObjectId()
User(email='example@example.com', pk=pk).save()
self.assertEqual(1, User.objects.filter(Q(email='example@example.com') |
Q(name='John Doe')).limit(2).filter(pk=pk).count())
def test_chained_q_or_filtering(self):
class Post(EmbeddedDocument):
name = StringField(required=True)
class Item(Document):
postables = ListField(EmbeddedDocumentField(Post))
Item.drop_collection()
Item(postables=[Post(name="a"), Post(name="b")]).save()
Item(postables=[Post(name="a"), Post(name="c")]).save()
Item(postables=[Post(name="a"), Post(name="b"), Post(name="c")]).save()
self.assertEqual(Item.objects(Q(postables__name="a") & Q(postables__name="b")).count(), 2)
self.assertEqual(Item.objects.filter(postables__name="a").filter(postables__name="b").count(), 2)
if __name__ == '__main__':
unittest.main()

View File

@@ -1,390 +0,0 @@
import datetime
from pymongo.errors import OperationFailure
try:
import unittest2 as unittest
except ImportError:
import unittest
from nose.plugins.skip import SkipTest
import pymongo
from bson.tz_util import utc
from mongoengine import (
connect, register_connection,
Document, DateTimeField
)
from mongoengine.python_support import IS_PYMONGO_3
import mongoengine.connection
from mongoengine.connection import (MongoEngineConnectionError, get_db,
get_connection)
def get_tz_awareness(connection):
if not IS_PYMONGO_3:
return connection.tz_aware
else:
return connection.codec_options.tz_aware
class ConnectionTest(unittest.TestCase):
def tearDown(self):
mongoengine.connection._connection_settings = {}
mongoengine.connection._connections = {}
mongoengine.connection._dbs = {}
def test_connect(self):
"""Ensure that the connect() method works properly."""
connect('mongoenginetest')
conn = get_connection()
self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))
db = get_db()
self.assertTrue(isinstance(db, pymongo.database.Database))
self.assertEqual(db.name, 'mongoenginetest')
connect('mongoenginetest2', alias='testdb')
conn = get_connection('testdb')
self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))
def test_connect_in_mocking(self):
"""Ensure that the connect() method works properly in mocking.
"""
try:
import mongomock
except ImportError:
raise SkipTest('you need mongomock installed to run this testcase')
connect('mongoenginetest', host='mongomock://localhost')
conn = get_connection()
self.assertTrue(isinstance(conn, mongomock.MongoClient))
connect('mongoenginetest2', host='mongomock://localhost', alias='testdb2')
conn = get_connection('testdb2')
self.assertTrue(isinstance(conn, mongomock.MongoClient))
connect('mongoenginetest3', host='mongodb://localhost', is_mock=True, alias='testdb3')
conn = get_connection('testdb3')
self.assertTrue(isinstance(conn, mongomock.MongoClient))
connect('mongoenginetest4', is_mock=True, alias='testdb4')
conn = get_connection('testdb4')
self.assertTrue(isinstance(conn, mongomock.MongoClient))
connect(host='mongodb://localhost:27017/mongoenginetest5', is_mock=True, alias='testdb5')
conn = get_connection('testdb5')
self.assertTrue(isinstance(conn, mongomock.MongoClient))
connect(host='mongomock://localhost:27017/mongoenginetest6', alias='testdb6')
conn = get_connection('testdb6')
self.assertTrue(isinstance(conn, mongomock.MongoClient))
connect(host='mongomock://localhost:27017/mongoenginetest7', is_mock=True, alias='testdb7')
conn = get_connection('testdb7')
self.assertTrue(isinstance(conn, mongomock.MongoClient))
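# Illustrative sketch: the cases above reduce to two equivalent spellings for
# obtaining a mongomock-backed client (db and alias names here are arbitrary):
connect('some_db', host='mongomock://localhost', alias='mock_via_scheme')
connect('some_db', host='mongodb://localhost', is_mock=True, alias='mock_via_flag')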
def test_connect_with_host_list(self):
"""Ensure that the connect() method works when host is a list
Uses mongomock to test w/o needing multiple mongod/mongos processes
"""
try:
import mongomock
except ImportError:
raise SkipTest('you need mongomock installed to run this testcase')
connect(host=['mongomock://localhost'])
conn = get_connection()
self.assertTrue(isinstance(conn, mongomock.MongoClient))
connect(host=['mongodb://localhost'], is_mock=True, alias='testdb2')
conn = get_connection('testdb2')
self.assertTrue(isinstance(conn, mongomock.MongoClient))
connect(host=['localhost'], is_mock=True, alias='testdb3')
conn = get_connection('testdb3')
self.assertTrue(isinstance(conn, mongomock.MongoClient))
connect(host=['mongomock://localhost:27017', 'mongomock://localhost:27018'], alias='testdb4')
conn = get_connection('testdb4')
self.assertTrue(isinstance(conn, mongomock.MongoClient))
connect(host=['mongodb://localhost:27017', 'mongodb://localhost:27018'], is_mock=True, alias='testdb5')
conn = get_connection('testdb5')
self.assertTrue(isinstance(conn, mongomock.MongoClient))
connect(host=['localhost:27017', 'localhost:27018'], is_mock=True, alias='testdb6')
conn = get_connection('testdb6')
self.assertTrue(isinstance(conn, mongomock.MongoClient))
def test_disconnect(self):
"""Ensure that the disconnect() method works properly
"""
conn1 = connect('mongoenginetest')
mongoengine.connection.disconnect()
conn2 = connect('mongoenginetest')
self.assertTrue(conn1 is not conn2)
def test_sharing_connections(self):
"""Ensure that connections are shared when the connection settings are exactly the same
"""
connect('mongoenginetests', alias='testdb1')
expected_connection = get_connection('testdb1')
connect('mongoenginetests', alias='testdb2')
actual_connection = get_connection('testdb2')
# PyMongo 3+ connects lazily, so force a round trip first
if IS_PYMONGO_3:
# Ensure we are connected, throws ServerSelectionTimeoutError otherwise.
# Purposely not catching exception to fail test if thrown.
expected_connection.server_info()
self.assertEqual(expected_connection, actual_connection)
def test_connect_uri(self):
"""Ensure that the connect() method works properly with URIs."""
c = connect(db='mongoenginetest', alias='admin')
c.admin.system.users.remove({})
c.mongoenginetest.system.users.remove({})
c.admin.add_user("admin", "password")
c.admin.authenticate("admin", "password")
c.mongoenginetest.add_user("username", "password")
if not IS_PYMONGO_3:
self.assertRaises(
MongoEngineConnectionError, connect, 'testdb_uri_bad',
host='mongodb://test:password@localhost'
)
connect("testdb_uri", host='mongodb://username:password@localhost/mongoenginetest')
conn = get_connection()
self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))
db = get_db()
self.assertTrue(isinstance(db, pymongo.database.Database))
self.assertEqual(db.name, 'mongoenginetest')
c.admin.system.users.remove({})
c.mongoenginetest.system.users.remove({})
def test_connect_uri_without_db(self):
"""Ensure connect() method works properly if the URI doesn't
include a database name.
"""
connect("mongoenginetest", host='mongodb://localhost/')
conn = get_connection()
self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))
db = get_db()
self.assertTrue(isinstance(db, pymongo.database.Database))
self.assertEqual(db.name, 'mongoenginetest')
def test_connect_uri_default_db(self):
"""Ensure connect() defaults to the right database name if
the URI and the database_name don't explicitly specify it.
"""
connect(host='mongodb://localhost/')
conn = get_connection()
self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))
db = get_db()
self.assertTrue(isinstance(db, pymongo.database.Database))
self.assertEqual(db.name, 'test')
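# Illustrative sketch: with no database named in either argument, get_db()
# falls back to PyMongo's default database name, 'test':
connect(host='mongodb://localhost/')
assert get_db().name == 'test'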
def test_uri_without_credentials_doesnt_override_conn_settings(self):
"""Ensure connect() uses the username & password params if the URI
doesn't explicitly specify them.
"""
c = connect(host='mongodb://localhost/mongoenginetest',
username='user',
password='pass')
# OperationFailure means that mongoengine attempted authentication
# w/ the provided username/password and failed - that's the desired
# behavior. If the credential-less URI had overridden the username/
# password kwargs, no authentication attempt (and hence no error)
# would happen here.
self.assertRaises(OperationFailure, get_db)
def test_connect_uri_with_authsource(self):
"""Ensure that the connect() method works well with `authSource`
option in the URI.
"""
# Create users
c = connect('mongoenginetest')
c.admin.system.users.remove({})
c.admin.add_user('username2', 'password')
# Authentication fails without "authSource"
if IS_PYMONGO_3:
test_conn = connect(
'mongoenginetest', alias='test1',
host='mongodb://username2:password@localhost/mongoenginetest'
)
self.assertRaises(OperationFailure, test_conn.server_info)
else:
self.assertRaises(
MongoEngineConnectionError,
connect, 'mongoenginetest', alias='test1',
host='mongodb://username2:password@localhost/mongoenginetest'
)
self.assertRaises(MongoEngineConnectionError, get_db, 'test1')
# Authentication succeeds with "authSource"
authd_conn = connect(
'mongoenginetest', alias='test2',
host=('mongodb://username2:password@localhost/'
'mongoenginetest?authSource=admin')
)
db = get_db('test2')
self.assertTrue(isinstance(db, pymongo.database.Database))
self.assertEqual(db.name, 'mongoenginetest')
# Clear all users
authd_conn.admin.system.users.remove({})
def test_register_connection(self):
"""Ensure that connections with different aliases may be registered.
"""
register_connection('testdb', 'mongoenginetest2')
self.assertRaises(MongoEngineConnectionError, get_connection)
conn = get_connection('testdb')
self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))
db = get_db('testdb')
self.assertTrue(isinstance(db, pymongo.database.Database))
self.assertEqual(db.name, 'mongoenginetest2')
def test_register_connection_defaults(self):
"""Ensure that defaults are used when the host and port are None.
"""
register_connection('testdb', 'mongoenginetest', host=None, port=None)
conn = get_connection('testdb')
self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))
def test_connection_kwargs(self):
"""Ensure that connection kwargs get passed to pymongo."""
connect('mongoenginetest', alias='t1', tz_aware=True)
conn = get_connection('t1')
self.assertTrue(get_tz_awareness(conn))
connect('mongoenginetest2', alias='t2')
conn = get_connection('t2')
self.assertFalse(get_tz_awareness(conn))
def test_connection_pool_via_kwarg(self):
"""Ensure we can specify a max connection pool size using
a connection kwarg.
"""
# Use "max_pool_size" or "maxpoolsize" depending on PyMongo version
# (former was changed to the latter as described in
# https://jira.mongodb.org/browse/PYTHON-854).
# TODO remove once PyMongo < 3.0 support is dropped
if pymongo.version_tuple[0] >= 3:
pool_size_kwargs = {'maxpoolsize': 100}
else:
pool_size_kwargs = {'max_pool_size': 100}
conn = connect('mongoenginetest', alias='max_pool_size_via_kwarg', **pool_size_kwargs)
self.assertEqual(conn.max_pool_size, 100)
def test_connection_pool_via_uri(self):
"""Ensure we can specify a max connection pool size using
an option in a connection URI.
"""
if pymongo.version_tuple[0] == 2 and pymongo.version_tuple[1] < 9:
raise SkipTest('maxpoolsize as a URI option is only supported in PyMongo v2.9+')
conn = connect(host='mongodb://localhost/test?maxpoolsize=100', alias='max_pool_size_via_uri')
self.assertEqual(conn.max_pool_size, 100)
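# Illustrative helper (hypothetical, not part of the original file): selects
# the pool-size kwarg name matching the installed PyMongo, as the tests
# above do inline.
def max_pool_size_kwargs(size):
    if pymongo.version_tuple[0] >= 3:
        return {'maxpoolsize': size}
    return {'max_pool_size': size}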
def test_write_concern(self):
"""Ensure write concern can be specified in connect() via
a kwarg or as part of the connection URI.
"""
conn1 = connect(alias='conn1', host='mongodb://localhost/testing?w=1&j=true')
conn2 = connect('testing', alias='conn2', w=1, j=True)
if IS_PYMONGO_3:
self.assertEqual(conn1.write_concern.document, {'w': 1, 'j': True})
self.assertEqual(conn2.write_concern.document, {'w': 1, 'j': True})
else:
self.assertEqual(dict(conn1.write_concern), {'w': 1, 'j': True})
self.assertEqual(dict(conn2.write_concern), {'w': 1, 'j': True})
def test_connect_with_replicaset_via_uri(self):
"""Ensure connect() works when specifying a replicaSet via the
MongoDB URI.
"""
if IS_PYMONGO_3:
c = connect(host='mongodb://localhost/test?replicaSet=local-rs')
db = get_db()
self.assertTrue(isinstance(db, pymongo.database.Database))
self.assertEqual(db.name, 'test')
else:
# PyMongo < v3.x raises an exception:
# "localhost:27017 is not a member of replica set local-rs"
with self.assertRaises(MongoEngineConnectionError):
c = connect(host='mongodb://localhost/test?replicaSet=local-rs')
def test_connect_with_replicaset_via_kwargs(self):
"""Ensure connect() works when specifying a replicaSet via the
connection kwargs
"""
if IS_PYMONGO_3:
c = connect(replicaset='local-rs')
self.assertEqual(c._MongoClient__options.replica_set_name,
'local-rs')
db = get_db()
self.assertTrue(isinstance(db, pymongo.database.Database))
self.assertEqual(db.name, 'test')
else:
# PyMongo < v3.x raises an exception:
# "localhost:27017 is not a member of replica set local-rs"
with self.assertRaises(MongoEngineConnectionError):
c = connect(replicaset='local-rs')
def test_datetime(self):
connect('mongoenginetest', tz_aware=True)
d = datetime.datetime(2010, 5, 5, tzinfo=utc)
class DateDoc(Document):
the_date = DateTimeField(required=True)
DateDoc.drop_collection()
DateDoc(the_date=d).save()
date_doc = DateDoc.objects.first()
self.assertEqual(d, date_doc.the_date)
def test_multiple_connection_settings(self):
connect('mongoenginetest', alias='t1', host="localhost")
connect('mongoenginetest2', alias='t2', host="127.0.0.1")
mongo_connections = mongoengine.connection._connections
self.assertEqual(len(mongo_connections.items()), 2)
self.assertTrue('t1' in mongo_connections.keys())
self.assertTrue('t2' in mongo_connections.keys())
if not IS_PYMONGO_3:
self.assertEqual(mongo_connections['t1'].host, 'localhost')
self.assertEqual(mongo_connections['t2'].host, '127.0.0.1')
else:
# PyMongo 3+ connects lazily, so force a round trip first
# Ensure we are connected, throws ServerSelectionTimeoutError otherwise.
# Purposely not catching exception to fail test if thrown.
mongo_connections['t1'].server_info()
mongo_connections['t2'].server_info()
self.assertEqual(mongo_connections['t1'].address[0], 'localhost')
self.assertEqual(mongo_connections['t2'].address[0], '127.0.0.1')
if __name__ == '__main__':
unittest.main()

View File

@@ -1,202 +0,0 @@
import unittest
from mongoengine import *
from mongoengine.connection import get_db
from mongoengine.context_managers import (switch_db, switch_collection,
no_sub_classes, no_dereference,
query_counter)
class ContextManagersTest(unittest.TestCase):
def test_switch_db_context_manager(self):
connect('mongoenginetest')
register_connection('testdb-1', 'mongoenginetest2')
class Group(Document):
name = StringField()
Group.drop_collection()
Group(name="hello - default").save()
self.assertEqual(1, Group.objects.count())
with switch_db(Group, 'testdb-1') as Group:
self.assertEqual(0, Group.objects.count())
Group(name="hello").save()
self.assertEqual(1, Group.objects.count())
Group.drop_collection()
self.assertEqual(0, Group.objects.count())
self.assertEqual(1, Group.objects.count())
def test_switch_collection_context_manager(self):
connect('mongoenginetest')
register_connection('testdb-1', 'mongoenginetest2')
class Group(Document):
name = StringField()
Group.drop_collection()
with switch_collection(Group, 'group1') as Group:
Group.drop_collection()
Group(name="hello - group").save()
self.assertEqual(1, Group.objects.count())
with switch_collection(Group, 'group1') as Group:
self.assertEqual(0, Group.objects.count())
Group(name="hello - group1").save()
self.assertEqual(1, Group.objects.count())
Group.drop_collection()
self.assertEqual(0, Group.objects.count())
self.assertEqual(1, Group.objects.count())
def test_no_dereference_context_manager_object_id(self):
"""Ensure that DBRef items in ListFields aren't dereferenced.
"""
connect('mongoenginetest')
class User(Document):
name = StringField()
class Group(Document):
ref = ReferenceField(User, dbref=False)
generic = GenericReferenceField()
members = ListField(ReferenceField(User, dbref=False))
User.drop_collection()
Group.drop_collection()
for i in range(1, 51):
User(name='user %s' % i).save()
user = User.objects.first()
Group(ref=user, members=User.objects, generic=user).save()
with no_dereference(Group) as NoDeRefGroup:
self.assertTrue(Group._fields['members']._auto_dereference)
self.assertFalse(NoDeRefGroup._fields['members']._auto_dereference)
with no_dereference(Group) as Group:
group = Group.objects.first()
self.assertTrue(all([not isinstance(m, User)
for m in group.members]))
self.assertFalse(isinstance(group.ref, User))
self.assertFalse(isinstance(group.generic, User))
self.assertTrue(all([isinstance(m, User)
for m in group.members]))
self.assertTrue(isinstance(group.ref, User))
self.assertTrue(isinstance(group.generic, User))
def test_no_dereference_context_manager_dbref(self):
"""Ensure that DBRef items in ListFields aren't dereferenced.
"""
connect('mongoenginetest')
class User(Document):
name = StringField()
class Group(Document):
ref = ReferenceField(User, dbref=True)
generic = GenericReferenceField()
members = ListField(ReferenceField(User, dbref=True))
User.drop_collection()
Group.drop_collection()
for i in range(1, 51):
User(name='user %s' % i).save()
user = User.objects.first()
Group(ref=user, members=User.objects, generic=user).save()
with no_dereference(Group) as NoDeRefGroup:
self.assertTrue(Group._fields['members']._auto_dereference)
self.assertFalse(NoDeRefGroup._fields['members']._auto_dereference)
with no_dereference(Group) as Group:
group = Group.objects.first()
self.assertTrue(all([not isinstance(m, User)
for m in group.members]))
self.assertFalse(isinstance(group.ref, User))
self.assertFalse(isinstance(group.generic, User))
self.assertTrue(all([isinstance(m, User)
for m in group.members]))
self.assertTrue(isinstance(group.ref, User))
self.assertTrue(isinstance(group.generic, User))
def test_no_sub_classes(self):
class A(Document):
x = IntField()
y = IntField()
meta = {'allow_inheritance': True}
class B(A):
z = IntField()
class C(B):
zz = IntField()
A.drop_collection()
A(x=10, y=20).save()
A(x=15, y=30).save()
B(x=20, y=40).save()
B(x=30, y=50).save()
C(x=40, y=60).save()
self.assertEqual(A.objects.count(), 5)
self.assertEqual(B.objects.count(), 3)
self.assertEqual(C.objects.count(), 1)
with no_sub_classes(A) as A:
self.assertEqual(A.objects.count(), 2)
for obj in A.objects:
self.assertEqual(obj.__class__, A)
with no_sub_classes(B) as B:
self.assertEqual(B.objects.count(), 2)
for obj in B.objects:
self.assertEqual(obj.__class__, B)
with no_sub_classes(C) as C:
self.assertEqual(C.objects.count(), 1)
for obj in C.objects:
self.assertEqual(obj.__class__, C)
# Confirm that the context managers exited correctly
self.assertEqual(A.objects.count(), 5)
self.assertEqual(B.objects.count(), 3)
self.assertEqual(C.objects.count(), 1)
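# Illustrative sketch (A as defined above): inside no_sub_classes the
# queryset is scoped to the exact class, so subclass documents disappear.
with no_sub_classes(A) as A_only:
    assert all(obj.__class__ is A_only for obj in A_only.objects)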
def test_query_counter(self):
connect('mongoenginetest')
db = get_db()
db.test.find({})
with query_counter() as q:
self.assertEqual(0, q)
for i in range(1, 51):
db.test.find({}).count()
self.assertEqual(50, q)
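# Illustrative usage (assumes find_one issues exactly one counted query):
# query_counter compares equal to the number of operations issued inside
# the block, so it can guard a query budget.
with query_counter() as q:
    db.test.find_one()
    assert q == 1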
if __name__ == '__main__':
unittest.main()

View File

@@ -1,80 +0,0 @@
import unittest
from mongoengine.base.datastructures import StrictDict
class TestStrictDict(unittest.TestCase):
def strict_dict_class(self, *args, **kwargs):
return StrictDict.create(*args, **kwargs)
def setUp(self):
self.dtype = self.strict_dict_class(("a", "b", "c"))
def test_init(self):
d = self.dtype(a=1, b=1, c=1)
self.assertEqual((d.a, d.b, d.c), (1, 1, 1))
def test_repr(self):
d = self.dtype(a=1, b=2, c=3)
self.assertEqual(repr(d), '{"a": 1, "b": 2, "c": 3}')
# make sure quotes are escaped properly
d = self.dtype(a='"', b="'", c="")
self.assertEqual(repr(d), '{"a": \'"\', "b": "\'", "c": \'\'}')
def test_init_fails_on_nonexisting_attrs(self):
with self.assertRaises(AttributeError):
self.dtype(a=1, b=2, d=3)
def test_eq(self):
d = self.dtype(a=1, b=1, c=1)
dd = self.dtype(a=1, b=1, c=1)
e = self.dtype(a=1, b=1, c=3)
f = self.dtype(a=1, b=1)
g = self.strict_dict_class(("a", "b", "c", "d"))(a=1, b=1, c=1, d=1)
h = self.strict_dict_class(("a", "c", "b"))(a=1, b=1, c=1)
i = self.strict_dict_class(("a", "c", "b"))(a=1, b=1, c=2)
self.assertEqual(d, dd)
self.assertNotEqual(d, e)
self.assertNotEqual(d, f)
self.assertNotEqual(d, g)
self.assertNotEqual(f, d)
self.assertEqual(d, h)
self.assertNotEqual(d, i)
def test_setattr_getattr(self):
d = self.dtype()
d.a = 1
self.assertEqual(d.a, 1)
self.assertRaises(AttributeError, getattr, d, 'b')
def test_setattr_raises_on_nonexisting_attr(self):
d = self.dtype()
with self.assertRaises(AttributeError):
d.x = 1
def test_setattr_getattr_special(self):
d = self.strict_dict_class(["items"])
d.items = 1
self.assertEqual(d.items, 1)
def test_get(self):
d = self.dtype(a=1)
self.assertEqual(d.get('a'), 1)
self.assertEqual(d.get('b', 'bla'), 'bla')
def test_items(self):
d = self.dtype(a=1)
self.assertEqual(d.items(), [('a', 1)])
d = self.dtype(a=1, b=2)
self.assertEqual(d.items(), [('a', 1), ('b', 2)])
def test_mappings_protocol(self):
d = self.dtype(a=1, b=2)
assert dict(d) == {'a': 1, 'b': 2}
assert dict(**d) == {'a': 1, 'b': 2}
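# Illustrative usage: StrictDict.create builds a dict-like class over a
# fixed key set; setting anything outside that set raises AttributeError.
D = StrictDict.create(('a', 'b'))
d = D(a=1)
d.b = 2
assert dict(d) == {'a': 1, 'b': 2}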
if __name__ == '__main__':
unittest.main()

File diff suppressed because it is too large

View File

@@ -1,51 +0,0 @@
import unittest
from pymongo import ReadPreference
from mongoengine.python_support import IS_PYMONGO_3
if IS_PYMONGO_3:
from pymongo import MongoClient
CONN_CLASS = MongoClient
READ_PREF = ReadPreference.SECONDARY
else:
from pymongo import ReplicaSetConnection
CONN_CLASS = ReplicaSetConnection
READ_PREF = ReadPreference.SECONDARY_ONLY
import mongoengine
from mongoengine import *
from mongoengine.connection import MongoEngineConnectionError
class ConnectionTest(unittest.TestCase):
def setUp(self):
mongoengine.connection._connection_settings = {}
mongoengine.connection._connections = {}
mongoengine.connection._dbs = {}
def tearDown(self):
mongoengine.connection._connection_settings = {}
mongoengine.connection._connections = {}
mongoengine.connection._dbs = {}
def test_replicaset_uri_passes_read_preference(self):
"""Requires a replica set called "rs" on port 27017
"""
try:
conn = connect(db='mongoenginetest',
host="mongodb://localhost/mongoenginetest?replicaSet=rs",
read_preference=READ_PREF)
except MongoEngineConnectionError as e:
return
if not isinstance(conn, CONN_CLASS):
# Bail out silently if we did not get a replica-set-capable connection
return
self.assertEqual(conn.read_preference, READ_PREF)
if __name__ == '__main__':
unittest.main()

View File

@@ -1,435 +0,0 @@
# -*- coding: utf-8 -*-
import unittest
from mongoengine import *
from mongoengine import signals
signal_output = []
class SignalTests(unittest.TestCase):
"""
Testing signals before/after saving and deleting.
"""
def get_signal_output(self, fn, *args, **kwargs):
# Flush any existing signal output
global signal_output
signal_output = []
fn(*args, **kwargs)
return signal_output
def setUp(self):
connect(db='mongoenginetest')
class Author(Document):
# Make the id deterministic for easier testing
id = SequenceField(primary_key=True)
name = StringField()
def __unicode__(self):
return self.name
@classmethod
def pre_init(cls, sender, document, *args, **kwargs):
signal_output.append('pre_init signal, %s' % cls.__name__)
signal_output.append(kwargs['values'])
@classmethod
def post_init(cls, sender, document, **kwargs):
signal_output.append('post_init signal, %s, document._created = %s' % (document, document._created))
@classmethod
def pre_save(cls, sender, document, **kwargs):
signal_output.append('pre_save signal, %s' % document)
signal_output.append(kwargs)
@classmethod
def pre_save_post_validation(cls, sender, document, **kwargs):
signal_output.append('pre_save_post_validation signal, %s' % document)
if kwargs.pop('created', False):
signal_output.append('Is created')
else:
signal_output.append('Is updated')
signal_output.append(kwargs)
@classmethod
def post_save(cls, sender, document, **kwargs):
dirty_keys = document._delta()[0].keys() + document._delta()[1].keys()
signal_output.append('post_save signal, %s' % document)
signal_output.append('post_save dirty keys, %s' % dirty_keys)
if kwargs.pop('created', False):
signal_output.append('Is created')
else:
signal_output.append('Is updated')
signal_output.append(kwargs)
@classmethod
def pre_delete(cls, sender, document, **kwargs):
signal_output.append('pre_delete signal, %s' % document)
signal_output.append(kwargs)
@classmethod
def post_delete(cls, sender, document, **kwargs):
signal_output.append('post_delete signal, %s' % document)
signal_output.append(kwargs)
@classmethod
def pre_bulk_insert(cls, sender, documents, **kwargs):
signal_output.append('pre_bulk_insert signal, %s' % documents)
signal_output.append(kwargs)
@classmethod
def post_bulk_insert(cls, sender, documents, **kwargs):
signal_output.append('post_bulk_insert signal, %s' % documents)
if kwargs.pop('loaded', False):
signal_output.append('Is loaded')
else:
signal_output.append('Not loaded')
signal_output.append(kwargs)
self.Author = Author
Author.drop_collection()
Author.id.set_next_value(0)
class Another(Document):
name = StringField()
def __unicode__(self):
return self.name
@classmethod
def pre_delete(cls, sender, document, **kwargs):
signal_output.append('pre_delete signal, %s' % document)
signal_output.append(kwargs)
@classmethod
def post_delete(cls, sender, document, **kwargs):
signal_output.append('post_delete signal, %s' % document)
signal_output.append(kwargs)
self.Another = Another
Another.drop_collection()
class ExplicitId(Document):
id = IntField(primary_key=True)
@classmethod
def post_save(cls, sender, document, **kwargs):
if 'created' in kwargs:
if kwargs['created']:
signal_output.append('Is created')
else:
signal_output.append('Is updated')
self.ExplicitId = ExplicitId
ExplicitId.drop_collection()
class Post(Document):
title = StringField()
content = StringField()
active = BooleanField(default=False)
def __unicode__(self):
return self.title
@classmethod
def pre_bulk_insert(cls, sender, documents, **kwargs):
signal_output.append('pre_bulk_insert signal, %s' %
[(doc, {'active': documents[n].active})
for n, doc in enumerate(documents)])
# make changes here, this is just an example -
# it could be anything that needs pre-validation or look-ups before bulk inserting
for document in documents:
if not document.active:
document.active = True
signal_output.append(kwargs)
@classmethod
def post_bulk_insert(cls, sender, documents, **kwargs):
signal_output.append('post_bulk_insert signal, %s' %
[(doc, {'active': documents[n].active})
for n, doc in enumerate(documents)])
if kwargs.pop('loaded', False):
signal_output.append('Is loaded')
else:
signal_output.append('Not loaded')
signal_output.append(kwargs)
self.Post = Post
Post.drop_collection()
# Record the number of connected signals so that we can check at the
# end that all the signals we registered got properly unregistered
self.pre_signals = (
len(signals.pre_init.receivers),
len(signals.post_init.receivers),
len(signals.pre_save.receivers),
len(signals.pre_save_post_validation.receivers),
len(signals.post_save.receivers),
len(signals.pre_delete.receivers),
len(signals.post_delete.receivers),
len(signals.pre_bulk_insert.receivers),
len(signals.post_bulk_insert.receivers),
)
signals.pre_init.connect(Author.pre_init, sender=Author)
signals.post_init.connect(Author.post_init, sender=Author)
signals.pre_save.connect(Author.pre_save, sender=Author)
signals.pre_save_post_validation.connect(Author.pre_save_post_validation, sender=Author)
signals.post_save.connect(Author.post_save, sender=Author)
signals.pre_delete.connect(Author.pre_delete, sender=Author)
signals.post_delete.connect(Author.post_delete, sender=Author)
signals.pre_bulk_insert.connect(Author.pre_bulk_insert, sender=Author)
signals.post_bulk_insert.connect(Author.post_bulk_insert, sender=Author)
signals.pre_delete.connect(Another.pre_delete, sender=Another)
signals.post_delete.connect(Another.post_delete, sender=Another)
signals.post_save.connect(ExplicitId.post_save, sender=ExplicitId)
signals.pre_bulk_insert.connect(Post.pre_bulk_insert, sender=Post)
signals.post_bulk_insert.connect(Post.post_bulk_insert, sender=Post)
def tearDown(self):
signals.pre_init.disconnect(self.Author.pre_init)
signals.post_init.disconnect(self.Author.post_init)
signals.post_delete.disconnect(self.Author.post_delete)
signals.pre_delete.disconnect(self.Author.pre_delete)
signals.post_save.disconnect(self.Author.post_save)
signals.pre_save_post_validation.disconnect(self.Author.pre_save_post_validation)
signals.pre_save.disconnect(self.Author.pre_save)
signals.pre_bulk_insert.disconnect(self.Author.pre_bulk_insert)
signals.post_bulk_insert.disconnect(self.Author.post_bulk_insert)
signals.post_delete.disconnect(self.Another.post_delete)
signals.pre_delete.disconnect(self.Another.pre_delete)
signals.post_save.disconnect(self.ExplicitId.post_save)
signals.pre_bulk_insert.disconnect(self.Post.pre_bulk_insert)
signals.post_bulk_insert.disconnect(self.Post.post_bulk_insert)
# Check that all our signals got disconnected properly.
post_signals = (
len(signals.pre_init.receivers),
len(signals.post_init.receivers),
len(signals.pre_save.receivers),
len(signals.pre_save_post_validation.receivers),
len(signals.post_save.receivers),
len(signals.pre_delete.receivers),
len(signals.post_delete.receivers),
len(signals.pre_bulk_insert.receivers),
len(signals.post_bulk_insert.receivers),
)
self.ExplicitId.objects.delete()
self.assertEqual(self.pre_signals, post_signals)
def test_model_signals(self):
""" Model saves should throw some signals. """
def create_author():
self.Author(name='Bill Shakespeare')
def bulk_create_author_with_load():
a1 = self.Author(name='Bill Shakespeare')
self.Author.objects.insert([a1], load_bulk=True)
def bulk_create_author_without_load():
a1 = self.Author(name='Bill Shakespeare')
self.Author.objects.insert([a1], load_bulk=False)
def load_existing_author():
a = self.Author(name='Bill Shakespeare')
a.save()
self.get_signal_output(lambda: None) # eliminate signal output
a1 = self.Author.objects(name='Bill Shakespeare')[0]
self.assertEqual(self.get_signal_output(create_author), [
"pre_init signal, Author",
{'name': 'Bill Shakespeare'},
"post_init signal, Bill Shakespeare, document._created = True",
])
a1 = self.Author(name='Bill Shakespeare')
self.assertEqual(self.get_signal_output(a1.save), [
"pre_save signal, Bill Shakespeare",
{},
"pre_save_post_validation signal, Bill Shakespeare",
"Is created",
{},
"post_save signal, Bill Shakespeare",
"post_save dirty keys, ['name']",
"Is created",
{}
])
a1.reload()
a1.name = 'William Shakespeare'
self.assertEqual(self.get_signal_output(a1.save), [
"pre_save signal, William Shakespeare",
{},
"pre_save_post_validation signal, William Shakespeare",
"Is updated",
{},
"post_save signal, William Shakespeare",
"post_save dirty keys, ['name']",
"Is updated",
{}
])
self.assertEqual(self.get_signal_output(a1.delete), [
'pre_delete signal, William Shakespeare',
{},
'post_delete signal, William Shakespeare',
{}
])
self.assertEqual(self.get_signal_output(load_existing_author), [
"pre_init signal, Author",
{'id': 2, 'name': 'Bill Shakespeare'},
"post_init signal, Bill Shakespeare, document._created = False"
])
self.assertEqual(self.get_signal_output(bulk_create_author_with_load), [
'pre_init signal, Author',
{'name': 'Bill Shakespeare'},
'post_init signal, Bill Shakespeare, document._created = True',
'pre_bulk_insert signal, [<Author: Bill Shakespeare>]',
{},
'pre_init signal, Author',
{'id': 3, 'name': 'Bill Shakespeare'},
'post_init signal, Bill Shakespeare, document._created = False',
'post_bulk_insert signal, [<Author: Bill Shakespeare>]',
'Is loaded',
{}
])
self.assertEqual(self.get_signal_output(bulk_create_author_without_load), [
"pre_init signal, Author",
{'name': 'Bill Shakespeare'},
"post_init signal, Bill Shakespeare, document._created = True",
"pre_bulk_insert signal, [<Author: Bill Shakespeare>]",
{},
"post_bulk_insert signal, [<Author: Bill Shakespeare>]",
"Not loaded",
{}
])
def test_signal_kwargs(self):
""" Make sure signal_kwargs is passed to signals calls. """
def live_and_let_die():
a = self.Author(name='Bill Shakespeare')
a.save(signal_kwargs={'live': True, 'die': False})
a.delete(signal_kwargs={'live': False, 'die': True})
self.assertEqual(self.get_signal_output(live_and_let_die), [
"pre_init signal, Author",
{'name': 'Bill Shakespeare'},
"post_init signal, Bill Shakespeare, document._created = True",
"pre_save signal, Bill Shakespeare",
{'die': False, 'live': True},
"pre_save_post_validation signal, Bill Shakespeare",
"Is created",
{'die': False, 'live': True},
"post_save signal, Bill Shakespeare",
"post_save dirty keys, ['name']",
"Is created",
{'die': False, 'live': True},
'pre_delete signal, Bill Shakespeare',
{'die': True, 'live': False},
'post_delete signal, Bill Shakespeare',
{'die': True, 'live': False}
])
def bulk_create_author():
a1 = self.Author(name='Bill Shakespeare')
self.Author.objects.insert([a1], signal_kwargs={'key': True})
self.assertEqual(self.get_signal_output(bulk_create_author), [
'pre_init signal, Author',
{'name': 'Bill Shakespeare'},
'post_init signal, Bill Shakespeare, document._created = True',
'pre_bulk_insert signal, [<Author: Bill Shakespeare>]',
{'key': True},
'pre_init signal, Author',
{'id': 2, 'name': 'Bill Shakespeare'},
'post_init signal, Bill Shakespeare, document._created = False',
'post_bulk_insert signal, [<Author: Bill Shakespeare>]',
'Is loaded',
{'key': True}
])
def test_queryset_delete_signals(self):
""" Queryset delete should throw some signals. """
self.Another(name='Bill Shakespeare').save()
self.assertEqual(self.get_signal_output(self.Another.objects.delete), [
'pre_delete signal, Bill Shakespeare',
{},
'post_delete signal, Bill Shakespeare',
{}
])
def test_signals_with_explicit_doc_ids(self):
""" Model saves must have a created flag the first time."""
ei = self.ExplicitId(id=123)
# post_save must receive the created flag, even if there's already
# an object id present
self.assertEqual(self.get_signal_output(ei.save), ['Is created'])
# second time, it must be an update
self.assertEqual(self.get_signal_output(ei.save), ['Is updated'])
def test_signals_with_switch_collection(self):
ei = self.ExplicitId(id=123)
ei.switch_collection("explicit__1")
self.assertEqual(self.get_signal_output(ei.save), ['Is created'])
ei.switch_collection("explicit__1")
self.assertEqual(self.get_signal_output(ei.save), ['Is updated'])
ei.switch_collection("explicit__1", keep_created=False)
self.assertEqual(self.get_signal_output(ei.save), ['Is created'])
ei.switch_collection("explicit__1", keep_created=False)
self.assertEqual(self.get_signal_output(ei.save), ['Is created'])
def test_signals_with_switch_db(self):
connect('mongoenginetest')
register_connection('testdb-1', 'mongoenginetest2')
ei = self.ExplicitId(id=123)
ei.switch_db("testdb-1")
self.assertEqual(self.get_signal_output(ei.save), ['Is created'])
ei.switch_db("testdb-1")
self.assertEqual(self.get_signal_output(ei.save), ['Is updated'])
ei.switch_db("testdb-1", keep_created=False)
self.assertEqual(self.get_signal_output(ei.save), ['Is created'])
ei.switch_db("testdb-1", keep_created=False)
self.assertEqual(self.get_signal_output(ei.save), ['Is created'])
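# Illustrative sketch (mirroring the assertions above): keep_created=False
# resets the document's "created" state, so the next save() signals a create.
ei = self.ExplicitId(id=123)
ei.switch_db("testdb-1", keep_created=False)
assert self.get_signal_output(ei.save) == ['Is created']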
def test_signals_bulk_insert(self):
def bulk_set_active_post():
posts = [
self.Post(title='Post 1'),
self.Post(title='Post 2'),
self.Post(title='Post 3')
]
self.Post.objects.insert(posts)
results = self.get_signal_output(bulk_set_active_post)
self.assertEqual(results, [
"pre_bulk_insert signal, [(<Post: Post 1>, {'active': False}), (<Post: Post 2>, {'active': False}), (<Post: Post 3>, {'active': False})]",
{},
"post_bulk_insert signal, [(<Post: Post 1>, {'active': True}), (<Post: Post 2>, {'active': True}), (<Post: Post 3>, {'active': True})]",
'Is loaded',
{}
])
if __name__ == '__main__':
unittest.main()

Some files were not shown because too many files have changed in this diff