diff --git a/.gitignore b/.gitignore index b180e87e..048a2d19 100644 --- a/.gitignore +++ b/.gitignore @@ -17,3 +17,4 @@ tests/test_bugfix.py htmlcov/ venv venv3 +scratchpad diff --git a/.install_mongodb_on_travis.sh b/.install_mongodb_on_travis.sh new file mode 100644 index 00000000..f2018411 --- /dev/null +++ b/.install_mongodb_on_travis.sh @@ -0,0 +1,27 @@ +#!/bin/bash + +sudo apt-get remove mongodb-org-server +sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10 + +if [ "$MONGODB" = "2.4" ]; then + echo "deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen" | sudo tee /etc/apt/sources.list.d/mongodb.list + sudo apt-get update + sudo apt-get install mongodb-10gen=2.4.14 + sudo service mongodb start +elif [ "$MONGODB" = "2.6" ]; then + echo "deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen" | sudo tee /etc/apt/sources.list.d/mongodb.list + sudo apt-get update + sudo apt-get install mongodb-org-server=2.6.12 + # service should be started automatically +elif [ "$MONGODB" = "3.0" ]; then + echo "deb http://repo.mongodb.org/apt/ubuntu trusty/mongodb-org/3.0 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb.list + sudo apt-get update + sudo apt-get install mongodb-org-server=3.0.14 + # service should be started automatically +else + echo "Invalid MongoDB version, expected 2.4, 2.6, or 3.0." + exit 1 +fi; + +mkdir db +1>db/logs mongod --dbpath=db & diff --git a/.travis.yml b/.travis.yml index cb6c97e6..381f7385 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,28 +1,48 @@ +# For full coverage, we'd have to test all supported Python, MongoDB, and +# PyMongo combinations. However, that would result in an overly long build +# with a very large number of jobs, hence we only test a subset of all the +# combinations: +# * MongoDB v2.4 & v3.0 are only tested against Python v2.7 & v3.5. +# * MongoDB v2.4 is tested against PyMongo v2.7 & v3.x. +# * MongoDB v3.0 is tested against PyMongo v3.x. 
+# * MongoDB v2.6 is currently the "main" version tested against Python v2.7, +# v3.5, v3.6 & PyPy, and PyMongo v3.x. +# +# Reminder: Update README.rst if you change MongoDB versions we test. + language: python python: -- '2.7' -- '3.3' -- '3.4' -- '3.5' +- 2.7 +- 3.5 +- 3.6 - pypy -- pypy3 env: -- PYMONGO=2.7 -- PYMONGO=2.8 -- PYMONGO=3.0 -- PYMONGO=dev +- MONGODB=2.6 PYMONGO=3.x matrix: + # Finish the build as soon as one job fails fast_finish: true + include: + - python: 2.7 + env: MONGODB=2.4 PYMONGO=3.5 + - python: 2.7 + env: MONGODB=3.0 PYMONGO=3.x + - python: 3.5 + env: MONGODB=2.4 PYMONGO=3.5 + - python: 3.5 + env: MONGODB=3.0 PYMONGO=3.x + - python: 3.6 + env: MONGODB=2.4 PYMONGO=3.5 + - python: 3.6 + env: MONGODB=3.0 PYMONGO=3.x + before_install: -- travis_retry sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10 -- echo 'deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen' | - sudo tee /etc/apt/sources.list.d/mongodb.list -- travis_retry sudo apt-get update -- travis_retry sudo apt-get install mongodb-org-server +- bash .install_mongodb_on_travis.sh +- sleep 15 # https://docs.travis-ci.com/user/database-setup/#MongoDB-does-not-immediately-accept-connections +- mongo --eval 'db.version();' install: - sudo apt-get install python-dev python3-dev libopenjpeg-dev zlib1g-dev libjpeg-turbo8-dev @@ -30,14 +50,17 @@ install: python-tk - travis_retry pip install --upgrade pip - travis_retry pip install coveralls -- travis_retry pip install flake8 +- travis_retry pip install flake8 flake8-import-order - travis_retry pip install tox>=1.9 - travis_retry pip install "virtualenv<14.0.0" # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32) - travis_retry tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . 
| sed -e 's/pypypy/pypy/') -- -e test +# Cache dependencies installed via pip +cache: pip + # Run flake8 for py27 before_script: -- if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then tox -e flake8; fi +- if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then flake8 .; else echo "flake8 only runs on py27"; fi script: - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- --with-coverage @@ -45,22 +68,34 @@ script: # For now only submit coveralls for Python v2.7. Python v3.x currently shows # 0% coverage. That's caused by 'use_2to3', which builds the py3-compatible # code in a separate dir and runs tests on that. -after_script: +after_success: - if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then coveralls --verbose; fi notifications: irc: irc.freenode.org#mongoengine +# Only run builds on the master branch and GitHub releases (tagged as vX.Y.Z) branches: only: - master - /^v.*$/ +# Whenever a new release is created via GitHub, publish it on PyPI. deploy: provider: pypi user: the_drow password: secure: QMyatmWBnC6ZN3XLW2+fTBDU4LQcp1m/LjR2/0uamyeUzWKdlOoh/Wx5elOgLwt/8N9ppdPeG83ose1jOz69l5G0MUMjv8n/RIcMFSpCT59tGYqn3kh55b0cIZXFT9ar+5cxlif6a5rS72IHm5li7QQyxexJIII6Uxp0kpvUmek= + + # create a source distribution and a pure python wheel for faster installs + distributions: "sdist bdist_wheel" + + # only deploy on tagged commits (aka GitHub releases) and only for the + # parent repo's builds running Python 2.7 along with PyMongo v3.x (we run + # Travis against many different Python and PyMongo versions and we don't + # want the deploy to occur multiple times). 
on: tags: true repo: MongoEngine/mongoengine + condition: "$PYMONGO = 3.x" + python: 2.7 diff --git a/AUTHORS b/AUTHORS index 1d724718..b38825dc 100644 --- a/AUTHORS +++ b/AUTHORS @@ -243,3 +243,7 @@ that much better: * Victor Varvaryuk * Stanislav Kaledin (https://github.com/sallyruthstruik) * Dmitry Yantsen (https://github.com/mrTable) + * Renjianxin (https://github.com/Davidrjx) + * Erdenezul Batmunkh (https://github.com/erdenezul) + * Andy Yankovsky (https://github.com/werat) + * Bastien Gérard (https://github.com/bagerard) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index aeba41f7..f7b15c85 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -14,34 +14,38 @@ Before starting to write code, look for existing `tickets `_ or `create one `_ for your specific issue or feature request. That way you avoid working on something -that might not be of interest or that has already been addressed. If in doubt +that might not be of interest or that has already been addressed. If in doubt post to the `user group ` Supported Interpreters ---------------------- -MongoEngine supports CPython 2.6 and newer. Language +MongoEngine supports CPython 2.7 and newer. Language features not supported by all interpreters can not be used. -Please also ensure that your code is properly converted by -`2to3 `_ for Python 3 support. +The codebase is written in python 2 so you must be using python 2 +when developing new features. Compatibility of the library with Python 3 +relies on the 2to3 package that gets executed as part of the installation +build. You should ensure that your code is properly converted by +`2to3 `_. Style Guide ----------- MongoEngine aims to follow `PEP8 `_ -including 4 space indents. When possible we try to stick to 79 character line limits. -However, screens got bigger and an ORM has a strong focus on readability and -if it can help, we accept 119 as maximum line length, in a similar way as -`django does `_ +including 4 space indents. 
When possible we try to stick to 79 character line +limits. However, screens got bigger and an ORM has a strong focus on +readability and if it can help, we accept 119 as maximum line length, in a +similar way as `django does +`_ Testing ------- All tests are run on `Travis `_ -and any pull requests are automatically tested by Travis. Any pull requests -without tests will take longer to be integrated and might be refused. +and any pull requests are automatically tested. Any pull requests without +tests will take longer to be integrated and might be refused. -You may also submit a simple failing test as a PullRequest if you don't know +You may also submit a simple failing test as a pull request if you don't know how to fix it, it will be easier for other people to work on it and it may get fixed faster. @@ -49,13 +53,18 @@ General Guidelines ------------------ - Avoid backward breaking changes if at all possible. +- If you *have* to introduce a breaking change, make it very clear in your + pull request's description. Also, describe how users of this package + should adapt to the breaking change in docs/upgrade.rst. - Write inline documentation for new classes and methods. - Write tests and make sure they pass (make sure you have a mongod running on the default port, then execute ``python setup.py nosetests`` from the cmd line to run the test suite). -- Ensure tests pass on every Python and PyMongo versions. - You can test on these versions locally by executing ``tox`` -- Add enhancements or problematic bug fixes to docs/changelog.rst +- Ensure tests pass on all supported Python, PyMongo, and MongoDB versions. + You can test various Python and PyMongo versions locally by executing + ``tox``. For different MongoDB versions, you can rely on our automated + Travis tests. +- Add enhancements or problematic bug fixes to docs/changelog.rst. 
- Add yourself to AUTHORS :) Documentation @@ -69,3 +78,8 @@ just make your changes to the inline documentation of the appropriate branch and submit a `pull request `_. You might also use the github `Edit `_ button. + +If you want to test your documentation changes locally, you need to install +the ``sphinx`` and ``sphinx_rtd_theme`` packages. Once these are installed, +go to the ``docs`` directory, run ``make html`` and inspect the updated docs +by running ``open _build/html/index.html``. diff --git a/README.rst b/README.rst index e46a835f..e1e2aef6 100644 --- a/README.rst +++ b/README.rst @@ -19,32 +19,42 @@ MongoEngine About ===== MongoEngine is a Python Object-Document Mapper for working with MongoDB. -Documentation available at https://mongoengine-odm.readthedocs.io - there is currently -a `tutorial `_, a `user guide -`_ and an `API reference -`_. +Documentation is available at https://mongoengine-odm.readthedocs.io - there +is currently a `tutorial `_, +a `user guide `_, and +an `API reference `_. + +Supported MongoDB Versions +========================== +MongoEngine is currently tested against MongoDB v2.4, v2.6, and v3.0. Future +versions should be supported as well, but aren't actively tested at the moment. +Make sure to open an issue or submit a pull request if you experience any +problems with MongoDB v3.2+. Installation ============ We recommend the use of `virtualenv `_ and of `pip `_. You can then use ``pip install -U mongoengine``. -You may also have `setuptools `_ and thus -you can use ``easy_install -U mongoengine``. Otherwise, you can download the +You may also have `setuptools `_ +and thus you can use ``easy_install -U mongoengine``. Otherwise, you can download the source from `GitHub `_ and run ``python setup.py install``. Dependencies ============ -- pymongo>=2.7.1 -- sphinx (optional - for documentation generation) +All of the dependencies can easily be installed via `pip `_. 
+At the very least, you'll need these two packages to use MongoEngine: + +- pymongo>=2.7.1 +- six>=1.10.0 + +If you utilize a ``DateTimeField``, you might also use a more flexible date parser: -Optional Dependencies ---------------------- -- **Image Fields**: Pillow>=2.0.0 - dateutil>=2.1.0 -.. note - MongoEngine always runs it's test suite against the latest patch version of each dependecy. e.g.: PyMongo 3.0.1 +If you need to use an ``ImageField`` or ``ImageGridFsProxy``: + +- Pillow>=2.0.0 Examples ======== @@ -57,7 +67,7 @@ Some simple examples of what MongoEngine code looks like: class BlogPost(Document): title = StringField(required=True, max_length=200) - posted = DateTimeField(default=datetime.datetime.now) + posted = DateTimeField(default=datetime.datetime.utcnow) tags = ListField(StringField(max_length=50)) meta = {'allow_inheritance': True} @@ -87,27 +97,28 @@ Some simple examples of what MongoEngine code looks like: ... print ... - >>> len(BlogPost.objects) + # Count all blog posts and its subtypes + >>> BlogPost.objects.count() 2 - >>> len(TextPost.objects) + >>> TextPost.objects.count() 1 - >>> len(LinkPost.objects) + >>> LinkPost.objects.count() 1 - # Find tagged posts - >>> len(BlogPost.objects(tags='mongoengine')) + # Count tagged posts + >>> BlogPost.objects(tags='mongoengine').count() 2 - >>> len(BlogPost.objects(tags='mongodb')) + >>> BlogPost.objects(tags='mongodb').count() 1 Tests ===== To run the test suite, ensure you are running a local instance of MongoDB on -the standard port and have ``nose`` installed. Then, run: ``python setup.py nosetests``. +the standard port and have ``nose`` installed. Then, run ``python setup.py nosetests``. -To run the test suite on every supported Python version and every supported PyMongo version, -you can use ``tox``. -tox and each supported Python version should be installed in your environment: +To run the test suite on every supported Python and PyMongo version, you can +use ``tox``. 
You'll need to make sure you have each supported Python version +installed in your environment and then: .. code-block:: shell @@ -116,13 +127,16 @@ tox and each supported Python version should be installed in your environment: # Run the test suites $ tox -If you wish to run one single or selected tests, use the nosetest convention. It will find the folder, -eventually the file, go to the TestClass specified after the colon and eventually right to the single test. -Also use the -s argument if you want to print out whatever or access pdb while testing. +If you wish to run a subset of tests, use the nosetests convention: .. code-block:: shell - $ python setup.py nosetests --tests tests/fields/fields.py:FieldTest.test_cls_field -s + # Run all the tests in a particular test file + $ python setup.py nosetests --tests tests/fields/fields.py + # Run only particular test class in that file + $ python setup.py nosetests --tests tests/fields/fields.py:FieldTest + # Use the -s option if you want to print some debug statements or use pdb + $ python setup.py nosetests --tests tests/fields/fields.py:FieldTest -s Community ========= @@ -130,8 +144,7 @@ Community `_ - `MongoEngine Developers mailing list `_ -- `#mongoengine IRC channel `_ Contributing ============ -We welcome contributions! see the `Contribution guidelines `_ +We welcome contributions! See the `Contribution guidelines `_ diff --git a/docs/_themes/sphinx_rtd_theme/__init__.py b/docs/_themes/sphinx_rtd_theme/__init__.py deleted file mode 100755 index 1440863d..00000000 --- a/docs/_themes/sphinx_rtd_theme/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -"""Sphinx ReadTheDocs theme. - -From https://github.com/ryan-roemer/sphinx-bootstrap-theme. 
- -""" -import os - -VERSION = (0, 1, 5) - -__version__ = ".".join(str(v) for v in VERSION) -__version_full__ = __version__ - - -def get_html_theme_path(): - """Return list of HTML theme paths.""" - cur_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) - return cur_dir diff --git a/docs/_themes/sphinx_rtd_theme/breadcrumbs.html b/docs/_themes/sphinx_rtd_theme/breadcrumbs.html deleted file mode 100755 index 3e4f359c..00000000 --- a/docs/_themes/sphinx_rtd_theme/breadcrumbs.html +++ /dev/null @@ -1,15 +0,0 @@ - -
- diff --git a/docs/_themes/sphinx_rtd_theme/footer.html b/docs/_themes/sphinx_rtd_theme/footer.html deleted file mode 100755 index 1fa05eaa..00000000 --- a/docs/_themes/sphinx_rtd_theme/footer.html +++ /dev/null @@ -1,30 +0,0 @@ -
- {% if next or prev %} - - {% endif %} - -
- -

- {%- if show_copyright %} - {%- if hasdoc('copyright') %} - {% trans path=pathto('copyright'), copyright=copyright|e %}© Copyright {{ copyright }}.{% endtrans %} - {%- else %} - {% trans copyright=copyright|e %}© Copyright {{ copyright }}.{% endtrans %} - {%- endif %} - {%- endif %} - - {%- if last_updated %} - {% trans last_updated=last_updated|e %}Last updated on {{ last_updated }}.{% endtrans %} - {%- endif %} -

- - {% trans %}Sphinx theme provided by Read the Docs{% endtrans %} -
diff --git a/docs/_themes/sphinx_rtd_theme/layout.html b/docs/_themes/sphinx_rtd_theme/layout.html deleted file mode 100755 index febe8eb0..00000000 --- a/docs/_themes/sphinx_rtd_theme/layout.html +++ /dev/null @@ -1,142 +0,0 @@ -{# TEMPLATE VAR SETTINGS #} -{%- set url_root = pathto('', 1) %} -{%- if url_root == '#' %}{% set url_root = '' %}{% endif %} -{%- if not embedded and docstitle %} - {%- set titlesuffix = " — "|safe + docstitle|e %} -{%- else %} - {%- set titlesuffix = "" %} -{%- endif %} - - - - - - - - {% block htmltitle %} - {{ title|striptags|e }}{{ titlesuffix }} - {% endblock %} - - {# FAVICON #} - {% if favicon %} - - {% endif %} - {# CANONICAL #} - {%- if theme_canonical_url %} - - {%- endif %} - - {# CSS #} - - - {# JS #} - {% if not embedded %} - - - {%- for scriptfile in script_files %} - - {%- endfor %} - - {% if use_opensearch %} - - {% endif %} - - {% endif %} - - {# RTD hosts these file themselves, so just load on non RTD builds #} - {% if not READTHEDOCS %} - - - {% endif %} - - {% for cssfile in css_files %} - - {% endfor %} - - {%- block linktags %} - {%- if hasdoc('about') %} - - {%- endif %} - {%- if hasdoc('genindex') %} - - {%- endif %} - {%- if hasdoc('search') %} - - {%- endif %} - {%- if hasdoc('copyright') %} - - {%- endif %} - - {%- if parents %} - - {%- endif %} - {%- if next %} - - {%- endif %} - {%- if prev %} - - {%- endif %} - {%- endblock %} - {%- block extrahead %} {% endblock %} - - - - - - - -
- - {# SIDE NAV, TOGGLES ON MOBILE #} - - -
- - {# MOBILE NAV, TRIGGLES SIDE NAV ON TOGGLE #} - - - - {# PAGE CONTENT #} -
-
- {% include "breadcrumbs.html" %} - {% block body %}{% endblock %} - {% include "footer.html" %} -
-
- -
- -
- {% include "versions.html" %} - - diff --git a/docs/_themes/sphinx_rtd_theme/layout_old.html b/docs/_themes/sphinx_rtd_theme/layout_old.html deleted file mode 100755 index deb8df2a..00000000 --- a/docs/_themes/sphinx_rtd_theme/layout_old.html +++ /dev/null @@ -1,205 +0,0 @@ -{# - basic/layout.html - ~~~~~~~~~~~~~~~~~ - - Master layout template for Sphinx themes. - - :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS. - :license: BSD, see LICENSE for details. -#} -{%- block doctype -%} - -{%- endblock %} -{%- set reldelim1 = reldelim1 is not defined and ' »' or reldelim1 %} -{%- set reldelim2 = reldelim2 is not defined and ' |' or reldelim2 %} -{%- set render_sidebar = (not embedded) and (not theme_nosidebar|tobool) and - (sidebars != []) %} -{%- set url_root = pathto('', 1) %} -{# XXX necessary? #} -{%- if url_root == '#' %}{% set url_root = '' %}{% endif %} -{%- if not embedded and docstitle %} - {%- set titlesuffix = " — "|safe + docstitle|e %} -{%- else %} - {%- set titlesuffix = "" %} -{%- endif %} - -{%- macro relbar() %} - -{%- endmacro %} - -{%- macro sidebar() %} - {%- if render_sidebar %} -
-
- {%- block sidebarlogo %} - {%- if logo %} - - {%- endif %} - {%- endblock %} - {%- if sidebars != None %} - {#- new style sidebar: explicitly include/exclude templates #} - {%- for sidebartemplate in sidebars %} - {%- include sidebartemplate %} - {%- endfor %} - {%- else %} - {#- old style sidebars: using blocks -- should be deprecated #} - {%- block sidebartoc %} - {%- include "localtoc.html" %} - {%- endblock %} - {%- block sidebarrel %} - {%- include "relations.html" %} - {%- endblock %} - {%- block sidebarsourcelink %} - {%- include "sourcelink.html" %} - {%- endblock %} - {%- if customsidebar %} - {%- include customsidebar %} - {%- endif %} - {%- block sidebarsearch %} - {%- include "searchbox.html" %} - {%- endblock %} - {%- endif %} -
-
- {%- endif %} -{%- endmacro %} - -{%- macro script() %} - - {%- for scriptfile in script_files %} - - {%- endfor %} -{%- endmacro %} - -{%- macro css() %} - - - {%- for cssfile in css_files %} - - {%- endfor %} -{%- endmacro %} - - - - - {{ metatags }} - {%- block htmltitle %} - {{ title|striptags|e }}{{ titlesuffix }} - {%- endblock %} - {{ css() }} - {%- if not embedded %} - {{ script() }} - {%- if use_opensearch %} - - {%- endif %} - {%- if favicon %} - - {%- endif %} - {%- endif %} -{%- block linktags %} - {%- if hasdoc('about') %} - - {%- endif %} - {%- if hasdoc('genindex') %} - - {%- endif %} - {%- if hasdoc('search') %} - - {%- endif %} - {%- if hasdoc('copyright') %} - - {%- endif %} - - {%- if parents %} - - {%- endif %} - {%- if next %} - - {%- endif %} - {%- if prev %} - - {%- endif %} -{%- endblock %} -{%- block extrahead %} {% endblock %} - - -{%- block header %}{% endblock %} - -{%- block relbar1 %}{{ relbar() }}{% endblock %} - -{%- block content %} - {%- block sidebar1 %} {# possible location for sidebar #} {% endblock %} - -
- {%- block document %} -
- {%- if render_sidebar %} -
- {%- endif %} -
- {% block body %} {% endblock %} -
- {%- if render_sidebar %} -
- {%- endif %} -
- {%- endblock %} - - {%- block sidebar2 %}{{ sidebar() }}{% endblock %} -
-
-{%- endblock %} - -{%- block relbar2 %}{{ relbar() }}{% endblock %} - -{%- block footer %} - -


-{%- endblock %} - - - diff --git a/docs/_themes/sphinx_rtd_theme/search.html b/docs/_themes/sphinx_rtd_theme/search.html deleted file mode 100755 index d8bbe690..00000000 --- a/docs/_themes/sphinx_rtd_theme/search.html +++ /dev/null @@ -1,50 +0,0 @@ -{# - basic/search.html - ~~~~~~~~~~~~~~~~~ - - Template for the search page. - - :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS. - :license: BSD, see LICENSE for details. -#} -{%- extends "layout.html" %} -{% set title = _('Search') %} -{% set script_files = script_files + ['_static/searchtools.js'] %} -{% block extrahead %} - - {# this is used when loading the search index using $.ajax fails, - such as on Chrome for documents on localhost #} - - {{ super() }} -{% endblock %} -{% block body %} - - - {% if search_performed %} -

{{ _('Search Results') }}

- {% if not search_results %} -

{{ _('Your search did not match any documents. Please make sure that all words are spelled correctly and that you\'ve selected enough categories.') }}

- {% endif %} - {% endif %} -
- {% if search_results %} -
    - {% for href, caption, context in search_results %} -
  • - {{ caption }} -

    {{ context|e }}

    -
  • - {% endfor %} -
- {% endif %} -
-{% endblock %} diff --git a/docs/_themes/sphinx_rtd_theme/searchbox.html b/docs/_themes/sphinx_rtd_theme/searchbox.html deleted file mode 100755 index f62545ea..00000000 --- a/docs/_themes/sphinx_rtd_theme/searchbox.html +++ /dev/null @@ -1,5 +0,0 @@ -
- - - -
diff --git a/docs/_themes/sphinx_rtd_theme/static/css/badge_only.css b/docs/_themes/sphinx_rtd_theme/static/css/badge_only.css deleted file mode 100755 index 7fccc414..00000000 --- a/docs/_themes/sphinx_rtd_theme/static/css/badge_only.css +++ /dev/null @@ -1 +0,0 @@ -.font-smooth,.icon:before{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:before,.clearfix:after{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:fontawesome-webfont;font-weight:normal;font-style:normal;src:url("../font/fontawesome_webfont.eot");src:url("../font/fontawesome_webfont.eot?#iefix") format("embedded-opentype"),url("../font/fontawesome_webfont.woff") format("woff"),url("../font/fontawesome_webfont.ttf") format("truetype"),url("../font/fontawesome_webfont.svg#fontawesome-webfont") format("svg")}.icon:before{display:inline-block;font-family:fontawesome-webfont;font-style:normal;font-weight:normal;line-height:1;text-decoration:inherit}a .icon{display:inline-block;text-decoration:inherit}li .icon{display:inline-block}li .icon-large:before,li .icon-large:before{width:1.875em}ul.icons{list-style-type:none;margin-left:2em;text-indent:-0.8em}ul.icons li .icon{width:0.8em}ul.icons li .icon-large:before,ul.icons li .icon-large:before{vertical-align:baseline}.icon-book:before{content:"\f02d"}.icon-caret-down:before{content:"\f0d7"}.icon-caret-up:before{content:"\f0d8"}.icon-caret-left:before{content:"\f0d9"}.icon-caret-right:before{content:"\f0da"}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;border-top:solid 10px #343131;font-family:"Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60;*zoom:1}.rst-versions .rst-current-version:before,.rst-versions 
.rst-current-version:after{display:table;content:""}.rst-versions .rst-current-version:after{clear:both}.rst-versions .rst-current-version .icon{color:#fcfcfc}.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:gray;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:solid 1px #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px}.rst-versions.rst-badge .icon-book{float:none}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge .rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width: 768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}img{width:100%;height:auto}} diff --git a/docs/_themes/sphinx_rtd_theme/static/css/theme.css b/docs/_themes/sphinx_rtd_theme/static/css/theme.css deleted file mode 100755 index a37f8d8c..00000000 --- a/docs/_themes/sphinx_rtd_theme/static/css/theme.css +++ /dev/null @@ -1 +0,0 @@ 
-*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}article,aside,details,figcaption,figure,footer,header,hgroup,nav,section{display:block}audio,canvas,video{display:inline-block;*display:inline;*zoom:1}audio:not([controls]){display:none}[hidden]{display:none}*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}html{font-size:100%;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}body{margin:0}a:hover,a:active{outline:0}abbr[title]{border-bottom:1px dotted}b,strong{font-weight:bold}blockquote{margin:0}dfn{font-style:italic}hr{display:block;height:1px;border:0;border-top:1px solid #ccc;margin:20px 0;padding:0}ins{background:#ff9;color:#000;text-decoration:none}mark{background:#ff0;color:#000;font-style:italic;font-weight:bold}pre,code,.rst-content tt,kbd,samp{font-family:monospace,serif;_font-family:"courier new",monospace;font-size:1em}pre{white-space:pre}q{quotes:none}q:before,q:after{content:"";content:none}small{font-size:85%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sup{top:-0.5em}sub{bottom:-0.25em}ul,ol,dl{margin:0;padding:0;list-style:none;list-style-image:none}li{list-style:none}dd{margin:0}img{border:0;-ms-interpolation-mode:bicubic;vertical-align:middle;max-width:100%}svg:not(:root){overflow:hidden}figure{margin:0}form{margin:0}fieldset{border:0;margin:0;padding:0}label{cursor:pointer}legend{border:0;*margin-left:-7px;padding:0;white-space:normal}button,input,select,textarea{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle}button,input{line-height:normal}button,input[type="button"],input[type="reset"],input[type="submit"]{cursor:pointer;-webkit-appearance:button;*overflow:visible}button[disabled],input[disabled]{cursor:default}input[type="checkbox"],input[type="radio"]{box-sizing:border-box;padding:0;*width:13px;*height:13px}input[type="search"]{-webkit-appearance:textfield;-moz-box-sizing:content-box;-webkit-box-sizing:content-box;box-
sizing:content-box}input[type="search"]::-webkit-search-decoration,input[type="search"]::-webkit-search-cancel-button{-webkit-appearance:none}button::-moz-focus-inner,input::-moz-focus-inner{border:0;padding:0}textarea{overflow:auto;vertical-align:top;resize:vertical}table{border-collapse:collapse;border-spacing:0}td{vertical-align:top}.chromeframe{margin:0.2em 0;background:#ccc;color:#000;padding:0.2em 0}.ir{display:block;border:0;text-indent:-999em;overflow:hidden;background-color:transparent;background-repeat:no-repeat;text-align:left;direction:ltr;*line-height:0}.ir br{display:none}.hidden{display:none !important;visibility:hidden}.visuallyhidden{border:0;clip:rect(0 0 0 0);height:1px;margin:-1px;overflow:hidden;padding:0;position:absolute;width:1px}.visuallyhidden.focusable:active,.visuallyhidden.focusable:focus{clip:auto;height:auto;margin:0;overflow:visible;position:static;width:auto}.invisible{visibility:hidden}.relative{position:relative}big,small{font-size:100%}@media print{html,body,section{background:none !important}*{box-shadow:none !important;text-shadow:none !important;filter:none !important;-ms-filter:none !important}a,a:visited{text-decoration:underline}.ir a:after,a[href^="javascript:"]:after,a[href^="#"]:after{content:""}pre,blockquote{page-break-inside:avoid}thead{display:table-header-group}tr,img{page-break-inside:avoid}img{max-width:100% !important}@page{margin:0.5cm}p,h2,h3{orphans:3;widows:3}h2,h3{page-break-after:avoid}}.font-smooth,.icon:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-tag-input-group .wy-tag .wy-tag-remove:before,.rst-content .admonition-title:before,.rst-content h1 .headerlink:before,.rst-content h2 .headerlink:before,.rst-content h3 .headerlink:before,.rst-content h4 
.headerlink:before,.rst-content h5 .headerlink:before,.rst-content h6 .headerlink:before,.rst-content dl dt .headerlink:before,.wy-alert,.rst-content .note,.rst-content .attention,.rst-content .caution,.rst-content .danger,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .tip,.rst-content .warning,.rst-content .seealso,.btn,input[type="text"],input[type="password"],input[type="email"],input[type="url"],input[type="date"],input[type="month"],input[type="time"],input[type="datetime"],input[type="datetime-local"],input[type="week"],input[type="number"],input[type="search"],input[type="tel"],input[type="color"],select,textarea,.wy-tag-input-group,.wy-menu-vertical li.on a,.wy-menu-vertical li.current>a,.wy-side-nav-search>a,.wy-side-nav-search .wy-dropdown>a,.wy-nav-top a{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:before,.clearfix:after{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:fontawesome-webfont;font-weight:normal;font-style:normal;src:url("../font/fontawesome_webfont.eot");src:url("../font/fontawesome_webfont.eot?#iefix") format("embedded-opentype"),url("../font/fontawesome_webfont.woff") format("woff"),url("../font/fontawesome_webfont.ttf") format("truetype"),url("../font/fontawesome_webfont.svg#fontawesome-webfont") format("svg")}.icon:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-tag-input-group .wy-tag .wy-tag-remove:before,.rst-content .admonition-title:before,.rst-content h1 .headerlink:before,.rst-content h2 .headerlink:before,.rst-content h3 .headerlink:before,.rst-content h4 .headerlink:before,.rst-content h5 .headerlink:before,.rst-content h6 .headerlink:before,.rst-content dl dt 
.headerlink:before{display:inline-block;font-family:fontawesome-webfont;font-style:normal;font-weight:normal;line-height:1;text-decoration:inherit}a .icon,a .wy-inline-validate.wy-inline-validate-success .wy-input-context,.wy-inline-validate.wy-inline-validate-success a .wy-input-context,a .wy-inline-validate.wy-inline-validate-danger .wy-input-context,.wy-inline-validate.wy-inline-validate-danger a .wy-input-context,a .wy-inline-validate.wy-inline-validate-warning .wy-input-context,.wy-inline-validate.wy-inline-validate-warning a .wy-input-context,a .wy-inline-validate.wy-inline-validate-info .wy-input-context,.wy-inline-validate.wy-inline-validate-info a .wy-input-context,a .wy-tag-input-group .wy-tag .wy-tag-remove,.wy-tag-input-group .wy-tag a .wy-tag-remove,a .rst-content .admonition-title,.rst-content a .admonition-title,a .rst-content h1 .headerlink,.rst-content h1 a .headerlink,a .rst-content h2 .headerlink,.rst-content h2 a .headerlink,a .rst-content h3 .headerlink,.rst-content h3 a .headerlink,a .rst-content h4 .headerlink,.rst-content h4 a .headerlink,a .rst-content h5 .headerlink,.rst-content h5 a .headerlink,a .rst-content h6 .headerlink,.rst-content h6 a .headerlink,a .rst-content dl dt .headerlink,.rst-content dl dt a .headerlink{display:inline-block;text-decoration:inherit}.icon-large:before{vertical-align:-10%;font-size:1.33333em}.btn .icon,.btn .wy-inline-validate.wy-inline-validate-success .wy-input-context,.wy-inline-validate.wy-inline-validate-success .btn .wy-input-context,.btn .wy-inline-validate.wy-inline-validate-danger .wy-input-context,.wy-inline-validate.wy-inline-validate-danger .btn .wy-input-context,.btn .wy-inline-validate.wy-inline-validate-warning .wy-input-context,.wy-inline-validate.wy-inline-validate-warning .btn .wy-input-context,.btn .wy-inline-validate.wy-inline-validate-info .wy-input-context,.wy-inline-validate.wy-inline-validate-info .btn .wy-input-context,.btn .wy-tag-input-group .wy-tag .wy-tag-remove,.wy-tag-input-group 
.wy-tag .btn .wy-tag-remove,.btn .rst-content .admonition-title,.rst-content .btn .admonition-title,.btn .rst-content h1 .headerlink,.rst-content h1 .btn .headerlink,.btn .rst-content h2 .headerlink,.rst-content h2 .btn .headerlink,.btn .rst-content h3 .headerlink,.rst-content h3 .btn .headerlink,.btn .rst-content h4 .headerlink,.rst-content h4 .btn .headerlink,.btn .rst-content h5 .headerlink,.rst-content h5 .btn .headerlink,.btn .rst-content h6 .headerlink,.rst-content h6 .btn .headerlink,.btn .rst-content dl dt .headerlink,.rst-content dl dt .btn .headerlink,.nav .icon,.nav .wy-inline-validate.wy-inline-validate-success .wy-input-context,.wy-inline-validate.wy-inline-validate-success .nav .wy-input-context,.nav .wy-inline-validate.wy-inline-validate-danger .wy-input-context,.wy-inline-validate.wy-inline-validate-danger .nav .wy-input-context,.nav .wy-inline-validate.wy-inline-validate-warning .wy-input-context,.wy-inline-validate.wy-inline-validate-warning .nav .wy-input-context,.nav .wy-inline-validate.wy-inline-validate-info .wy-input-context,.wy-inline-validate.wy-inline-validate-info .nav .wy-input-context,.nav .wy-tag-input-group .wy-tag .wy-tag-remove,.wy-tag-input-group .wy-tag .nav .wy-tag-remove,.nav .rst-content .admonition-title,.rst-content .nav .admonition-title,.nav .rst-content h1 .headerlink,.rst-content h1 .nav .headerlink,.nav .rst-content h2 .headerlink,.rst-content h2 .nav .headerlink,.nav .rst-content h3 .headerlink,.rst-content h3 .nav .headerlink,.nav .rst-content h4 .headerlink,.rst-content h4 .nav .headerlink,.nav .rst-content h5 .headerlink,.rst-content h5 .nav .headerlink,.nav .rst-content h6 .headerlink,.rst-content h6 .nav .headerlink,.nav .rst-content dl dt .headerlink,.rst-content dl dt .nav .headerlink{display:inline}.btn .icon.icon-large,.btn .wy-inline-validate.wy-inline-validate-success .icon-large.wy-input-context,.wy-inline-validate.wy-inline-validate-success .btn .icon-large.wy-input-context,.btn 
.wy-inline-validate.wy-inline-validate-danger .icon-large.wy-input-context,.wy-inline-validate.wy-inline-validate-danger .btn .icon-large.wy-input-context,.btn .wy-inline-validate.wy-inline-validate-warning .icon-large.wy-input-context,.wy-inline-validate.wy-inline-validate-warning .btn .icon-large.wy-input-context,.btn .wy-inline-validate.wy-inline-validate-info .icon-large.wy-input-context,.wy-inline-validate.wy-inline-validate-info .btn .icon-large.wy-input-context,.btn .wy-tag-input-group .wy-tag .icon-large.wy-tag-remove,.wy-tag-input-group .wy-tag .btn .icon-large.wy-tag-remove,.btn .rst-content .icon-large.admonition-title,.rst-content .btn .icon-large.admonition-title,.btn .rst-content h1 .icon-large.headerlink,.rst-content h1 .btn .icon-large.headerlink,.btn .rst-content h2 .icon-large.headerlink,.rst-content h2 .btn .icon-large.headerlink,.btn .rst-content h3 .icon-large.headerlink,.rst-content h3 .btn .icon-large.headerlink,.btn .rst-content h4 .icon-large.headerlink,.rst-content h4 .btn .icon-large.headerlink,.btn .rst-content h5 .icon-large.headerlink,.rst-content h5 .btn .icon-large.headerlink,.btn .rst-content h6 .icon-large.headerlink,.rst-content h6 .btn .icon-large.headerlink,.btn .rst-content dl dt .icon-large.headerlink,.rst-content dl dt .btn .icon-large.headerlink,.nav .icon.icon-large,.nav .wy-inline-validate.wy-inline-validate-success .icon-large.wy-input-context,.wy-inline-validate.wy-inline-validate-success .nav .icon-large.wy-input-context,.nav .wy-inline-validate.wy-inline-validate-danger .icon-large.wy-input-context,.wy-inline-validate.wy-inline-validate-danger .nav .icon-large.wy-input-context,.nav .wy-inline-validate.wy-inline-validate-warning .icon-large.wy-input-context,.wy-inline-validate.wy-inline-validate-warning .nav .icon-large.wy-input-context,.nav .wy-inline-validate.wy-inline-validate-info .icon-large.wy-input-context,.wy-inline-validate.wy-inline-validate-info .nav .icon-large.wy-input-context,.nav .wy-tag-input-group 
.wy-tag .icon-large.wy-tag-remove,.wy-tag-input-group .wy-tag .nav .icon-large.wy-tag-remove,.nav .rst-content .icon-large.admonition-title,.rst-content .nav .icon-large.admonition-title,.nav .rst-content h1 .icon-large.headerlink,.rst-content h1 .nav .icon-large.headerlink,.nav .rst-content h2 .icon-large.headerlink,.rst-content h2 .nav .icon-large.headerlink,.nav .rst-content h3 .icon-large.headerlink,.rst-content h3 .nav .icon-large.headerlink,.nav .rst-content h4 .icon-large.headerlink,.rst-content h4 .nav .icon-large.headerlink,.nav .rst-content h5 .icon-large.headerlink,.rst-content h5 .nav .icon-large.headerlink,.nav .rst-content h6 .icon-large.headerlink,.rst-content h6 .nav .icon-large.headerlink,.nav .rst-content dl dt .icon-large.headerlink,.rst-content dl dt .nav .icon-large.headerlink{line-height:0.9em}.btn .icon.icon-spin,.btn .wy-inline-validate.wy-inline-validate-success .icon-spin.wy-input-context,.wy-inline-validate.wy-inline-validate-success .btn .icon-spin.wy-input-context,.btn .wy-inline-validate.wy-inline-validate-danger .icon-spin.wy-input-context,.wy-inline-validate.wy-inline-validate-danger .btn .icon-spin.wy-input-context,.btn .wy-inline-validate.wy-inline-validate-warning .icon-spin.wy-input-context,.wy-inline-validate.wy-inline-validate-warning .btn .icon-spin.wy-input-context,.btn .wy-inline-validate.wy-inline-validate-info .icon-spin.wy-input-context,.wy-inline-validate.wy-inline-validate-info .btn .icon-spin.wy-input-context,.btn .wy-tag-input-group .wy-tag .icon-spin.wy-tag-remove,.wy-tag-input-group .wy-tag .btn .icon-spin.wy-tag-remove,.btn .rst-content .icon-spin.admonition-title,.rst-content .btn .icon-spin.admonition-title,.btn .rst-content h1 .icon-spin.headerlink,.rst-content h1 .btn .icon-spin.headerlink,.btn .rst-content h2 .icon-spin.headerlink,.rst-content h2 .btn .icon-spin.headerlink,.btn .rst-content h3 .icon-spin.headerlink,.rst-content h3 .btn .icon-spin.headerlink,.btn .rst-content h4 
.icon-spin.headerlink,.rst-content h4 .btn .icon-spin.headerlink,.btn .rst-content h5 .icon-spin.headerlink,.rst-content h5 .btn .icon-spin.headerlink,.btn .rst-content h6 .icon-spin.headerlink,.rst-content h6 .btn .icon-spin.headerlink,.btn .rst-content dl dt .icon-spin.headerlink,.rst-content dl dt .btn .icon-spin.headerlink,.nav .icon.icon-spin,.nav .wy-inline-validate.wy-inline-validate-success .icon-spin.wy-input-context,.wy-inline-validate.wy-inline-validate-success .nav .icon-spin.wy-input-context,.nav .wy-inline-validate.wy-inline-validate-danger .icon-spin.wy-input-context,.wy-inline-validate.wy-inline-validate-danger .nav .icon-spin.wy-input-context,.nav .wy-inline-validate.wy-inline-validate-warning .icon-spin.wy-input-context,.wy-inline-validate.wy-inline-validate-warning .nav .icon-spin.wy-input-context,.nav .wy-inline-validate.wy-inline-validate-info .icon-spin.wy-input-context,.wy-inline-validate.wy-inline-validate-info .nav .icon-spin.wy-input-context,.nav .wy-tag-input-group .wy-tag .icon-spin.wy-tag-remove,.wy-tag-input-group .wy-tag .nav .icon-spin.wy-tag-remove,.nav .rst-content .icon-spin.admonition-title,.rst-content .nav .icon-spin.admonition-title,.nav .rst-content h1 .icon-spin.headerlink,.rst-content h1 .nav .icon-spin.headerlink,.nav .rst-content h2 .icon-spin.headerlink,.rst-content h2 .nav .icon-spin.headerlink,.nav .rst-content h3 .icon-spin.headerlink,.rst-content h3 .nav .icon-spin.headerlink,.nav .rst-content h4 .icon-spin.headerlink,.rst-content h4 .nav .icon-spin.headerlink,.nav .rst-content h5 .icon-spin.headerlink,.rst-content h5 .nav .icon-spin.headerlink,.nav .rst-content h6 .icon-spin.headerlink,.rst-content h6 .nav .icon-spin.headerlink,.nav .rst-content dl dt .icon-spin.headerlink,.rst-content dl dt .nav .icon-spin.headerlink{display:inline-block}.btn.icon:before,.wy-inline-validate.wy-inline-validate-success .btn.wy-input-context:before,.wy-inline-validate.wy-inline-validate-danger 
.btn.wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .btn.wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .btn.wy-input-context:before,.wy-tag-input-group .wy-tag .btn.wy-tag-remove:before,.rst-content .btn.admonition-title:before,.rst-content h1 .btn.headerlink:before,.rst-content h2 .btn.headerlink:before,.rst-content h3 .btn.headerlink:before,.rst-content h4 .btn.headerlink:before,.rst-content h5 .btn.headerlink:before,.rst-content h6 .btn.headerlink:before,.rst-content dl dt .btn.headerlink:before{opacity:0.5;-webkit-transition:opacity 0.05s ease-in;-moz-transition:opacity 0.05s ease-in;transition:opacity 0.05s ease-in}.btn.icon:hover:before,.wy-inline-validate.wy-inline-validate-success .btn.wy-input-context:hover:before,.wy-inline-validate.wy-inline-validate-danger .btn.wy-input-context:hover:before,.wy-inline-validate.wy-inline-validate-warning .btn.wy-input-context:hover:before,.wy-inline-validate.wy-inline-validate-info .btn.wy-input-context:hover:before,.wy-tag-input-group .wy-tag .btn.wy-tag-remove:hover:before,.rst-content .btn.admonition-title:hover:before,.rst-content h1 .btn.headerlink:hover:before,.rst-content h2 .btn.headerlink:hover:before,.rst-content h3 .btn.headerlink:hover:before,.rst-content h4 .btn.headerlink:hover:before,.rst-content h5 .btn.headerlink:hover:before,.rst-content h6 .btn.headerlink:hover:before,.rst-content dl dt .btn.headerlink:hover:before{opacity:1}.btn-mini .icon:before,.btn-mini .wy-inline-validate.wy-inline-validate-success .wy-input-context:before,.wy-inline-validate.wy-inline-validate-success .btn-mini .wy-input-context:before,.btn-mini .wy-inline-validate.wy-inline-validate-danger .wy-input-context:before,.wy-inline-validate.wy-inline-validate-danger .btn-mini .wy-input-context:before,.btn-mini .wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .btn-mini .wy-input-context:before,.btn-mini 
.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .btn-mini .wy-input-context:before,.btn-mini .wy-tag-input-group .wy-tag .wy-tag-remove:before,.wy-tag-input-group .wy-tag .btn-mini .wy-tag-remove:before,.btn-mini .rst-content .admonition-title:before,.rst-content .btn-mini .admonition-title:before,.btn-mini .rst-content h1 .headerlink:before,.rst-content h1 .btn-mini .headerlink:before,.btn-mini .rst-content h2 .headerlink:before,.rst-content h2 .btn-mini .headerlink:before,.btn-mini .rst-content h3 .headerlink:before,.rst-content h3 .btn-mini .headerlink:before,.btn-mini .rst-content h4 .headerlink:before,.rst-content h4 .btn-mini .headerlink:before,.btn-mini .rst-content h5 .headerlink:before,.rst-content h5 .btn-mini .headerlink:before,.btn-mini .rst-content h6 .headerlink:before,.rst-content h6 .btn-mini .headerlink:before,.btn-mini .rst-content dl dt .headerlink:before,.rst-content dl dt .btn-mini .headerlink:before{font-size:14px;vertical-align:-15%}li .icon,li .wy-inline-validate.wy-inline-validate-success .wy-input-context,.wy-inline-validate.wy-inline-validate-success li .wy-input-context,li .wy-inline-validate.wy-inline-validate-danger .wy-input-context,.wy-inline-validate.wy-inline-validate-danger li .wy-input-context,li .wy-inline-validate.wy-inline-validate-warning .wy-input-context,.wy-inline-validate.wy-inline-validate-warning li .wy-input-context,li .wy-inline-validate.wy-inline-validate-info .wy-input-context,.wy-inline-validate.wy-inline-validate-info li .wy-input-context,li .wy-tag-input-group .wy-tag .wy-tag-remove,.wy-tag-input-group .wy-tag li .wy-tag-remove,li .rst-content .admonition-title,.rst-content li .admonition-title,li .rst-content h1 .headerlink,.rst-content h1 li .headerlink,li .rst-content h2 .headerlink,.rst-content h2 li .headerlink,li .rst-content h3 .headerlink,.rst-content h3 li .headerlink,li .rst-content h4 .headerlink,.rst-content h4 li .headerlink,li 
.rst-content h5 .headerlink,.rst-content h5 li .headerlink,li .rst-content h6 .headerlink,.rst-content h6 li .headerlink,li .rst-content dl dt .headerlink,.rst-content dl dt li .headerlink{display:inline-block}li .icon-large:before,li .icon-large:before{width:1.875em}ul.icons{list-style-type:none;margin-left:2em;text-indent:-0.8em}ul.icons li .icon,ul.icons li .wy-inline-validate.wy-inline-validate-success .wy-input-context,.wy-inline-validate.wy-inline-validate-success ul.icons li .wy-input-context,ul.icons li .wy-inline-validate.wy-inline-validate-danger .wy-input-context,.wy-inline-validate.wy-inline-validate-danger ul.icons li .wy-input-context,ul.icons li .wy-inline-validate.wy-inline-validate-warning .wy-input-context,.wy-inline-validate.wy-inline-validate-warning ul.icons li .wy-input-context,ul.icons li .wy-inline-validate.wy-inline-validate-info .wy-input-context,.wy-inline-validate.wy-inline-validate-info ul.icons li .wy-input-context,ul.icons li .wy-tag-input-group .wy-tag .wy-tag-remove,.wy-tag-input-group .wy-tag ul.icons li .wy-tag-remove,ul.icons li .rst-content .admonition-title,.rst-content ul.icons li .admonition-title,ul.icons li .rst-content h1 .headerlink,.rst-content h1 ul.icons li .headerlink,ul.icons li .rst-content h2 .headerlink,.rst-content h2 ul.icons li .headerlink,ul.icons li .rst-content h3 .headerlink,.rst-content h3 ul.icons li .headerlink,ul.icons li .rst-content h4 .headerlink,.rst-content h4 ul.icons li .headerlink,ul.icons li .rst-content h5 .headerlink,.rst-content h5 ul.icons li .headerlink,ul.icons li .rst-content h6 .headerlink,.rst-content h6 ul.icons li .headerlink,ul.icons li .rst-content dl dt .headerlink,.rst-content dl dt ul.icons li .headerlink{width:0.8em}ul.icons li .icon-large:before,ul.icons li 
.icon-large:before{vertical-align:baseline}.icon-glass:before{content:"\f000"}.icon-music:before{content:"\f001"}.icon-search:before{content:"\f002"}.icon-envelope-alt:before{content:"\f003"}.icon-heart:before{content:"\f004"}.icon-star:before{content:"\f005"}.icon-star-empty:before{content:"\f006"}.icon-user:before{content:"\f007"}.icon-film:before{content:"\f008"}.icon-th-large:before{content:"\f009"}.icon-th:before{content:"\f00a"}.icon-th-list:before{content:"\f00b"}.icon-ok:before{content:"\f00c"}.icon-remove:before,.wy-tag-input-group .wy-tag .wy-tag-remove:before{content:"\f00d"}.icon-zoom-in:before{content:"\f00e"}.icon-zoom-out:before{content:"\f010"}.icon-power-off:before,.icon-off:before{content:"\f011"}.icon-signal:before{content:"\f012"}.icon-gear:before,.icon-cog:before{content:"\f013"}.icon-trash:before{content:"\f014"}.icon-home:before{content:"\f015"}.icon-file-alt:before{content:"\f016"}.icon-time:before{content:"\f017"}.icon-road:before{content:"\f018"}.icon-download-alt:before{content:"\f019"}.icon-download:before{content:"\f01a"}.icon-upload:before{content:"\f01b"}.icon-inbox:before{content:"\f01c"}.icon-play-circle:before{content:"\f01d"}.icon-rotate-right:before,.icon-repeat:before{content:"\f01e"}.icon-refresh:before{content:"\f021"}.icon-list-alt:before{content:"\f022"}.icon-lock:before{content:"\f023"}.icon-flag:before{content:"\f024"}.icon-headphones:before{content:"\f025"}.icon-volume-off:before{content:"\f026"}.icon-volume-down:before{content:"\f027"}.icon-volume-up:before{content:"\f028"}.icon-qrcode:before{content:"\f029"}.icon-barcode:before{content:"\f02a"}.icon-tag:before{content:"\f02b"}.icon-tags:before{content:"\f02c"}.icon-book:before{content:"\f02d"}.icon-bookmark:before{content:"\f02e"}.icon-print:before{content:"\f02f"}.icon-camera:before{content:"\f030"}.icon-font:before{content:"\f031"}.icon-bold:before{content:"\f032"}.icon-italic:before{content:"\f033"}.icon-text-height:before{content:"\f034"}.icon-text-width:before{conte
nt:"\f035"}.icon-align-left:before{content:"\f036"}.icon-align-center:before{content:"\f037"}.icon-align-right:before{content:"\f038"}.icon-align-justify:before{content:"\f039"}.icon-list:before{content:"\f03a"}.icon-indent-left:before{content:"\f03b"}.icon-indent-right:before{content:"\f03c"}.icon-facetime-video:before{content:"\f03d"}.icon-picture:before{content:"\f03e"}.icon-pencil:before{content:"\f040"}.icon-map-marker:before{content:"\f041"}.icon-adjust:before{content:"\f042"}.icon-tint:before{content:"\f043"}.icon-edit:before{content:"\f044"}.icon-share:before{content:"\f045"}.icon-check:before{content:"\f046"}.icon-move:before{content:"\f047"}.icon-step-backward:before{content:"\f048"}.icon-fast-backward:before{content:"\f049"}.icon-backward:before{content:"\f04a"}.icon-play:before{content:"\f04b"}.icon-pause:before{content:"\f04c"}.icon-stop:before{content:"\f04d"}.icon-forward:before{content:"\f04e"}.icon-fast-forward:before{content:"\f050"}.icon-step-forward:before{content:"\f051"}.icon-eject:before{content:"\f052"}.icon-chevron-left:before{content:"\f053"}.icon-chevron-right:before{content:"\f054"}.icon-plus-sign:before{content:"\f055"}.icon-minus-sign:before{content:"\f056"}.icon-remove-sign:before,.wy-inline-validate.wy-inline-validate-danger 
.wy-input-context:before{content:"\f057"}.icon-ok-sign:before{content:"\f058"}.icon-question-sign:before{content:"\f059"}.icon-info-sign:before{content:"\f05a"}.icon-screenshot:before{content:"\f05b"}.icon-remove-circle:before{content:"\f05c"}.icon-ok-circle:before{content:"\f05d"}.icon-ban-circle:before{content:"\f05e"}.icon-arrow-left:before{content:"\f060"}.icon-arrow-right:before{content:"\f061"}.icon-arrow-up:before{content:"\f062"}.icon-arrow-down:before{content:"\f063"}.icon-mail-forward:before,.icon-share-alt:before{content:"\f064"}.icon-resize-full:before{content:"\f065"}.icon-resize-small:before{content:"\f066"}.icon-plus:before{content:"\f067"}.icon-minus:before{content:"\f068"}.icon-asterisk:before{content:"\f069"}.icon-exclamation-sign:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.rst-content .admonition-title:before{content:"\f06a"}.icon-gift:before{content:"\f06b"}.icon-leaf:before{content:"\f06c"}.icon-fire:before{content:"\f06d"}.icon-eye-open:before{content:"\f06e"}.icon-eye-close:before{content:"\f070"}.icon-warning-sign:before{content:"\f071"}.icon-plane:before{content:"\f072"}.icon-calendar:before{content:"\f073"}.icon-random:before{content:"\f074"}.icon-comment:before{content:"\f075"}.icon-magnet:before{content:"\f076"}.icon-chevron-up:before{content:"\f077"}.icon-chevron-down:before{content:"\f078"}.icon-retweet:before{content:"\f079"}.icon-shopping-cart:before{content:"\f07a"}.icon-folder-close:before{content:"\f07b"}.icon-folder-open:before{content:"\f07c"}.icon-resize-vertical:before{content:"\f07d"}.icon-resize-horizontal:before{content:"\f07e"}.icon-bar-chart:before{content:"\f080"}.icon-twitter-sign:before{content:"\f081"}.icon-facebook-sign:before{content:"\f082"}.icon-camera-retro:before{content:"\f083"}.icon-key:before{content:"\f084"}.icon-gears:before,.icon-cogs:before{content:"\f085"}.icon-comments:before{content:"\f086"}.icon-thu
mbs-up-alt:before{content:"\f087"}.icon-thumbs-down-alt:before{content:"\f088"}.icon-star-half:before{content:"\f089"}.icon-heart-empty:before{content:"\f08a"}.icon-signout:before{content:"\f08b"}.icon-linkedin-sign:before{content:"\f08c"}.icon-pushpin:before{content:"\f08d"}.icon-external-link:before{content:"\f08e"}.icon-signin:before{content:"\f090"}.icon-trophy:before{content:"\f091"}.icon-github-sign:before{content:"\f092"}.icon-upload-alt:before{content:"\f093"}.icon-lemon:before{content:"\f094"}.icon-phone:before{content:"\f095"}.icon-unchecked:before,.icon-check-empty:before{content:"\f096"}.icon-bookmark-empty:before{content:"\f097"}.icon-phone-sign:before{content:"\f098"}.icon-twitter:before{content:"\f099"}.icon-facebook:before{content:"\f09a"}.icon-github:before{content:"\f09b"}.icon-unlock:before{content:"\f09c"}.icon-credit-card:before{content:"\f09d"}.icon-rss:before{content:"\f09e"}.icon-hdd:before{content:"\f0a0"}.icon-bullhorn:before{content:"\f0a1"}.icon-bell:before{content:"\f0a2"}.icon-certificate:before{content:"\f0a3"}.icon-hand-right:before{content:"\f0a4"}.icon-hand-left:before{content:"\f0a5"}.icon-hand-up:before{content:"\f0a6"}.icon-hand-down:before{content:"\f0a7"}.icon-circle-arrow-left:before{content:"\f0a8"}.icon-circle-arrow-right:before{content:"\f0a9"}.icon-circle-arrow-up:before{content:"\f0aa"}.icon-circle-arrow-down:before{content:"\f0ab"}.icon-globe:before{content:"\f0ac"}.icon-wrench:before{content:"\f0ad"}.icon-tasks:before{content:"\f0ae"}.icon-filter:before{content:"\f0b0"}.icon-briefcase:before{content:"\f0b1"}.icon-fullscreen:before{content:"\f0b2"}.icon-group:before{content:"\f0c0"}.icon-link:before{content:"\f0c1"}.icon-cloud:before{content:"\f0c2"}.icon-beaker:before{content:"\f0c3"}.icon-cut:before{content:"\f0c4"}.icon-copy:before{content:"\f0c5"}.icon-paperclip:before,.icon-paper-clip:before{content:"\f0c6"}.icon-save:before{content:"\f0c7"}.icon-sign-blank:before{content:"\f0c8"}.icon-reorder:before{content:"\f0c9"
}.icon-list-ul:before{content:"\f0ca"}.icon-list-ol:before{content:"\f0cb"}.icon-strikethrough:before{content:"\f0cc"}.icon-underline:before{content:"\f0cd"}.icon-table:before{content:"\f0ce"}.icon-magic:before{content:"\f0d0"}.icon-truck:before{content:"\f0d1"}.icon-pinterest:before{content:"\f0d2"}.icon-pinterest-sign:before{content:"\f0d3"}.icon-google-plus-sign:before{content:"\f0d4"}.icon-google-plus:before{content:"\f0d5"}.icon-money:before{content:"\f0d6"}.icon-caret-down:before{content:"\f0d7"}.icon-caret-up:before{content:"\f0d8"}.icon-caret-left:before{content:"\f0d9"}.icon-caret-right:before{content:"\f0da"}.icon-columns:before{content:"\f0db"}.icon-sort:before{content:"\f0dc"}.icon-sort-down:before{content:"\f0dd"}.icon-sort-up:before{content:"\f0de"}.icon-envelope:before{content:"\f0e0"}.icon-linkedin:before{content:"\f0e1"}.icon-rotate-left:before,.icon-undo:before{content:"\f0e2"}.icon-legal:before{content:"\f0e3"}.icon-dashboard:before{content:"\f0e4"}.icon-comment-alt:before{content:"\f0e5"}.icon-comments-alt:before{content:"\f0e6"}.icon-bolt:before{content:"\f0e7"}.icon-sitemap:before{content:"\f0e8"}.icon-umbrella:before{content:"\f0e9"}.icon-paste:before{content:"\f0ea"}.icon-lightbulb:before{content:"\f0eb"}.icon-exchange:before{content:"\f0ec"}.icon-cloud-download:before{content:"\f0ed"}.icon-cloud-upload:before{content:"\f0ee"}.icon-user-md:before{content:"\f0f0"}.icon-stethoscope:before{content:"\f0f1"}.icon-suitcase:before{content:"\f0f2"}.icon-bell-alt:before{content:"\f0f3"}.icon-coffee:before{content:"\f0f4"}.icon-food:before{content:"\f0f5"}.icon-file-text-alt:before{content:"\f0f6"}.icon-building:before{content:"\f0f7"}.icon-hospital:before{content:"\f0f8"}.icon-ambulance:before{content:"\f0f9"}.icon-medkit:before{content:"\f0fa"}.icon-fighter-jet:before{content:"\f0fb"}.icon-beer:before{content:"\f0fc"}.icon-h-sign:before{content:"\f0fd"}.icon-plus-sign-alt:before{content:"\f0fe"}.icon-double-angle-left:before{content:"\f100"}.icon-dou
ble-angle-right:before{content:"\f101"}.icon-double-angle-up:before{content:"\f102"}.icon-double-angle-down:before{content:"\f103"}.icon-angle-left:before{content:"\f104"}.icon-angle-right:before{content:"\f105"}.icon-angle-up:before{content:"\f106"}.icon-angle-down:before{content:"\f107"}.icon-desktop:before{content:"\f108"}.icon-laptop:before{content:"\f109"}.icon-tablet:before{content:"\f10a"}.icon-mobile-phone:before{content:"\f10b"}.icon-circle-blank:before{content:"\f10c"}.icon-quote-left:before{content:"\f10d"}.icon-quote-right:before{content:"\f10e"}.icon-spinner:before{content:"\f110"}.icon-circle:before{content:"\f111"}.icon-mail-reply:before,.icon-reply:before{content:"\f112"}.icon-github-alt:before{content:"\f113"}.icon-folder-close-alt:before{content:"\f114"}.icon-folder-open-alt:before{content:"\f115"}.icon-expand-alt:before{content:"\f116"}.icon-collapse-alt:before{content:"\f117"}.icon-smile:before{content:"\f118"}.icon-frown:before{content:"\f119"}.icon-meh:before{content:"\f11a"}.icon-gamepad:before{content:"\f11b"}.icon-keyboard:before{content:"\f11c"}.icon-flag-alt:before{content:"\f11d"}.icon-flag-checkered:before{content:"\f11e"}.icon-terminal:before{content:"\f120"}.icon-code:before{content:"\f121"}.icon-reply-all:before{content:"\f122"}.icon-mail-reply-all:before{content:"\f122"}.icon-star-half-full:before,.icon-star-half-empty:before{content:"\f123"}.icon-location-arrow:before{content:"\f124"}.icon-crop:before{content:"\f125"}.icon-code-fork:before{content:"\f126"}.icon-unlink:before{content:"\f127"}.icon-question:before{content:"\f128"}.icon-info:before{content:"\f129"}.icon-exclamation:before{content:"\f12a"}.icon-superscript:before{content:"\f12b"}.icon-subscript:before{content:"\f12c"}.icon-eraser:before{content:"\f12d"}.icon-puzzle-piece:before{content:"\f12e"}.icon-microphone:before{content:"\f130"}.icon-microphone-off:before{content:"\f131"}.icon-shield:before{content:"\f132"}.icon-calendar-empty:before{content:"\f133"}.icon-fire-exti
nguisher:before{content:"\f134"}.icon-rocket:before{content:"\f135"}.icon-maxcdn:before{content:"\f136"}.icon-chevron-sign-left:before{content:"\f137"}.icon-chevron-sign-right:before{content:"\f138"}.icon-chevron-sign-up:before{content:"\f139"}.icon-chevron-sign-down:before{content:"\f13a"}.icon-html5:before{content:"\f13b"}.icon-css3:before{content:"\f13c"}.icon-anchor:before{content:"\f13d"}.icon-unlock-alt:before{content:"\f13e"}.icon-bullseye:before{content:"\f140"}.icon-ellipsis-horizontal:before{content:"\f141"}.icon-ellipsis-vertical:before{content:"\f142"}.icon-rss-sign:before{content:"\f143"}.icon-play-sign:before{content:"\f144"}.icon-ticket:before{content:"\f145"}.icon-minus-sign-alt:before{content:"\f146"}.icon-check-minus:before{content:"\f147"}.icon-level-up:before{content:"\f148"}.icon-level-down:before{content:"\f149"}.icon-check-sign:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before{content:"\f14a"}.icon-edit-sign:before{content:"\f14b"}.icon-external-link-sign:before{content:"\f14c"}.icon-share-sign:before{content:"\f14d"}.icon-compass:before{content:"\f14e"}.icon-collapse:before{content:"\f150"}.icon-collapse-top:before{content:"\f151"}.icon-expand:before{content:"\f152"}.icon-euro:before,.icon-eur:before{content:"\f153"}.icon-gbp:before{content:"\f154"}.icon-dollar:before,.icon-usd:before{content:"\f155"}.icon-rupee:before,.icon-inr:before{content:"\f156"}.icon-yen:before,.icon-jpy:before{content:"\f157"}.icon-renminbi:before,.icon-cny:before{content:"\f158"}.icon-won:before,.icon-krw:before{content:"\f159"}.icon-bitcoin:before,.icon-btc:before{content:"\f15a"}.icon-file:before{content:"\f15b"}.icon-file-text:before{content:"\f15c"}.icon-sort-by-alphabet:before{content:"\f15d"}.icon-sort-by-alphabet-alt:before{content:"\f15e"}.icon-sort-by-attributes:before{content:"\f160"}.icon-sort-by-attributes-alt:before{content:"\f161"}.icon-sort-by-order:before{content:"\f162"}.icon-sort-by-order-alt:before{content:"\f163"}.icon
-thumbs-up:before{content:"\f164"}.icon-thumbs-down:before{content:"\f165"}.icon-youtube-sign:before{content:"\f166"}.icon-youtube:before{content:"\f167"}.icon-xing:before{content:"\f168"}.icon-xing-sign:before{content:"\f169"}.icon-youtube-play:before{content:"\f16a"}.icon-dropbox:before{content:"\f16b"}.icon-stackexchange:before{content:"\f16c"}.icon-instagram:before{content:"\f16d"}.icon-flickr:before{content:"\f16e"}.icon-adn:before{content:"\f170"}.icon-bitbucket:before{content:"\f171"}.icon-bitbucket-sign:before{content:"\f172"}.icon-tumblr:before{content:"\f173"}.icon-tumblr-sign:before{content:"\f174"}.icon-long-arrow-down:before{content:"\f175"}.icon-long-arrow-up:before{content:"\f176"}.icon-long-arrow-left:before{content:"\f177"}.icon-long-arrow-right:before{content:"\f178"}.icon-apple:before{content:"\f179"}.icon-windows:before{content:"\f17a"}.icon-android:before{content:"\f17b"}.icon-linux:before{content:"\f17c"}.icon-dribbble:before{content:"\f17d"}.icon-skype:before{content:"\f17e"}.icon-foursquare:before{content:"\f180"}.icon-trello:before{content:"\f181"}.icon-female:before{content:"\f182"}.icon-male:before{content:"\f183"}.icon-gittip:before{content:"\f184"}.icon-sun:before{content:"\f185"}.icon-moon:before{content:"\f186"}.icon-archive:before{content:"\f187"}.icon-bug:before{content:"\f188"}.icon-vk:before{content:"\f189"}.icon-weibo:before{content:"\f18a"}.icon-renren:before{content:"\f18b"}.wy-alert,.rst-content .note,.rst-content .attention,.rst-content .caution,.rst-content .danger,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .tip,.rst-content .warning,.rst-content .seealso{padding:12px;line-height:24px;margin-bottom:24px}.wy-alert-title,.rst-content .admonition-title{color:#fff;font-weight:bold;display:block;color:#fff;background:transparent;margin:-12px;padding:6px 12px;margin-bottom:12px}.wy-alert.wy-alert-danger,.rst-content .wy-alert-danger.note,.rst-content .wy-alert-danger.attention,.rst-content 
.wy-alert-danger.caution,.rst-content .danger,.rst-content .error,.rst-content .wy-alert-danger.hint,.rst-content .wy-alert-danger.important,.rst-content .wy-alert-danger.tip,.rst-content .wy-alert-danger.warning,.rst-content .wy-alert-danger.seealso{background:#fdf3f2}.wy-alert.wy-alert-danger .wy-alert-title,.rst-content .wy-alert-danger.note .wy-alert-title,.rst-content .wy-alert-danger.attention .wy-alert-title,.rst-content .wy-alert-danger.caution .wy-alert-title,.rst-content .danger .wy-alert-title,.rst-content .error .wy-alert-title,.rst-content .wy-alert-danger.hint .wy-alert-title,.rst-content .wy-alert-danger.important .wy-alert-title,.rst-content .wy-alert-danger.tip .wy-alert-title,.rst-content .wy-alert-danger.warning .wy-alert-title,.rst-content .wy-alert-danger.seealso .wy-alert-title,.wy-alert.wy-alert-danger .rst-content .admonition-title,.rst-content .wy-alert.wy-alert-danger .admonition-title,.rst-content .wy-alert-danger.note .admonition-title,.rst-content .wy-alert-danger.attention .admonition-title,.rst-content .wy-alert-danger.caution .admonition-title,.rst-content .danger .admonition-title,.rst-content .error .admonition-title,.rst-content .wy-alert-danger.hint .admonition-title,.rst-content .wy-alert-danger.important .admonition-title,.rst-content .wy-alert-danger.tip .admonition-title,.rst-content .wy-alert-danger.warning .admonition-title,.rst-content .wy-alert-danger.seealso .admonition-title{background:#f29f97}.wy-alert.wy-alert-warning,.rst-content .wy-alert-warning.note,.rst-content .attention,.rst-content .caution,.rst-content .wy-alert-warning.danger,.rst-content .wy-alert-warning.error,.rst-content .wy-alert-warning.hint,.rst-content .wy-alert-warning.important,.rst-content .wy-alert-warning.tip,.rst-content .warning,.rst-content .wy-alert-warning.seealso{background:#ffedcc}.wy-alert.wy-alert-warning .wy-alert-title,.rst-content .wy-alert-warning.note .wy-alert-title,.rst-content .attention .wy-alert-title,.rst-content .caution 
.wy-alert-title,.rst-content .wy-alert-warning.danger .wy-alert-title,.rst-content .wy-alert-warning.error .wy-alert-title,.rst-content .wy-alert-warning.hint .wy-alert-title,.rst-content .wy-alert-warning.important .wy-alert-title,.rst-content .wy-alert-warning.tip .wy-alert-title,.rst-content .warning .wy-alert-title,.rst-content .wy-alert-warning.seealso .wy-alert-title,.wy-alert.wy-alert-warning .rst-content .admonition-title,.rst-content .wy-alert.wy-alert-warning .admonition-title,.rst-content .wy-alert-warning.note .admonition-title,.rst-content .attention .admonition-title,.rst-content .caution .admonition-title,.rst-content .wy-alert-warning.danger .admonition-title,.rst-content .wy-alert-warning.error .admonition-title,.rst-content .wy-alert-warning.hint .admonition-title,.rst-content .wy-alert-warning.important .admonition-title,.rst-content .wy-alert-warning.tip .admonition-title,.rst-content .warning .admonition-title,.rst-content .wy-alert-warning.seealso .admonition-title{background:#f0b37e}.wy-alert.wy-alert-info,.rst-content .note,.rst-content .wy-alert-info.attention,.rst-content .wy-alert-info.caution,.rst-content .wy-alert-info.danger,.rst-content .wy-alert-info.error,.rst-content .wy-alert-info.hint,.rst-content .wy-alert-info.important,.rst-content .wy-alert-info.tip,.rst-content .wy-alert-info.warning,.rst-content .seealso{background:#e7f2fa}.wy-alert.wy-alert-info .wy-alert-title,.rst-content .note .wy-alert-title,.rst-content .wy-alert-info.attention .wy-alert-title,.rst-content .wy-alert-info.caution .wy-alert-title,.rst-content .wy-alert-info.danger .wy-alert-title,.rst-content .wy-alert-info.error .wy-alert-title,.rst-content .wy-alert-info.hint .wy-alert-title,.rst-content .wy-alert-info.important .wy-alert-title,.rst-content .wy-alert-info.tip .wy-alert-title,.rst-content .wy-alert-info.warning .wy-alert-title,.rst-content .seealso .wy-alert-title,.wy-alert.wy-alert-info .rst-content .admonition-title,.rst-content 
.wy-alert.wy-alert-info .admonition-title,.rst-content .note .admonition-title,.rst-content .wy-alert-info.attention .admonition-title,.rst-content .wy-alert-info.caution .admonition-title,.rst-content .wy-alert-info.danger .admonition-title,.rst-content .wy-alert-info.error .admonition-title,.rst-content .wy-alert-info.hint .admonition-title,.rst-content .wy-alert-info.important .admonition-title,.rst-content .wy-alert-info.tip .admonition-title,.rst-content .wy-alert-info.warning .admonition-title,.rst-content .seealso .admonition-title{background:#6ab0de}.wy-alert.wy-alert-success,.rst-content .wy-alert-success.note,.rst-content .wy-alert-success.attention,.rst-content .wy-alert-success.caution,.rst-content .wy-alert-success.danger,.rst-content .wy-alert-success.error,.rst-content .hint,.rst-content .important,.rst-content .tip,.rst-content .wy-alert-success.warning,.rst-content .wy-alert-success.seealso{background:#dbfaf4}.wy-alert.wy-alert-success .wy-alert-title,.rst-content .wy-alert-success.note .wy-alert-title,.rst-content .wy-alert-success.attention .wy-alert-title,.rst-content .wy-alert-success.caution .wy-alert-title,.rst-content .wy-alert-success.danger .wy-alert-title,.rst-content .wy-alert-success.error .wy-alert-title,.rst-content .hint .wy-alert-title,.rst-content .important .wy-alert-title,.rst-content .tip .wy-alert-title,.rst-content .wy-alert-success.warning .wy-alert-title,.rst-content .wy-alert-success.seealso .wy-alert-title,.wy-alert.wy-alert-success .rst-content .admonition-title,.rst-content .wy-alert.wy-alert-success .admonition-title,.rst-content .wy-alert-success.note .admonition-title,.rst-content .wy-alert-success.attention .admonition-title,.rst-content .wy-alert-success.caution .admonition-title,.rst-content .wy-alert-success.danger .admonition-title,.rst-content .wy-alert-success.error .admonition-title,.rst-content .hint .admonition-title,.rst-content .important .admonition-title,.rst-content .tip .admonition-title,.rst-content 
.wy-alert-success.warning .admonition-title,.rst-content .wy-alert-success.seealso .admonition-title{background:#1abc9c}.wy-alert.wy-alert-neutral,.rst-content .wy-alert-neutral.note,.rst-content .wy-alert-neutral.attention,.rst-content .wy-alert-neutral.caution,.rst-content .wy-alert-neutral.danger,.rst-content .wy-alert-neutral.error,.rst-content .wy-alert-neutral.hint,.rst-content .wy-alert-neutral.important,.rst-content .wy-alert-neutral.tip,.rst-content .wy-alert-neutral.warning,.rst-content .wy-alert-neutral.seealso{background:#f3f6f6}.wy-alert.wy-alert-neutral strong,.rst-content .wy-alert-neutral.note strong,.rst-content .wy-alert-neutral.attention strong,.rst-content .wy-alert-neutral.caution strong,.rst-content .wy-alert-neutral.danger strong,.rst-content .wy-alert-neutral.error strong,.rst-content .wy-alert-neutral.hint strong,.rst-content .wy-alert-neutral.important strong,.rst-content .wy-alert-neutral.tip strong,.rst-content .wy-alert-neutral.warning strong,.rst-content .wy-alert-neutral.seealso strong{color:#404040}.wy-alert.wy-alert-neutral a,.rst-content .wy-alert-neutral.note a,.rst-content .wy-alert-neutral.attention a,.rst-content .wy-alert-neutral.caution a,.rst-content .wy-alert-neutral.danger a,.rst-content .wy-alert-neutral.error a,.rst-content .wy-alert-neutral.hint a,.rst-content .wy-alert-neutral.important a,.rst-content .wy-alert-neutral.tip a,.rst-content .wy-alert-neutral.warning a,.rst-content .wy-alert-neutral.seealso a{color:#2980b9}.wy-tray-container{position:fixed;top:-50px;left:0;width:100%;-webkit-transition:top 0.2s ease-in;-moz-transition:top 0.2s ease-in;transition:top 0.2s ease-in}.wy-tray-container.on{top:0}.wy-tray-container li{display:none;width:100%;background:#343131;padding:12px 24px;color:#fff;margin-bottom:6px;text-align:center;box-shadow:0 5px 5px 0 rgba(0,0,0,0.1),0px -1px 2px -1px rgba(255,255,255,0.5) inset}.wy-tray-container li.wy-tray-item-success{background:#27ae60}.wy-tray-container 
li.wy-tray-item-info{background:#2980b9}.wy-tray-container li.wy-tray-item-warning{background:#e67e22}.wy-tray-container li.wy-tray-item-danger{background:#e74c3c}.btn{display:inline-block;*display:inline;zoom:1;line-height:normal;white-space:nowrap;vertical-align:baseline;text-align:center;cursor:pointer;-webkit-user-drag:none;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;font-size:100%;padding:6px 12px;color:#fff;border:1px solid rgba(0,0,0,0.1);border-bottom:solid 3px rgba(0,0,0,0.1);background-color:#27ae60;text-decoration:none;font-weight:500;box-shadow:0px 1px 2px -1px rgba(255,255,255,0.5) inset;-webkit-transition:all 0.1s linear;-moz-transition:all 0.1s linear;transition:all 0.1s linear;outline-none:false}.btn-hover{background:#2e8ece;color:#fff}.btn:hover{background:#2cc36b;color:#fff}.btn:focus{background:#2cc36b;color:#fff;outline:0}.btn:active{border-top:solid 3px rgba(0,0,0,0.1);border-bottom:solid 1px rgba(0,0,0,0.1);box-shadow:0px 1px 2px -1px rgba(0,0,0,0.5) inset}.btn[disabled]{background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);filter:alpha(opacity=40);opacity:0.4;cursor:not-allowed;box-shadow:none}.btn-disabled{background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);filter:alpha(opacity=40);opacity:0.4;cursor:not-allowed;box-shadow:none}.btn-disabled:hover,.btn-disabled:focus,.btn-disabled:active{background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);filter:alpha(opacity=40);opacity:0.4;cursor:not-allowed;box-shadow:none}.btn::-moz-focus-inner{padding:0;border:0}.btn-small{font-size:80%}.btn-info{background-color:#2980b9 !important}.btn-info:hover{background-color:#2e8ece !important}.btn-neutral{background-color:#f3f6f6 !important;color:#404040 !important}.btn-neutral:hover{background-color:#e5ebeb !important;color:#404040}.btn-danger{background-color:#e74c3c 
!important}.btn-danger:hover{background-color:#ea6153 !important}.btn-warning{background-color:#e67e22 !important}.btn-warning:hover{background-color:#e98b39 !important}.btn-invert{background-color:#343131}.btn-invert:hover{background-color:#413d3d !important}.btn-link{background-color:transparent !important;color:#2980b9;border-color:transparent}.btn-link:hover{background-color:transparent !important;color:#409ad5;border-color:transparent}.btn-link:active{background-color:transparent !important;border-color:transparent;border-top:solid 1px transparent;border-bottom:solid 3px transparent}.wy-btn-group .btn,.wy-control .btn{vertical-align:middle}.wy-btn-group{margin-bottom:24px;*zoom:1}.wy-btn-group:before,.wy-btn-group:after{display:table;content:""}.wy-btn-group:after{clear:both}.wy-dropdown{position:relative;display:inline-block}.wy-dropdown:hover .wy-dropdown-menu{display:block}.wy-dropdown .caret:after{font-family:fontawesome-webfont;content:"\f0d7";font-size:70%}.wy-dropdown-menu{position:absolute;top:100%;left:0;display:none;float:left;min-width:100%;background:#fcfcfc;z-index:100;border:solid 1px #cfd7dd;box-shadow:0 5px 5px 0 rgba(0,0,0,0.1);padding:12px}.wy-dropdown-menu>dd>a{display:block;clear:both;color:#404040;white-space:nowrap;font-size:90%;padding:0 12px}.wy-dropdown-menu>dd>a:hover{background:#2980b9;color:#fff}.wy-dropdown-menu>dd.divider{border-top:solid 1px #cfd7dd;margin:6px 0}.wy-dropdown-menu>dd.search{padding-bottom:12px}.wy-dropdown-menu>dd.search input[type="search"]{width:100%}.wy-dropdown-menu>dd.call-to-action{background:#e3e3e3;text-transform:uppercase;font-weight:500;font-size:80%}.wy-dropdown-menu>dd.call-to-action:hover{background:#e3e3e3}.wy-dropdown-menu>dd.call-to-action .btn{color:#fff}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu{background:#fcfcfc;margin-top:2px}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu a{padding:6px 12px}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu 
a:hover{background:#2980b9;color:#fff}.wy-dropdown.wy-dropdown-left .wy-dropdown-menu{right:0;text-align:right}.wy-dropdown-arrow:before{content:" ";border-bottom:5px solid #f5f5f5;border-left:5px solid transparent;border-right:5px solid transparent;position:absolute;display:block;top:-4px;left:50%;margin-left:-3px}.wy-dropdown-arrow.wy-dropdown-arrow-left:before{left:11px}.wy-form-stacked select{display:block}.wy-form-aligned input,.wy-form-aligned textarea,.wy-form-aligned select,.wy-form-aligned .wy-help-inline,.wy-form-aligned label{display:inline-block;*display:inline;*zoom:1;vertical-align:middle}.wy-form-aligned .wy-control-group>label{display:inline-block;vertical-align:middle;width:10em;margin:0.5em 1em 0 0;float:left}.wy-form-aligned .wy-control{float:left}.wy-form-aligned .wy-control label{display:block}.wy-form-aligned .wy-control select{margin-top:0.5em}fieldset{border:0;margin:0;padding:0}legend{display:block;width:100%;border:0;padding:0;white-space:normal;margin-bottom:24px;font-size:150%;*margin-left:-7px}label{display:block;margin:0 0 0.3125em 0;color:#999;font-size:90%}button,input,select,textarea{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle}button,input{line-height:normal}button{-webkit-appearance:button;cursor:pointer;*overflow:visible}button::-moz-focus-inner,input::-moz-focus-inner{border:0;padding:0}button[disabled]{cursor:default}input[type="button"],input[type="reset"],input[type="submit"]{-webkit-appearance:button;cursor:pointer;*overflow:visible}input[type="text"],input[type="password"],input[type="email"],input[type="url"],input[type="date"],input[type="month"],input[type="time"],input[type="datetime"],input[type="datetime-local"],input[type="week"],input[type="number"],input[type="search"],input[type="tel"],input[type="color"]{-webkit-appearance:none;padding:6px;display:inline-block;border:1px solid #ccc;font-size:80%;font-family:"Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;box-shadow:inset 0 1px 
3px #ddd;border-radius:0;-webkit-transition:border 0.3s linear;-moz-transition:border 0.3s linear;transition:border 0.3s linear}input[type="datetime-local"]{padding:0.34375em 0.625em}input[disabled]{cursor:default}input[type="checkbox"],input[type="radio"]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;padding:0;margin-right:0.3125em;*height:13px;*width:13px}input[type="search"]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}input[type="search"]::-webkit-search-cancel-button,input[type="search"]::-webkit-search-decoration{-webkit-appearance:none}input[type="text"]:focus,input[type="password"]:focus,input[type="email"]:focus,input[type="url"]:focus,input[type="date"]:focus,input[type="month"]:focus,input[type="time"]:focus,input[type="datetime"]:focus,input[type="datetime-local"]:focus,input[type="week"]:focus,input[type="number"]:focus,input[type="search"]:focus,input[type="tel"]:focus,input[type="color"]:focus{outline:0;outline:thin dotted \9;border-color:#2980b9}input.no-focus:focus{border-color:#ccc !important}input[type="file"]:focus,input[type="radio"]:focus,input[type="checkbox"]:focus{outline:thin dotted #333;outline:1px auto #129fea}input[type="text"][disabled],input[type="password"][disabled],input[type="email"][disabled],input[type="url"][disabled],input[type="date"][disabled],input[type="month"][disabled],input[type="time"][disabled],input[type="datetime"][disabled],input[type="datetime-local"][disabled],input[type="week"][disabled],input[type="number"][disabled],input[type="search"][disabled],input[type="tel"][disabled],input[type="color"][disabled]{cursor:not-allowed;background-color:#f3f6f6;color:#cad2d3}input:focus:invalid,textarea:focus:invalid,select:focus:invalid{color:#e74c3c;border:1px solid 
#e74c3c}input:focus:invalid:focus,textarea:focus:invalid:focus,select:focus:invalid:focus{border-color:#e9322d}input[type="file"]:focus:invalid:focus,input[type="radio"]:focus:invalid:focus,input[type="checkbox"]:focus:invalid:focus{outline-color:#e9322d}input.wy-input-large{padding:12px;font-size:100%}textarea{overflow:auto;vertical-align:top;width:100%}select,textarea{padding:0.5em 0.625em;display:inline-block;border:1px solid #ccc;font-size:0.8em;box-shadow:inset 0 1px 3px #ddd;-webkit-transition:border 0.3s linear;-moz-transition:border 0.3s linear;transition:border 0.3s linear}select{border:1px solid #ccc;background-color:#fff}select[multiple]{height:auto}select:focus,textarea:focus{outline:0}select[disabled],textarea[disabled],input[readonly],select[readonly],textarea[readonly]{cursor:not-allowed;background-color:#fff;color:#cad2d3;border-color:transparent}.wy-checkbox,.wy-radio{margin:0.5em 0;color:#404040 !important;display:block}.wy-form-message-inline{display:inline-block;*display:inline;*zoom:1;vertical-align:middle}.wy-input-prefix,.wy-input-suffix{white-space:nowrap}.wy-input-prefix .wy-input-context,.wy-input-suffix .wy-input-context{padding:6px;display:inline-block;font-size:80%;background-color:#f3f6f6;border:solid 1px #ccc;color:#999}.wy-input-suffix .wy-input-context{border-left:0}.wy-input-prefix .wy-input-context{border-right:0}.wy-inline-validate{white-space:nowrap}.wy-inline-validate .wy-input-context{padding:0.5em 0.625em;display:inline-block;font-size:80%}.wy-inline-validate.wy-inline-validate-success .wy-input-context{color:#27ae60}.wy-inline-validate.wy-inline-validate-danger .wy-input-context{color:#e74c3c}.wy-inline-validate.wy-inline-validate-warning .wy-input-context{color:#e67e22}.wy-inline-validate.wy-inline-validate-info 
.wy-input-context{color:#2980b9}.wy-control-group{margin-bottom:24px;*zoom:1}.wy-control-group:before,.wy-control-group:after{display:table;content:""}.wy-control-group:after{clear:both}.wy-control-group.wy-control-group-error .wy-form-message,.wy-control-group.wy-control-group-error label{color:#e74c3c}.wy-control-group.wy-control-group-error input[type="text"],.wy-control-group.wy-control-group-error input[type="password"],.wy-control-group.wy-control-group-error input[type="email"],.wy-control-group.wy-control-group-error input[type="url"],.wy-control-group.wy-control-group-error input[type="date"],.wy-control-group.wy-control-group-error input[type="month"],.wy-control-group.wy-control-group-error input[type="time"],.wy-control-group.wy-control-group-error input[type="datetime"],.wy-control-group.wy-control-group-error input[type="datetime-local"],.wy-control-group.wy-control-group-error input[type="week"],.wy-control-group.wy-control-group-error input[type="number"],.wy-control-group.wy-control-group-error input[type="search"],.wy-control-group.wy-control-group-error input[type="tel"],.wy-control-group.wy-control-group-error input[type="color"]{border:solid 2px #e74c3c}.wy-control-group.wy-control-group-error textarea{border:solid 2px #e74c3c}.wy-control-group.fluid-input input[type="text"],.wy-control-group.fluid-input input[type="password"],.wy-control-group.fluid-input input[type="email"],.wy-control-group.fluid-input input[type="url"],.wy-control-group.fluid-input input[type="date"],.wy-control-group.fluid-input input[type="month"],.wy-control-group.fluid-input input[type="time"],.wy-control-group.fluid-input input[type="datetime"],.wy-control-group.fluid-input input[type="datetime-local"],.wy-control-group.fluid-input input[type="week"],.wy-control-group.fluid-input input[type="number"],.wy-control-group.fluid-input input[type="search"],.wy-control-group.fluid-input input[type="tel"],.wy-control-group.fluid-input 
input[type="color"]{width:100%}.wy-form-message-inline{display:inline-block;padding-left:0.3em;color:#666;vertical-align:middle;font-size:90%}.wy-form-message{display:block;color:#ccc;font-size:70%;margin-top:0.3125em;font-style:italic}.wy-tag-input-group{padding:4px 4px 0px 4px;display:inline-block;border:1px solid #ccc;font-size:80%;font-family:"Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;box-shadow:inset 0 1px 3px #ddd;-webkit-transition:border 0.3s linear;-moz-transition:border 0.3s linear;transition:border 0.3s linear}.wy-tag-input-group .wy-tag{display:inline-block;background-color:rgba(0,0,0,0.1);padding:0.5em 0.625em;border-radius:2px;position:relative;margin-bottom:4px}.wy-tag-input-group .wy-tag .wy-tag-remove{color:#ccc;margin-left:5px}.wy-tag-input-group .wy-tag .wy-tag-remove:hover{color:#e74c3c}.wy-tag-input-group label{margin-left:5px;display:inline-block;margin-bottom:0}.wy-tag-input-group input{border:none;font-size:100%;margin-bottom:4px;box-shadow:none}.wy-form-upload{border:solid 1px #ccc;border-bottom:solid 3px #ccc;background-color:#fff;padding:24px;display:inline-block;text-align:center;cursor:pointer;color:#404040;-webkit-transition:border-color 0.1s ease-in;-moz-transition:border-color 0.1s ease-in;transition:border-color 0.1s ease-in;*zoom:1}.wy-form-upload:before,.wy-form-upload:after{display:table;content:""}.wy-form-upload:after{clear:both}@media screen and (max-width: 480px){.wy-form-upload{width:100%}}.wy-form-upload .image-drop{display:none}.wy-form-upload .image-desktop{display:none}.wy-form-upload .image-loading{display:none}.wy-form-upload .wy-form-upload-icon{display:block;font-size:32px;color:#b3b3b3}.wy-form-upload .image-drop .wy-form-upload-icon{color:#27ae60}.wy-form-upload p{font-size:90%}.wy-form-upload .wy-form-upload-image{float:left;margin-right:24px}@media screen and (max-width: 480px){.wy-form-upload .wy-form-upload-image{width:100%;margin-bottom:24px}}.wy-form-upload 
img{max-width:125px;max-height:125px;opacity:0.9;-webkit-transition:opacity 0.1s ease-in;-moz-transition:opacity 0.1s ease-in;transition:opacity 0.1s ease-in}.wy-form-upload .wy-form-upload-content{float:left}@media screen and (max-width: 480px){.wy-form-upload .wy-form-upload-content{width:100%}}.wy-form-upload:hover{border-color:#b3b3b3;color:#404040}.wy-form-upload:hover .image-desktop{display:block}.wy-form-upload:hover .image-drag{display:none}.wy-form-upload:hover img{opacity:1}.wy-form-upload:active{border-top:solid 3px #ccc;border-bottom:solid 1px #ccc}.wy-form-upload.wy-form-upload-big{width:100%;text-align:center;padding:72px}.wy-form-upload.wy-form-upload-big .wy-form-upload-content{float:none}.wy-form-upload.wy-form-upload-file p{margin-bottom:0}.wy-form-upload.wy-form-upload-file .wy-form-upload-icon{display:inline-block;font-size:inherit}.wy-form-upload.wy-form-upload-drop{background-color:#ddf7e8}.wy-form-upload.wy-form-upload-drop .image-drop{display:block}.wy-form-upload.wy-form-upload-drop .image-desktop{display:none}.wy-form-upload.wy-form-upload-drop .image-drag{display:none}.wy-form-upload.wy-form-upload-loading .image-drag{display:none}.wy-form-upload.wy-form-upload-loading .image-desktop{display:none}.wy-form-upload.wy-form-upload-loading .image-loading{display:block}.wy-form-upload.wy-form-upload-loading .wy-input-prefix{display:none}.wy-form-upload.wy-form-upload-loading 
p{margin-bottom:0}.rotate-90{-webkit-transform:rotate(90deg);-moz-transform:rotate(90deg);-ms-transform:rotate(90deg);-o-transform:rotate(90deg);transform:rotate(90deg)}.rotate-180{-webkit-transform:rotate(180deg);-moz-transform:rotate(180deg);-ms-transform:rotate(180deg);-o-transform:rotate(180deg);transform:rotate(180deg)}.rotate-270{-webkit-transform:rotate(270deg);-moz-transform:rotate(270deg);-ms-transform:rotate(270deg);-o-transform:rotate(270deg);transform:rotate(270deg)}.mirror{-webkit-transform:scaleX(-1);-moz-transform:scaleX(-1);-ms-transform:scaleX(-1);-o-transform:scaleX(-1);transform:scaleX(-1)}.mirror.rotate-90{-webkit-transform:scaleX(-1) rotate(90deg);-moz-transform:scaleX(-1) rotate(90deg);-ms-transform:scaleX(-1) rotate(90deg);-o-transform:scaleX(-1) rotate(90deg);transform:scaleX(-1) rotate(90deg)}.mirror.rotate-180{-webkit-transform:scaleX(-1) rotate(180deg);-moz-transform:scaleX(-1) rotate(180deg);-ms-transform:scaleX(-1) rotate(180deg);-o-transform:scaleX(-1) rotate(180deg);transform:scaleX(-1) rotate(180deg)}.mirror.rotate-270{-webkit-transform:scaleX(-1) rotate(270deg);-moz-transform:scaleX(-1) rotate(270deg);-ms-transform:scaleX(-1) rotate(270deg);-o-transform:scaleX(-1) rotate(270deg);transform:scaleX(-1) rotate(270deg)}.wy-form-gallery-manage{margin-left:-12px;margin-right:-12px}.wy-form-gallery-manage li{float:left;padding:12px;width:20%;cursor:pointer}@media screen and (max-width: 768px){.wy-form-gallery-manage li{width:25%}}@media screen and (max-width: 480px){.wy-form-gallery-manage li{width:50%}}.wy-form-gallery-manage li:active{cursor:move}.wy-form-gallery-manage li>a{padding:12px;background-color:#fff;border:solid 1px #e1e4e5;border-bottom:solid 3px #e1e4e5;display:inline-block;-webkit-transition:all 0.1s ease-in;-moz-transition:all 0.1s ease-in;transition:all 0.1s ease-in}.wy-form-gallery-manage li>a:active{border:solid 1px #ccc;border-top:solid 3px #ccc}.wy-form-gallery-manage img{width:100%;-webkit-transition:all 0.05s 
ease-in;-moz-transition:all 0.05s ease-in;transition:all 0.05s ease-in}li.wy-form-gallery-edit{position:relative;color:#fff;padding:24px;width:100%;display:block;background-color:#343131;border-radius:4px}li.wy-form-gallery-edit .arrow{position:absolute;display:block;top:-50px;left:50%;margin-left:-25px;z-index:500;height:0;width:0;border-color:transparent;border-style:solid;border-width:25px;border-bottom-color:#343131}@media only screen and (max-width: 480px){.wy-form button[type="submit"]{margin:0.7em 0 0}.wy-form input[type="text"],.wy-form input[type="password"],.wy-form input[type="email"],.wy-form input[type="url"],.wy-form input[type="date"],.wy-form input[type="month"],.wy-form input[type="time"],.wy-form input[type="datetime"],.wy-form input[type="datetime-local"],.wy-form input[type="week"],.wy-form input[type="number"],.wy-form input[type="search"],.wy-form input[type="tel"],.wy-form input[type="color"]{margin-bottom:0.3em;display:block}.wy-form label{margin-bottom:0.3em;display:block}.wy-form input[type="password"],.wy-form input[type="email"],.wy-form input[type="url"],.wy-form input[type="date"],.wy-form input[type="month"],.wy-form input[type="time"],.wy-form input[type="datetime"],.wy-form input[type="datetime-local"],.wy-form input[type="week"],.wy-form input[type="number"],.wy-form input[type="search"],.wy-form input[type="tel"],.wy-form input[type="color"]{margin-bottom:0}.wy-form-aligned .wy-control-group label{margin-bottom:0.3em;text-align:left;display:block;width:100%}.wy-form-aligned .wy-controls{margin:1.5em 0 0 0}.wy-form .wy-help-inline,.wy-form-message-inline,.wy-form-message{display:block;font-size:80%;padding:0.2em 0 0.8em}}@media screen and (max-width: 768px){.tablet-hide{display:none}}@media screen and (max-width: 
480px){.mobile-hide{display:none}}.float-left{float:left}.float-right{float:right}.full-width{width:100%}.wy-grid-one-col{*zoom:1;max-width:68em;margin-left:auto;margin-right:auto;max-width:1066px;margin-top:1.618em}.wy-grid-one-col:before,.wy-grid-one-col:after{display:table;content:""}.wy-grid-one-col:after{clear:both}.wy-grid-one-col section{display:block;float:left;margin-right:2.35765%;width:100%;background:#fcfcfc;padding:1.618em;margin-right:0}.wy-grid-one-col section:last-child{margin-right:0}.wy-grid-index-card{*zoom:1;max-width:68em;margin-left:auto;margin-right:auto;max-width:460px;margin-top:1.618em;background:#fcfcfc;padding:1.618em}.wy-grid-index-card:before,.wy-grid-index-card:after{display:table;content:""}.wy-grid-index-card:after{clear:both}.wy-grid-index-card header,.wy-grid-index-card section,.wy-grid-index-card aside{display:block;float:left;margin-right:2.35765%;width:100%}.wy-grid-index-card header:last-child,.wy-grid-index-card section:last-child,.wy-grid-index-card aside:last-child{margin-right:0}.wy-grid-index-card.twocol{max-width:768px}.wy-grid-index-card.twocol section{display:block;float:left;margin-right:2.35765%;width:48.82117%}.wy-grid-index-card.twocol section:last-child{margin-right:0}.wy-grid-index-card.twocol aside{display:block;float:left;margin-right:2.35765%;width:48.82117%}.wy-grid-index-card.twocol aside:last-child{margin-right:0}.wy-grid-search-filter{*zoom:1;max-width:68em;margin-left:auto;margin-right:auto;margin-bottom:24px}.wy-grid-search-filter:before,.wy-grid-search-filter:after{display:table;content:""}.wy-grid-search-filter:after{clear:both}.wy-grid-search-filter .wy-grid-search-filter-input{display:block;float:left;margin-right:2.35765%;width:74.41059%}.wy-grid-search-filter .wy-grid-search-filter-input:last-child{margin-right:0}.wy-grid-search-filter .wy-grid-search-filter-btn{display:block;float:left;margin-right:2.35765%;width:23.23176%}.wy-grid-search-filter 
.wy-grid-search-filter-btn:last-child{margin-right:0}.wy-table,.rst-content table.docutils,.rst-content table.field-list{border-collapse:collapse;border-spacing:0;empty-cells:show;margin-bottom:24px}.wy-table caption,.rst-content table.docutils caption,.rst-content table.field-list caption{color:#000;font:italic 85%/1 arial,sans-serif;padding:1em 0;text-align:center}.wy-table td,.rst-content table.docutils td,.rst-content table.field-list td,.wy-table th,.rst-content table.docutils th,.rst-content table.field-list th{font-size:90%;margin:0;overflow:visible;padding:8px 16px}.wy-table td:first-child,.rst-content table.docutils td:first-child,.rst-content table.field-list td:first-child,.wy-table th:first-child,.rst-content table.docutils th:first-child,.rst-content table.field-list th:first-child{border-left-width:0}.wy-table thead,.rst-content table.docutils thead,.rst-content table.field-list thead{color:#000;text-align:left;vertical-align:bottom;white-space:nowrap}.wy-table thead th,.rst-content table.docutils thead th,.rst-content table.field-list thead th{font-weight:bold;border-bottom:solid 2px #e1e4e5}.wy-table td,.rst-content table.docutils td,.rst-content table.field-list td{background-color:transparent;vertical-align:middle}.wy-table td p,.rst-content table.docutils td p,.rst-content table.field-list td p{line-height:18px;margin-bottom:0}.wy-table .wy-table-cell-min,.rst-content table.docutils .wy-table-cell-min,.rst-content table.field-list .wy-table-cell-min{width:1%;padding-right:0}.wy-table .wy-table-cell-min input[type=checkbox],.rst-content table.docutils .wy-table-cell-min input[type=checkbox],.rst-content table.field-list .wy-table-cell-min input[type=checkbox],.wy-table .wy-table-cell-min input[type=checkbox],.rst-content table.docutils .wy-table-cell-min input[type=checkbox],.rst-content table.field-list .wy-table-cell-min 
input[type=checkbox]{margin:0}.wy-table-secondary{color:gray;font-size:90%}.wy-table-tertiary{color:gray;font-size:80%}.wy-table-odd td,.wy-table-striped tr:nth-child(2n-1) td,.rst-content table.docutils:not(.field-list) tr:nth-child(2n-1) td{background-color:#f3f6f6}.wy-table-backed{background-color:#f3f6f6}.wy-table-bordered-all,.rst-content table.docutils{border:1px solid #e1e4e5}.wy-table-bordered-all td,.rst-content table.docutils td{border-bottom:1px solid #e1e4e5;border-left:1px solid #e1e4e5}.wy-table-bordered-all tbody>tr:last-child td,.rst-content table.docutils tbody>tr:last-child td{border-bottom-width:0}.wy-table-bordered{border:1px solid #e1e4e5}.wy-table-bordered-rows td{border-bottom:1px solid #e1e4e5}.wy-table-bordered-rows tbody>tr:last-child td{border-bottom-width:0}.wy-table-horizontal tbody>tr:last-child td{border-bottom-width:0}.wy-table-horizontal td,.wy-table-horizontal th{border-width:0 0 1px 0;border-bottom:1px solid #e1e4e5}.wy-table-horizontal tbody>tr:last-child td{border-bottom-width:0}.wy-table-responsive{margin-bottom:24px;max-width:100%;overflow:auto}.wy-table-responsive table{margin-bottom:0 !important}.wy-table-responsive table td,.wy-table-responsive table th{white-space:nowrap}html{height:100%;overflow-x:hidden}body{font-family:"Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;font-weight:normal;color:#404040;min-height:100%;overflow-x:hidden;background:#edf0f2}a{color:#2980b9;text-decoration:none}a:hover{color:#3091d1}.link-danger{color:#e74c3c}.link-danger:hover{color:#d62c1a}.text-left{text-align:left}.text-center{text-align:center}.text-right{text-align:right}h1,h2,h3,h4,h5,h6,legend{margin-top:0;font-weight:700;font-family:"Roboto Slab","ff-tisa-web-pro","Georgia",Arial,sans-serif}p{line-height:24px;margin:0;font-size:16px;margin-bottom:24px}h1{font-size:175%}h2{font-size:150%}h3{font-size:125%}h4{font-size:115%}h5{font-size:110%}h6{font-size:100%}small{font-size:80%}code,.rst-content 
tt{white-space:nowrap;max-width:100%;background:#fff;border:solid 1px #e1e4e5;font-size:75%;padding:0 5px;font-family:"Incosolata","Consolata","Monaco",monospace;color:#e74c3c;overflow-x:auto}code.code-large,.rst-content tt.code-large{font-size:90%}.full-width{width:100%}.wy-plain-list-disc,.rst-content .section ul,.rst-content .toctree-wrapper ul{list-style:disc;line-height:24px;margin-bottom:24px}.wy-plain-list-disc li,.rst-content .section ul li,.rst-content .toctree-wrapper ul li{list-style:disc;margin-left:24px}.wy-plain-list-disc li ul,.rst-content .section ul li ul,.rst-content .toctree-wrapper ul li ul{margin-bottom:0}.wy-plain-list-disc li li,.rst-content .section ul li li,.rst-content .toctree-wrapper ul li li{list-style:circle}.wy-plain-list-disc li li li,.rst-content .section ul li li li,.rst-content .toctree-wrapper ul li li li{list-style:square}.wy-plain-list-decimal,.rst-content .section ol,.rst-content ol.arabic{list-style:decimal;line-height:24px;margin-bottom:24px}.wy-plain-list-decimal li,.rst-content .section ol li,.rst-content ol.arabic li{list-style:decimal;margin-left:24px}.wy-type-large{font-size:120%}.wy-type-normal{font-size:100%}.wy-type-small{font-size:100%}.wy-type-strike{text-decoration:line-through}.wy-text-warning{color:#e67e22 !important}a.wy-text-warning:hover{color:#eb9950 !important}.wy-text-info{color:#2980b9 !important}a.wy-text-info:hover{color:#409ad5 !important}.wy-text-success{color:#27ae60 !important}a.wy-text-success:hover{color:#36d278 !important}.wy-text-danger{color:#e74c3c !important}a.wy-text-danger:hover{color:#ed7669 !important}.wy-text-neutral{color:#404040 !important}a.wy-text-neutral:hover{color:#595959 !important}.codeblock-example{border:1px solid #e1e4e5;border-bottom:none;padding:24px;padding-top:48px;font-weight:500;background:#fff;position:relative}.codeblock-example:after{content:"Example";position:absolute;top:0px;left:0px;background:#9b59b6;color:#fff;padding:6px 
12px}.codeblock-example.prettyprint-example-only{border:1px solid #e1e4e5;margin-bottom:24px}.codeblock,.rst-content .literal-block,div[class^='highlight']{border:1px solid #e1e4e5;padding:0px;overflow-x:auto;background:#fff;margin:1px 0 24px 0}.codeblock div[class^='highlight'],.rst-content .literal-block div[class^='highlight'],div[class^='highlight'] div[class^='highlight']{border:none;background:none;margin:0}div[class^='highlight'] td.code{width:100%}.linenodiv pre{border-right:solid 1px #e6e9ea;margin:0;padding:12px 12px;font-family:"Incosolata","Consolata","Monaco",monospace;font-size:12px;line-height:1.5;color:#d9d9d9}div[class^='highlight'] pre{white-space:pre;margin:0;padding:12px 12px;font-family:"Incosolata","Consolata","Monaco",monospace;font-size:12px;line-height:1.5;display:block;overflow:auto;color:#404040}pre.literal-block{@extends .codeblock;}@media print{.codeblock,.rst-content .literal-block,div[class^='highlight'],div[class^='highlight'] pre{white-space:pre-wrap}}.hll{background-color:#ffc;margin:0 -12px;padding:0 12px;display:block}.c{color:#998;font-style:italic}.err{color:#a61717;background-color:#e3d2d2}.k{font-weight:bold}.o{font-weight:bold}.cm{color:#998;font-style:italic}.cp{color:#999;font-weight:bold}.c1{color:#998;font-style:italic}.cs{color:#999;font-weight:bold;font-style:italic}.gd{color:#000;background-color:#fdd}.gd .x{color:#000;background-color:#faa}.ge{font-style:italic}.gr{color:#a00}.gh{color:#999}.gi{color:#000;background-color:#dfd}.gi 
.x{color:#000;background-color:#afa}.go{color:#888}.gp{color:#555}.gs{font-weight:bold}.gu{color:purple;font-weight:bold}.gt{color:#a00}.kc{font-weight:bold}.kd{font-weight:bold}.kn{font-weight:bold}.kp{font-weight:bold}.kr{font-weight:bold}.kt{color:#458;font-weight:bold}.m{color:#099}.s{color:#d14}.n{color:#333}.na{color:teal}.nb{color:#0086b3}.nc{color:#458;font-weight:bold}.no{color:teal}.ni{color:purple}.ne{color:#900;font-weight:bold}.nf{color:#900;font-weight:bold}.nn{color:#555}.nt{color:navy}.nv{color:teal}.ow{font-weight:bold}.w{color:#bbb}.mf{color:#099}.mh{color:#099}.mi{color:#099}.mo{color:#099}.sb{color:#d14}.sc{color:#d14}.sd{color:#d14}.s2{color:#d14}.se{color:#d14}.sh{color:#d14}.si{color:#d14}.sx{color:#d14}.sr{color:#009926}.s1{color:#d14}.ss{color:#990073}.bp{color:#999}.vc{color:teal}.vg{color:teal}.vi{color:teal}.il{color:#099}.gc{color:#999;background-color:#eaf2f5}.wy-breadcrumbs li{display:inline-block}.wy-breadcrumbs li.wy-breadcrumbs-aside{float:right}.wy-breadcrumbs li a{display:inline-block;padding:5px}.wy-breadcrumbs li a:first-child{padding-left:0}.wy-breadcrumbs-extra{margin-bottom:0;color:#b3b3b3;font-size:80%;display:inline-block}@media screen and (max-width: 480px){.wy-breadcrumbs-extra{display:none}.wy-breadcrumbs li.wy-breadcrumbs-aside{display:none}}@media print{.wy-breadcrumbs li.wy-breadcrumbs-aside{display:none}}.wy-affix{position:fixed;top:1.618em}.wy-menu a:hover{text-decoration:none}.wy-menu-horiz{*zoom:1}.wy-menu-horiz:before,.wy-menu-horiz:after{display:table;content:""}.wy-menu-horiz:after{clear:both}.wy-menu-horiz ul,.wy-menu-horiz li{display:inline-block}.wy-menu-horiz li:hover{background:rgba(255,255,255,0.1)}.wy-menu-horiz li.divide-left{border-left:solid 1px #404040}.wy-menu-horiz li.divide-right{border-right:solid 1px #404040}.wy-menu-horiz a{height:32px;display:inline-block;line-height:32px;padding:0 16px}.wy-menu-vertical header{height:32px;display:inline-block;line-height:32px;padding:0 
1.618em;display:block;font-weight:bold;text-transform:uppercase;font-size:80%;color:#2980b9;white-space:nowrap}.wy-menu-vertical ul{margin-bottom:0}.wy-menu-vertical li.divide-top{border-top:solid 1px #404040}.wy-menu-vertical li.divide-bottom{border-bottom:solid 1px #404040}.wy-menu-vertical li.current{background:#e3e3e3}.wy-menu-vertical li.current a{color:gray;border-right:solid 1px #c9c9c9;padding:0.4045em 2.427em}.wy-menu-vertical li.current a:hover{background:#d6d6d6}.wy-menu-vertical li.on a,.wy-menu-vertical li.current>a{color:#404040;padding:0.4045em 1.618em;font-weight:bold;position:relative;background:#fcfcfc;border:none;border-bottom:solid 1px #c9c9c9;border-top:solid 1px #c9c9c9;padding-left:1.618em -4px}.wy-menu-vertical li.on a:hover,.wy-menu-vertical li.current>a:hover{background:#fcfcfc}.wy-menu-vertical li.tocktree-l2.current>a{background:#c9c9c9}.wy-menu-vertical li.current ul{display:block}.wy-menu-vertical li ul{margin-bottom:0;display:none}.wy-menu-vertical .local-toc li ul{display:block}.wy-menu-vertical li ul li a{margin-bottom:0;color:#b3b3b3;font-weight:normal}.wy-menu-vertical a{display:inline-block;line-height:18px;padding:0.4045em 1.618em;display:block;position:relative;font-size:90%;color:#b3b3b3}.wy-menu-vertical a:hover{background-color:#4e4a4a;cursor:pointer}.wy-menu-vertical a:active{background-color:#2980b9;cursor:pointer;color:#fff}.wy-side-nav-search{z-index:200;background-color:#2980b9;text-align:center;padding:0.809em;display:block;color:#fcfcfc;margin-bottom:0.809em}.wy-side-nav-search input[type=text]{width:100%;border-radius:50px;padding:6px 12px;border-color:#2472a4}.wy-side-nav-search img{display:block;margin:auto auto 0.809em auto;height:45px;width:45px;background-color:#2980b9;padding:5px;border-radius:100%}.wy-side-nav-search>a,.wy-side-nav-search .wy-dropdown>a{color:#fcfcfc;font-size:100%;font-weight:bold;display:inline-block;padding:4px 6px;margin-bottom:0.809em}.wy-side-nav-search>a:hover,.wy-side-nav-search 
.wy-dropdown>a:hover{background:rgba(255,255,255,0.1)}.wy-nav .wy-menu-vertical header{color:#2980b9}.wy-nav .wy-menu-vertical a{color:#b3b3b3}.wy-nav .wy-menu-vertical a:hover{background-color:#2980b9;color:#fff}[data-menu-wrap]{-webkit-transition:all 0.2s ease-in;-moz-transition:all 0.2s ease-in;transition:all 0.2s ease-in;position:absolute;opacity:1;width:100%;opacity:0}[data-menu-wrap].move-center{left:0;right:auto;opacity:1}[data-menu-wrap].move-left{right:auto;left:-100%;opacity:0}[data-menu-wrap].move-right{right:-100%;left:auto;opacity:0}.wy-body-for-nav{background:left repeat-y #fcfcfc;background-image:url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyRpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMy1jMDExIDY2LjE0NTY2MSwgMjAxMi8wMi8wNi0xNDo1NjoyNyAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENTNiAoTWFjaW50b3NoKSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDoxOERBMTRGRDBFMUUxMUUzODUwMkJCOThDMEVFNURFMCIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDoxOERBMTRGRTBFMUUxMUUzODUwMkJCOThDMEVFNURFMCI+IDx4bXBNTTpEZXJpdmVkRnJvbSBzdFJlZjppbnN0YW5jZUlEPSJ4bXAuaWlkOjE4REExNEZCMEUxRTExRTM4NTAyQkI5OEMwRUU1REUwIiBzdFJlZjpkb2N1bWVudElEPSJ4bXAuZGlkOjE4REExNEZDMEUxRTExRTM4NTAyQkI5OEMwRUU1REUwIi8+IDwvcmRmOkRlc2NyaXB0aW9uPiA8L3JkZjpSREY+IDwveDp4bXBtZXRhPiA8P3hwYWNrZXQgZW5kPSJyIj8+EwrlwAAAAA5JREFUeNpiMDU0BAgwAAE2AJgB9BnaAAAAAElFTkSuQmCC);background-size:300px 
1px}.wy-grid-for-nav{position:absolute;width:100%;height:100%}.wy-nav-side{position:absolute;top:0;left:0;width:300px;overflow:hidden;min-height:100%;background:#343131;z-index:200}.wy-nav-top{display:none;background:#2980b9;color:#fff;padding:0.4045em 0.809em;position:relative;line-height:50px;text-align:center;font-size:100%;*zoom:1}.wy-nav-top:before,.wy-nav-top:after{display:table;content:""}.wy-nav-top:after{clear:both}.wy-nav-top a{color:#fff;font-weight:bold}.wy-nav-top img{margin-right:12px;height:45px;width:45px;background-color:#2980b9;padding:5px;border-radius:100%}.wy-nav-top i{font-size:30px;float:left;cursor:pointer}.wy-nav-content-wrap{margin-left:300px;background:#fcfcfc;min-height:100%}.wy-nav-content{padding:1.618em 3.236em;height:100%;max-width:800px;margin:auto}.wy-body-mask{position:fixed;width:100%;height:100%;background:rgba(0,0,0,0.2);display:none;z-index:499}.wy-body-mask.on{display:block}footer{color:#999}footer p{margin-bottom:12px}.rst-footer-buttons{*zoom:1}.rst-footer-buttons:before,.rst-footer-buttons:after{display:table;content:""}.rst-footer-buttons:after{clear:both}#search-results .search li{margin-bottom:24px;border-bottom:solid 1px #e1e4e5;padding-bottom:24px}#search-results .search li:first-child{border-top:solid 1px #e1e4e5;padding-top:24px}#search-results .search li a{font-size:120%;margin-bottom:12px;display:inline-block}#search-results .context{color:gray;font-size:90%}@media screen and (max-width: 768px){.wy-body-for-nav{background:#fcfcfc}.wy-nav-top{display:block}.wy-nav-side{left:-300px}.wy-nav-side.shift{width:85%;left:0}.wy-nav-content-wrap{margin-left:0}.wy-nav-content-wrap .wy-nav-content{padding:1.618em}.wy-nav-content-wrap.shift{position:fixed;min-width:100%;left:85%;top:0;height:100%;overflow:hidden}}@media screen and (min-width: 1400px){.wy-nav-content-wrap{background:rgba(0,0,0,0.05)}.wy-nav-content{margin:0;background:#fcfcfc}}@media 
print{.wy-nav-side{display:none}.wy-nav-content-wrap{margin-left:0}}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;border-top:solid 10px #343131;font-family:"Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60;*zoom:1}.rst-versions .rst-current-version:before,.rst-versions .rst-current-version:after{display:table;content:""}.rst-versions .rst-current-version:after{clear:both}.rst-versions .rst-current-version .icon,.rst-versions .rst-current-version .wy-inline-validate.wy-inline-validate-success .wy-input-context,.wy-inline-validate.wy-inline-validate-success .rst-versions .rst-current-version .wy-input-context,.rst-versions .rst-current-version .wy-inline-validate.wy-inline-validate-danger .wy-input-context,.wy-inline-validate.wy-inline-validate-danger .rst-versions .rst-current-version .wy-input-context,.rst-versions .rst-current-version .wy-inline-validate.wy-inline-validate-warning .wy-input-context,.wy-inline-validate.wy-inline-validate-warning .rst-versions .rst-current-version .wy-input-context,.rst-versions .rst-current-version .wy-inline-validate.wy-inline-validate-info .wy-input-context,.wy-inline-validate.wy-inline-validate-info .rst-versions .rst-current-version .wy-input-context,.rst-versions .rst-current-version .wy-tag-input-group .wy-tag .wy-tag-remove,.wy-tag-input-group .wy-tag .rst-versions .rst-current-version .wy-tag-remove,.rst-versions .rst-current-version .rst-content .admonition-title,.rst-content .rst-versions .rst-current-version .admonition-title,.rst-versions .rst-current-version .rst-content h1 .headerlink,.rst-content h1 .rst-versions .rst-current-version .headerlink,.rst-versions .rst-current-version .rst-content h2 
.headerlink,.rst-content h2 .rst-versions .rst-current-version .headerlink,.rst-versions .rst-current-version .rst-content h3 .headerlink,.rst-content h3 .rst-versions .rst-current-version .headerlink,.rst-versions .rst-current-version .rst-content h4 .headerlink,.rst-content h4 .rst-versions .rst-current-version .headerlink,.rst-versions .rst-current-version .rst-content h5 .headerlink,.rst-content h5 .rst-versions .rst-current-version .headerlink,.rst-versions .rst-current-version .rst-content h6 .headerlink,.rst-content h6 .rst-versions .rst-current-version .headerlink,.rst-versions .rst-current-version .rst-content dl dt .headerlink,.rst-content dl dt .rst-versions .rst-current-version .headerlink{color:#fcfcfc}.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:gray;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:solid 1px #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px}.rst-versions.rst-badge .icon-book{float:none}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge .rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width: 768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}img{width:100%;height:auto}}.rst-content img{max-width:100%;height:auto !important}.rst-content .section>img{margin-bottom:24px}.rst-content 
blockquote{margin-left:24px;line-height:24px;margin-bottom:24px}.rst-content .note .last,.rst-content .attention .last,.rst-content .caution .last,.rst-content .danger .last,.rst-content .error .last,.rst-content .hint .last,.rst-content .important .last,.rst-content .tip .last,.rst-content .warning .last,.rst-content .seealso .last{margin-bottom:0}.rst-content .admonition-title:before{margin-right:4px}.rst-content .admonition table{border-color:rgba(0,0,0,0.1)}.rst-content .admonition table td,.rst-content .admonition table th{background:transparent !important;border-color:rgba(0,0,0,0.1) !important}.rst-content .section ol.loweralpha,.rst-content .section ol.loweralpha li{list-style:lower-alpha}.rst-content .section ol.upperalpha,.rst-content .section ol.upperalpha li{list-style:upper-alpha}.rst-content .section ol p,.rst-content .section ul p{margin-bottom:12px}.rst-content .line-block{margin-left:24px}.rst-content .topic-title{font-weight:bold;margin-bottom:12px}.rst-content .toc-backref{color:#404040}.rst-content .align-right{float:right;margin:0px 0px 24px 24px}.rst-content .align-left{float:left;margin:0px 24px 24px 0px}.rst-content .align-center{margin:auto;display:block}.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content dl dt .headerlink{display:none;visibility:hidden;font-size:14px}.rst-content h1 .headerlink:after,.rst-content h2 .headerlink:after,.rst-content h3 .headerlink:after,.rst-content h4 .headerlink:after,.rst-content h5 .headerlink:after,.rst-content h6 .headerlink:after,.rst-content dl dt .headerlink:after{visibility:visible;content:"\f0c1";font-family:fontawesome-webfont;display:inline-block}.rst-content h1:hover .headerlink,.rst-content h2:hover .headerlink,.rst-content h3:hover .headerlink,.rst-content h4:hover .headerlink,.rst-content h5:hover .headerlink,.rst-content h6:hover .headerlink,.rst-content dl 
dt:hover .headerlink{display:inline-block}.rst-content .sidebar{float:right;width:40%;display:block;margin:0 0 24px 24px;padding:24px;background:#f3f6f6;border:solid 1px #e1e4e5}.rst-content .sidebar p,.rst-content .sidebar ul,.rst-content .sidebar dl{font-size:90%}.rst-content .sidebar .last{margin-bottom:0}.rst-content .sidebar .sidebar-title{display:block;font-family:"Roboto Slab","ff-tisa-web-pro","Georgia",Arial,sans-serif;font-weight:bold;background:#e1e4e5;padding:6px 12px;margin:-24px;margin-bottom:24px;font-size:100%}.rst-content .highlighted{background:#f1c40f;display:inline-block;font-weight:bold;padding:0 6px}.rst-content .footnote-reference,.rst-content .citation-reference{vertical-align:super;font-size:90%}.rst-content table.docutils.citation,.rst-content table.docutils.footnote{background:none;border:none;color:#999}.rst-content table.docutils.citation td,.rst-content table.docutils.citation tr,.rst-content table.docutils.footnote td,.rst-content table.docutils.footnote tr{border:none;background-color:transparent !important;white-space:normal}.rst-content table.docutils.citation td.label,.rst-content table.docutils.footnote td.label{padding-left:0;padding-right:0;vertical-align:top}.rst-content table.field-list{border:none}.rst-content table.field-list td{border:none}.rst-content table.field-list .field-name{padding-right:10px;text-align:left;white-space:nowrap}.rst-content table.field-list .field-body{text-align:left;padding-left:0}.rst-content tt{color:#000}.rst-content tt big,.rst-content tt em{font-size:100% !important;line-height:normal}.rst-content tt .xref,a .rst-content tt{font-weight:bold}.rst-content dl{margin-bottom:24px}.rst-content dl dt{font-weight:bold}.rst-content dl p,.rst-content dl table,.rst-content dl ul,.rst-content dl ol{margin-bottom:12px !important}.rst-content dl dd{margin:0 0 12px 24px}.rst-content dl:not(.docutils){margin-bottom:24px}.rst-content dl:not(.docutils) dt{display:inline-block;margin:6px 
0;font-size:90%;line-height:normal;background:#e7f2fa;color:#2980b9;border-top:solid 3px #6ab0de;padding:6px;position:relative}.rst-content dl:not(.docutils) dt:before{color:#6ab0de}.rst-content dl:not(.docutils) dt .headerlink{color:#404040;font-size:100% !important}.rst-content dl:not(.docutils) dl dt{margin-bottom:6px;border:none;border-left:solid 3px #ccc;background:#f0f0f0;color:gray}.rst-content dl:not(.docutils) dl dt .headerlink{color:#404040;font-size:100% !important}.rst-content dl:not(.docutils) dt:first-child{margin-top:0}.rst-content dl:not(.docutils) tt{font-weight:bold}.rst-content dl:not(.docutils) tt.descname,.rst-content dl:not(.docutils) tt.descclassname{background-color:transparent;border:none;padding:0;font-size:100% !important}.rst-content dl:not(.docutils) tt.descname{font-weight:bold}.rst-content dl:not(.docutils) .optional{display:inline-block;padding:0 4px;color:#000;font-weight:bold}.rst-content dl:not(.docutils) .property{display:inline-block;padding-right:8px}.rst-content .viewcode-link,.rst-content .viewcode-back{display:inline-block;color:#27ae60;font-size:80%;padding-left:24px}.rst-content .viewcode-back{display:block;float:right}@media screen and (max-width: 480px){.rst-content .sidebar{width:100%}}span[id*='MathJax-Span']{color:#404040} diff --git a/docs/_themes/sphinx_rtd_theme/static/favicon.ico b/docs/_themes/sphinx_rtd_theme/static/favicon.ico deleted file mode 100644 index 6970cfde..00000000 Binary files a/docs/_themes/sphinx_rtd_theme/static/favicon.ico and /dev/null differ diff --git a/docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.eot b/docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.eot deleted file mode 100755 index 0662cb96..00000000 Binary files a/docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.eot and /dev/null differ diff --git a/docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.svg b/docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.svg deleted file 
mode 100755 index 2edb4ec3..00000000 --- a/docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.svg +++ /dev/null @@ -1,399 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.ttf b/docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.ttf deleted file mode 100755 index d3659246..00000000 Binary files a/docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.ttf and /dev/null differ diff --git a/docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.woff b/docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.woff deleted file mode 100755 index b9bd17e1..00000000 Binary files a/docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.woff and /dev/null differ diff --git a/docs/_themes/sphinx_rtd_theme/static/js/theme.js b/docs/_themes/sphinx_rtd_theme/static/js/theme.js deleted file mode 100755 index 58e514c0..00000000 --- a/docs/_themes/sphinx_rtd_theme/static/js/theme.js +++ /dev/null @@ -1,16 +0,0 @@ -$( document ).ready(function() { - // Shift nav in mobile when clicking the menu. 
- $("[data-toggle='wy-nav-top']").click(function() { - $("[data-toggle='wy-nav-shift']").toggleClass("shift"); - $("[data-toggle='rst-versions']").toggleClass("shift"); - }); - // Close menu when you click a link. - $(".wy-menu-vertical .current ul li a").click(function() { - $("[data-toggle='wy-nav-shift']").removeClass("shift"); - $("[data-toggle='rst-versions']").toggleClass("shift"); - }); - $("[data-toggle='rst-current-version']").click(function() { - $("[data-toggle='rst-versions']").toggleClass("shift-up"); - }); - $("table.docutils:not(.field-list").wrap("
"); -}); diff --git a/docs/_themes/sphinx_rtd_theme/theme.conf b/docs/_themes/sphinx_rtd_theme/theme.conf deleted file mode 100755 index 173ca698..00000000 --- a/docs/_themes/sphinx_rtd_theme/theme.conf +++ /dev/null @@ -1,8 +0,0 @@ -[theme] -inherit = basic -stylesheet = css/theme.css - -[options] -typekit_id = hiw1hhg -analytics_id = -canonical_url = \ No newline at end of file diff --git a/docs/_themes/sphinx_rtd_theme/versions.html b/docs/_themes/sphinx_rtd_theme/versions.html deleted file mode 100755 index 93319be8..00000000 --- a/docs/_themes/sphinx_rtd_theme/versions.html +++ /dev/null @@ -1,37 +0,0 @@ -{% if READTHEDOCS %} -{# Add rst-badge after rst-versions for small badge style. #} -
- - Read the Docs - v: {{ current_version }} - - -
-
-
Versions
- {% for slug, url in versions %} -
{{ slug }}
- {% endfor %} -
-
-
Downloads
- {% for type, url in downloads %} -
{{ type }}
- {% endfor %} -
-
-
On Read the Docs
-
- Project Home -
-
- Builds -
-
-
- Free document hosting provided by Read the Docs. - -
-
-{% endif %} - diff --git a/docs/apireference.rst b/docs/apireference.rst index 625d4a8b..05ba3f73 100644 --- a/docs/apireference.rst +++ b/docs/apireference.rst @@ -87,7 +87,9 @@ Fields .. autoclass:: mongoengine.fields.DictField .. autoclass:: mongoengine.fields.MapField .. autoclass:: mongoengine.fields.ReferenceField +.. autoclass:: mongoengine.fields.LazyReferenceField .. autoclass:: mongoengine.fields.GenericReferenceField +.. autoclass:: mongoengine.fields.GenericLazyReferenceField .. autoclass:: mongoengine.fields.CachedReferenceField .. autoclass:: mongoengine.fields.BinaryField .. autoclass:: mongoengine.fields.FileField diff --git a/docs/changelog.rst b/docs/changelog.rst index e04c48fb..edc0fb1a 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,13 +4,72 @@ Changelog Development =========== -- (Fill this out as you fix issues and develop you features). +- QuerySet limit function behaviour: Passing 0 as parameter will return all the documents in the cursor #1611 +- (Fill this out as you fix issues and develop your features). 
+======= +Changes in 0.15.4 +================= +- Added `DateField` #513 + +Changes in 0.15.3 +================= +- Subfield resolve error in generic_emdedded_document query #1651 #1652 +- use each modifier only with $position #1673 #1675 +- Improve LazyReferenceField and GenericLazyReferenceField with nested fields #1704 +- Fix validation error instance in GenericEmbeddedDocumentField #1067 +- Update cached fields when fields argument is given #1712 +- Add a db parameter to register_connection for compatibility with connect +- Use insert_one, insert_many in Document.insert #1491 +- Use new update_one, update_many on document/queryset update #1491 +- Use insert_one, insert_many in Document.insert #1491 +- Fix reload(fields) affect changed fields #1371 +- Fix Read-only access to database fails when trying to create indexes #1338 + +Changes in 0.15.0 +================= +- Add LazyReferenceField and GenericLazyReferenceField to address #1230 + +Changes in 0.14.1 +================= +- Removed SemiStrictDict and started using a regular dict for `BaseDocument._data` #1630 +- Added support for the `$position` param in the `$push` operator #1566 +- Fixed `DateTimeField` interpreting an empty string as today #1533 +- Added a missing `__ne__` method to the `GridFSProxy` class #1632 +- Fixed `BaseQuerySet._fields_to_db_fields` #1553 + +Changes in 0.14.0 +================= +- BREAKING CHANGE: Removed the `coerce_types` param from `QuerySet.as_pymongo` #1549 +- POTENTIAL BREAKING CHANGE: Made EmbeddedDocument not hashable by default #1528 +- Improved code quality #1531, #1540, #1541, #1547 + +Changes in 0.13.0 +================= +- POTENTIAL BREAKING CHANGE: Added Unicode support to the `EmailField`, see + docs/upgrade.rst for details. 
+ +Changes in 0.12.0 +================= +- POTENTIAL BREAKING CHANGE: Fixed limit/skip/hint/batch_size chaining #1476 +- POTENTIAL BREAKING CHANGE: Changed a public `QuerySet.clone_into` method to a private `QuerySet._clone_into` #1476 +- Fixed the way `Document.objects.create` works with duplicate IDs #1485 +- Fixed connecting to a replica set with PyMongo 2.x #1436 +- Fixed using sets in field choices #1481 +- Fixed deleting items from a `ListField` #1318 +- Fixed an obscure error message when filtering by `field__in=non_iterable`. #1237 +- Fixed behavior of a `dec` update operator #1450 +- Added a `rename` update operator #1454 +- Added validation for the `db_field` parameter #1448 +- Fixed the error message displayed when querying an `EmbeddedDocumentField` by an invalid value #1440 +- Fixed the error message displayed when validating unicode URLs #1486 +- Raise an error when trying to save an abstract document #1449 Changes in 0.11.0 ================= - BREAKING CHANGE: Renamed `ConnectionError` to `MongoEngineConnectionError` since the former is a built-in exception name in Python v3.x. #1428 - BREAKING CHANGE: Dropped Python 2.6 support. #1428 - BREAKING CHANGE: `from mongoengine.base import ErrorClass` won't work anymore for any error from `mongoengine.errors` (e.g. `ValidationError`). Use `from mongoengine.errors import ErrorClass instead`. #1428 +- BREAKING CHANGE: Accessing a broken reference will raise a `DoesNotExist` error. In the past it used to return `None`. #1334 - Fixed absent rounding for DecimalField when `force_string` is set. 
#1103 Changes in 0.10.8 diff --git a/docs/code/tumblelog.py b/docs/code/tumblelog.py index c10160ea..796336e6 100644 --- a/docs/code/tumblelog.py +++ b/docs/code/tumblelog.py @@ -45,27 +45,27 @@ post2.link_url = 'http://tractiondigital.com/labs/mongoengine/docs' post2.tags = ['mongoengine'] post2.save() -print 'ALL POSTS' -print +print('ALL POSTS') +print() for post in Post.objects: - print post.title + print(post.title) #print '=' * post.title.count() - print "=" * 20 + print("=" * 20) if isinstance(post, TextPost): - print post.content + print(post.content) if isinstance(post, LinkPost): - print 'Link:', post.link_url + print('Link:', post.link_url) - print -print + print() +print() -print 'POSTS TAGGED \'MONGODB\'' -print +print('POSTS TAGGED \'MONGODB\'') +print() for post in Post.objects(tags='mongodb'): - print post.title -print + print(post.title) +print() num_posts = Post.objects(tags='mongodb').count() -print 'Found %d posts with tag "mongodb"' % num_posts +print('Found %d posts with tag "mongodb"' % num_posts) diff --git a/docs/conf.py b/docs/conf.py index cddd35db..468e71e0 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -13,6 +13,10 @@ import sys, os +import sphinx_rtd_theme + +import mongoengine + # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. @@ -44,7 +48,6 @@ copyright = u'2009, MongoEngine Authors' # |version| and |release|, also used in various other places throughout the # built documents. # -import mongoengine # The short X.Y version. version = mongoengine.get_version() # The full version, including alpha/beta/rc tags. @@ -97,10 +100,12 @@ html_theme = 'sphinx_rtd_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. 
-#html_theme_options = {} +html_theme_options = { + 'canonical_url': 'http://docs.mongoengine.org/en/latest/' +} # Add any paths that contain custom themes here, relative to this directory. -html_theme_path = ['_themes'] +html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". @@ -199,7 +204,3 @@ latex_documents = [ #latex_use_modindex = True autoclass_content = 'both' - -html_theme_options = dict( - canonical_url='http://docs.mongoengine.org/en/latest/' -) diff --git a/docs/guide/connecting.rst b/docs/guide/connecting.rst index 48926499..5dac6ae9 100644 --- a/docs/guide/connecting.rst +++ b/docs/guide/connecting.rst @@ -18,10 +18,10 @@ provide the :attr:`host` and :attr:`port` arguments to connect('project1', host='192.168.1.35', port=12345) -If the database requires authentication, :attr:`username` and :attr:`password` -arguments should be provided:: +If the database requires authentication, :attr:`username`, :attr:`password` +and :attr:`authentication_source` arguments should be provided:: - connect('project1', username='webapp', password='pwd123') + connect('project1', username='webapp', password='pwd123', authentication_source='admin') URI style connections are also supported -- just supply the URI as the :attr:`host` to @@ -33,7 +33,7 @@ the :attr:`host` to corresponding parameters in :func:`~mongoengine.connect`: :: connect( - name='test', + db='test', username='user', password='12345', host='mongodb://admin:qwerty@localhost/production' @@ -42,13 +42,18 @@ the :attr:`host` to will establish connection to ``production`` database using ``admin`` username and ``qwerty`` password. -ReplicaSets -=========== +Replica Sets +============ -MongoEngine supports -:class:`~pymongo.mongo_replica_set_client.MongoReplicaSetClient`. To use them, -please use an URI style connection and provide the ``replicaSet`` name -in the connection kwargs. 
+MongoEngine supports connecting to replica sets:: + + from mongoengine import connect + + # Regular connect + connect('dbname', replicaset='rs-name') + + # MongoDB URI-style connect + connect(host='mongodb://localhost/dbname?replicaSet=rs-name') Read preferences are supported through the connection or via individual queries by passing the read_preference :: @@ -59,76 +64,74 @@ queries by passing the read_preference :: Multiple Databases ================== -Multiple database support was added in MongoEngine 0.6. To use multiple -databases you can use :func:`~mongoengine.connect` and provide an `alias` name -for the connection - if no `alias` is provided then "default" is used. +To use multiple databases you can use :func:`~mongoengine.connect` and provide +an `alias` name for the connection - if no `alias` is provided then "default" +is used. In the background this uses :func:`~mongoengine.register_connection` to store the data and you can register all aliases up front if required. Individual documents can also support multiple databases by providing a -`db_alias` in their meta data. This allows :class:`~pymongo.dbref.DBRef` objects -to point across databases and collections. Below is an example schema, using -3 different databases to store data:: +`db_alias` in their meta data. This allows :class:`~pymongo.dbref.DBRef` +objects to point across databases and collections. Below is an example schema, +using 3 different databases to store data:: class User(Document): name = StringField() - meta = {"db_alias": "user-db"} + meta = {'db_alias': 'user-db'} class Book(Document): name = StringField() - meta = {"db_alias": "book-db"} + meta = {'db_alias': 'book-db'} class AuthorBooks(Document): author = ReferenceField(User) book = ReferenceField(Book) - meta = {"db_alias": "users-books-db"} + meta = {'db_alias': 'users-books-db'} Context Managers ================ -Sometimes you may want to switch the database or collection to query against -for a class. 
+Sometimes you may want to switch the database or collection to query against. For example, archiving older data into a separate database for performance reasons or writing functions that dynamically choose collections to write -document to. +a document to. Switch Database --------------- The :class:`~mongoengine.context_managers.switch_db` context manager allows you to change the database alias for a given class allowing quick and easy -access the same User document across databases:: +access to the same User document across databases:: from mongoengine.context_managers import switch_db class User(Document): name = StringField() - meta = {"db_alias": "user-db"} + meta = {'db_alias': 'user-db'} with switch_db(User, 'archive-user-db') as User: - User(name="Ross").save() # Saves the 'archive-user-db' + User(name='Ross').save() # Saves the 'archive-user-db' Switch Collection ----------------- The :class:`~mongoengine.context_managers.switch_collection` context manager allows you to change the collection for a given class allowing quick and easy -access the same Group document across collection:: +access to the same Group document across collection:: from mongoengine.context_managers import switch_collection class Group(Document): name = StringField() - Group(name="test").save() # Saves in the default db + Group(name='test').save() # Saves in the default db with switch_collection(Group, 'group2000') as Group: - Group(name="hello Group 2000 collection!").save() # Saves in group2000 collection - + Group(name='hello Group 2000 collection!').save() # Saves in group2000 collection .. 
note:: Make sure any aliases have been registered with diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index f59f856e..bf74ad8c 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -22,7 +22,7 @@ objects** as class attributes to the document class:: class Page(Document): title = StringField(max_length=200, required=True) - date_modified = DateTimeField(default=datetime.datetime.now) + date_modified = DateTimeField(default=datetime.datetime.utcnow) As BSON (the binary format for storing data in mongodb) is order dependent, documents are serialized based on their field order. @@ -80,6 +80,7 @@ are as follows: * :class:`~mongoengine.fields.FloatField` * :class:`~mongoengine.fields.GenericEmbeddedDocumentField` * :class:`~mongoengine.fields.GenericReferenceField` +* :class:`~mongoengine.fields.GenericLazyReferenceField` * :class:`~mongoengine.fields.GeoPointField` * :class:`~mongoengine.fields.ImageField` * :class:`~mongoengine.fields.IntField` @@ -87,6 +88,7 @@ are as follows: * :class:`~mongoengine.fields.MapField` * :class:`~mongoengine.fields.ObjectIdField` * :class:`~mongoengine.fields.ReferenceField` +* :class:`~mongoengine.fields.LazyReferenceField` * :class:`~mongoengine.fields.SequenceField` * :class:`~mongoengine.fields.SortedListField` * :class:`~mongoengine.fields.StringField` @@ -150,7 +152,7 @@ arguments can be set on all fields: .. note:: If set, this field is also accessible through the `pk` field. :attr:`choices` (Default: None) - An iterable (e.g. a list or tuple) of choices to which the value of this + An iterable (e.g. list, tuple or set) of choices to which the value of this field should be limited. 
Can be either be a nested tuples of value (stored in mongo) and a @@ -214,8 +216,8 @@ document class as the first argument:: Dictionary Fields ----------------- -Often, an embedded document may be used instead of a dictionary – generally -embedded documents are recommended as dictionaries don’t support validation +Often, an embedded document may be used instead of a dictionary – generally +embedded documents are recommended as dictionaries don’t support validation or custom field types. However, sometimes you will not know the structure of what you want to store; in this situation a :class:`~mongoengine.fields.DictField` is appropriate:: @@ -224,7 +226,7 @@ store; in this situation a :class:`~mongoengine.fields.DictField` is appropriate user = ReferenceField(User) answers = DictField() - survey_response = SurveyResponse(date=datetime.now(), user=request.user) + survey_response = SurveyResponse(date=datetime.utcnow(), user=request.user) response_form = ResponseForm(request.POST) survey_response.answers = response_form.cleaned_data() survey_response.save() @@ -361,11 +363,6 @@ Its value can take any of the following constants: In Django, be sure to put all apps that have such delete rule declarations in their :file:`models.py` in the :const:`INSTALLED_APPS` tuple. - -.. warning:: - Signals are not triggered when doing cascading updates / deletes - if this - is required you must manually handle the update / delete. - Generic reference fields '''''''''''''''''''''''' A second kind of reference field also exists, @@ -516,6 +513,9 @@ If a dictionary is passed then the following options are available: Allows you to automatically expire data from a collection by setting the time in seconds to expire the a field. +:attr:`name` (Optional) + Allows you to specify a name for the index + .. note:: Inheritance adds extra fields indices see: :ref:`document-inheritance`. 
@@ -529,14 +529,15 @@ There are a few top level defaults for all indexes that can be set:: title = StringField() rating = StringField() meta = { - 'index_options': {}, + 'index_opts': {}, 'index_background': True, + 'index_cls': False, + 'auto_create_index': True, 'index_drop_dups': True, - 'index_cls': False } -:attr:`index_options` (Optional) +:attr:`index_opts` (Optional) Set any default index options - see the `full options list `_ :attr:`index_background` (Optional) @@ -545,6 +546,12 @@ There are a few top level defaults for all indexes that can be set:: :attr:`index_cls` (Optional) A way to turn off a specific index for _cls. +:attr:`auto_create_index` (Optional) + When this is True (default), MongoEngine will ensure that the correct + indexes exist in MongoDB each time a command is run. This can be disabled + in systems where indexes are managed separately. Disabling this will improve + performance. + :attr:`index_drop_dups` (Optional) Set the default value for if an index should drop duplicates @@ -623,7 +630,7 @@ collection after a given period. See the official documentation for more information. A common usecase might be session data:: class Session(Document): - created = DateTimeField(default=datetime.now) + created = DateTimeField(default=datetime.utcnow) meta = { 'indexes': [ {'fields': ['created'], 'expireAfterSeconds': 3600} @@ -729,6 +736,9 @@ document.:: .. note:: From 0.8 onwards :attr:`allow_inheritance` defaults to False, meaning you must set it to True to use inheritance. 
+ + Setting :attr:`allow_inheritance` to True should also be used in + :class:`~mongoengine.EmbeddedDocument` class in case you need to subclass it Working with existing data -------------------------- diff --git a/docs/guide/document-instances.rst b/docs/guide/document-instances.rst index 0e9fcef6..64f17c08 100644 --- a/docs/guide/document-instances.rst +++ b/docs/guide/document-instances.rst @@ -57,7 +57,8 @@ document values for example:: def clean(self): """Ensures that only published essays have a `pub_date` and - automatically sets the pub_date if published and not set""" + automatically sets `pub_date` if essay is published and `pub_date` + is not set""" if self.status == 'Draft' and self.pub_date is not None: msg = 'Draft entries should not have a publication date.' raise ValidationError(msg) diff --git a/docs/guide/gridfs.rst b/docs/guide/gridfs.rst index 68e7a6d2..f7380e89 100644 --- a/docs/guide/gridfs.rst +++ b/docs/guide/gridfs.rst @@ -53,7 +53,8 @@ Deletion Deleting stored files is achieved with the :func:`delete` method:: - marmot.photo.delete() + marmot.photo.delete() # Deletes the GridFS document + marmot.save() # Saves the GridFS reference (being None) contained in the marmot instance .. warning:: @@ -71,4 +72,5 @@ Files can be replaced with the :func:`replace` method. 
This works just like the :func:`put` method so even metadata can (and should) be replaced:: another_marmot = open('another_marmot.png', 'rb') - marmot.photo.replace(another_marmot, content_type='image/png') + marmot.photo.replace(another_marmot, content_type='image/png') # Replaces the GridFS document + marmot.save() # Replaces the GridFS reference contained in marmot instance diff --git a/docs/guide/installing.rst b/docs/guide/installing.rst index e93f0485..b89d48f0 100644 --- a/docs/guide/installing.rst +++ b/docs/guide/installing.rst @@ -2,13 +2,13 @@ Installing MongoEngine ====================== -To use MongoEngine, you will need to download `MongoDB `_ +To use MongoEngine, you will need to download `MongoDB `_ and ensure it is running in an accessible location. You will also need `PyMongo `_ to use MongoEngine, but if you install MongoEngine using setuptools, then the dependencies will be handled for you. -MongoEngine is available on PyPI, so to use it you can use :program:`pip`: +MongoEngine is available on PyPI, so you can use :program:`pip`: .. code-block:: console diff --git a/docs/guide/querying.rst b/docs/guide/querying.rst index 913de5d6..f1594dd2 100644 --- a/docs/guide/querying.rst +++ b/docs/guide/querying.rst @@ -340,14 +340,19 @@ Javascript code that is executed on the database server. 
Counting results ---------------- -Just as with limiting and skipping results, there is a method on -:class:`~mongoengine.queryset.QuerySet` objects -- -:meth:`~mongoengine.queryset.QuerySet.count`, but there is also a more Pythonic -way of achieving this:: +Just as with limiting and skipping results, there is a method on a +:class:`~mongoengine.queryset.QuerySet` object -- +:meth:`~mongoengine.queryset.QuerySet.count`:: - num_users = len(User.objects) + num_users = User.objects.count() -Even if len() is the Pythonic way of counting results, keep in mind that if you concerned about performance, :meth:`~mongoengine.queryset.QuerySet.count` is the way to go since it only execute a server side count query, while len() retrieves the results, places them in cache, and finally counts them. If we compare the performance of the two operations, len() is much slower than :meth:`~mongoengine.queryset.QuerySet.count`. +You could technically use ``len(User.objects)`` to get the same result, but it +would be significantly slower than :meth:`~mongoengine.queryset.QuerySet.count`. +When you execute a server-side count query, you let MongoDB do the heavy +lifting and you receive a single integer over the wire. Meanwhile, len() +retrieves all the results, places them in a local cache, and finally counts +them. If we compare the performance of the two operations, len() is much slower +than :meth:`~mongoengine.queryset.QuerySet.count`. Further aggregation ------------------- @@ -479,6 +484,8 @@ operators. 
To use a :class:`~mongoengine.queryset.Q` object, pass it in as the first positional argument to :attr:`Document.objects` when you filter it by calling it with keyword arguments:: + from mongoengine.queryset.visitor import Q + # Get published posts Post.objects(Q(published=True) | Q(publish_date__lte=datetime.now())) @@ -558,6 +565,15 @@ cannot use the `$` syntax in keyword arguments it has been mapped to `S`:: >>> post.tags ['database', 'mongodb'] +From MongoDB version 2.6, push operator supports $position value which allows +to push values with index. + >>> post = BlogPost(title="Test", tags=["mongo"]) + >>> post.save() + >>> post.update(push__tags__0=["database", "code"]) + >>> post.reload() + >>> post.tags + ['database', 'code', 'mongo'] + .. note:: Currently only top level lists are handled, future versions of mongodb / pymongo plan to support nested positional operators. See `The $ positional diff --git a/docs/guide/signals.rst b/docs/guide/signals.rst index 797a4869..06bccb3b 100644 --- a/docs/guide/signals.rst +++ b/docs/guide/signals.rst @@ -43,10 +43,10 @@ Available signals include: has taken place but before saving. `post_save` - Called within :meth:`~mongoengine.Document.save` after all actions - (validation, insert/update, cascades, clearing dirty flags) have completed - successfully. Passed the additional boolean keyword argument `created` to - indicate if the save was an insert or an update. + Called within :meth:`~mongoengine.Document.save` after most actions + (validation, insert/update, and cascades, but not clearing dirty flags) have + completed successfully. Passed the additional boolean keyword argument + `created` to indicate if the save was an insert or an update. `pre_delete` Called within :meth:`~mongoengine.Document.delete` prior to @@ -113,6 +113,10 @@ handlers within your subclass:: signals.pre_save.connect(Author.pre_save, sender=Author) signals.post_save.connect(Author.post_save, sender=Author) +.. 
warning:: + + Note that EmbeddedDocument only supports pre/post_init signals. pre/post_save, etc should be attached to Document's class only. Attaching pre_save to an EmbeddedDocument is ignored silently. + Finally, you can also use this small decorator to quickly create a number of signals and attach them to your :class:`~mongoengine.Document` or :class:`~mongoengine.EmbeddedDocument` subclasses as class decorators:: @@ -142,11 +146,4 @@ cleaner looking while still allowing manual execution of the callback:: modified = DateTimeField() -ReferenceFields and Signals ---------------------------- - -Currently `reverse_delete_rule` does not trigger signals on the other part of -the relationship. If this is required you must manually handle the -reverse deletion. - .. _blinker: http://pypi.python.org/pypi/blinker diff --git a/docs/guide/text-indexes.rst b/docs/guide/text-indexes.rst index 725ad369..92a4471a 100644 --- a/docs/guide/text-indexes.rst +++ b/docs/guide/text-indexes.rst @@ -48,4 +48,4 @@ Ordering by text score :: - objects = News.objects.search('mongo').order_by('$text_score') + objects = News.objects.search_text('mongo').order_by('$text_score') diff --git a/docs/tutorial.rst b/docs/tutorial.rst index 311d2888..bcd0d17f 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -3,11 +3,10 @@ Tutorial ======== This tutorial introduces **MongoEngine** by means of example --- we will walk -through how to create a simple **Tumblelog** application. A Tumblelog is a type -of blog where posts are not constrained to being conventional text-based posts. -As well as text-based entries, users may post images, links, videos, etc. For -simplicity's sake, we'll stick to text, image and link entries in our -application. As the purpose of this tutorial is to introduce MongoEngine, we'll +through how to create a simple **Tumblelog** application. A tumblelog is a +blog that supports mixed media content, including text, images, links, video, +audio, etc. 
For simplicity's sake, we'll stick to text, image, and link +entries. As the purpose of this tutorial is to introduce MongoEngine, we'll focus on the data-modelling side of the application, leaving out a user interface. @@ -16,14 +15,14 @@ Getting started Before we start, make sure that a copy of MongoDB is running in an accessible location --- running it locally will be easier, but if that is not an option -then it may be run on a remote server. If you haven't installed mongoengine, +then it may be run on a remote server. If you haven't installed MongoEngine, simply use pip to install it like so:: $ pip install mongoengine Before we can start using MongoEngine, we need to tell it how to connect to our instance of :program:`mongod`. For this we use the :func:`~mongoengine.connect` -function. If running locally the only argument we need to provide is the name +function. If running locally, the only argument we need to provide is the name of the MongoDB database to use:: from mongoengine import * @@ -39,18 +38,18 @@ Defining our documents MongoDB is *schemaless*, which means that no schema is enforced by the database --- we may add and remove fields however we want and MongoDB won't complain. This makes life a lot easier in many regards, especially when there is a change -to the data model. However, defining schemata for our documents can help to -iron out bugs involving incorrect types or missing fields, and also allow us to +to the data model. However, defining schemas for our documents can help to iron +out bugs involving incorrect types or missing fields, and also allow us to define utility methods on our documents in the same way that traditional :abbr:`ORMs (Object-Relational Mappers)` do. In our Tumblelog application we need to store several different types of -information. We will need to have a collection of **users**, so that we may +information. We will need to have a collection of **users**, so that we may link posts to an individual. 
We also need to store our different types of **posts** (eg: text, image and link) in the database. To aid navigation of our Tumblelog, posts may have **tags** associated with them, so that the list of posts shown to the user may be limited to posts that have been assigned a -specific tag. Finally, it would be nice if **comments** could be added to +specific tag. Finally, it would be nice if **comments** could be added to posts. We'll start with **users**, as the other document models are slightly more involved. @@ -78,7 +77,7 @@ Now we'll think about how to store the rest of the information. If we were using a relational database, we would most likely have a table of **posts**, a table of **comments** and a table of **tags**. To associate the comments with individual posts, we would put a column in the comments table that contained a -foreign key to the posts table. We'd also need a link table to provide the +foreign key to the posts table. We'd also need a link table to provide the many-to-many relationship between posts and tags. Then we'd need to address the problem of storing the specialised post-types (text, image and link). There are several ways we can achieve this, but each of them have their problems --- none @@ -87,7 +86,7 @@ of them stand out as particularly intuitive solutions. Posts ^^^^^ -Happily mongoDB *isn't* a relational database, so we're not going to do it that +Happily MongoDB *isn't* a relational database, so we're not going to do it that way. As it turns out, we can use MongoDB's schemaless nature to provide us with a much nicer solution. We will store all of the posts in *one collection* and each post type will only store the fields it needs. If we later want to add @@ -96,7 +95,7 @@ using* the new fields we need to support video posts. This fits with the Object-Oriented principle of *inheritance* nicely. 
We can think of :class:`Post` as a base class, and :class:`TextPost`, :class:`ImagePost` and :class:`LinkPost` as subclasses of :class:`Post`. In fact, MongoEngine supports -this kind of modelling out of the box --- all you need do is turn on inheritance +this kind of modeling out of the box --- all you need do is turn on inheritance by setting :attr:`allow_inheritance` to True in the :attr:`meta`:: class Post(Document): @@ -128,8 +127,8 @@ link table, we can just store a list of tags in each post. So, for both efficiency and simplicity's sake, we'll store the tags as strings directly within the post, rather than storing references to tags in a separate collection. Especially as tags are generally very short (often even shorter -than a document's id), this denormalisation won't impact very strongly on the -size of our database. So let's take a look that the code our modified +than a document's id), this denormalization won't impact the size of the +database very strongly. Let's take a look at the code of our modified :class:`Post` class:: class Post(Document): @@ -141,7 +140,7 @@ The :class:`~mongoengine.fields.ListField` object that is used to define a Post' takes a field object as its first argument --- this means that you can have lists of any type of field (including lists). -.. note:: We don't need to modify the specialised post types as they all +.. note:: We don't need to modify the specialized post types as they all inherit from :class:`Post`. Comments @@ -149,12 +148,12 @@ Comments A comment is typically associated with *one* post. In a relational database, to display a post with its comments, we would have to retrieve the post from the -database, then query the database again for the comments associated with the +database and then query the database again for the comments associated with the post. This works, but there is no real reason to be storing the comments separately from their associated posts, other than to work around the relational model. 
Using MongoDB we can store the comments as a list of *embedded documents* directly on a post document. An embedded document should -be treated no differently that a regular document; it just doesn't have its own +be treated no differently than a regular document; it just doesn't have its own collection in the database. Using MongoEngine, we can define the structure of embedded documents, along with utility methods, in exactly the same way we do with regular documents:: @@ -207,7 +206,10 @@ object:: ross.last_name = 'Lawley' ross.save() -Now that we've got our user in the database, let's add a couple of posts:: +Assign another user to a variable called ``john``, just like we did above with +``ross``. + +Now that we've got our users in the database, let's add a couple of posts:: post1 = TextPost(title='Fun with MongoEngine', author=john) post1.content = 'Took a look at MongoEngine today, looks pretty cool.' @@ -219,8 +221,8 @@ Now that we've got our user in the database, let's add a couple of posts:: post2.tags = ['mongoengine'] post2.save() -.. note:: If you change a field on a object that has already been saved, then - call :meth:`save` again, the document will be updated. +.. note:: If you change a field on an object that has already been saved and + then call :meth:`save` again, the document will be updated. Accessing our data ================== @@ -232,17 +234,17 @@ used to access the documents in the database collection associated with that class. So let's see how we can get our posts' titles:: for post in Post.objects: - print post.title + print(post.title) Retrieving type-specific information ------------------------------------ -This will print the titles of our posts, one on each line. But What if we want +This will print the titles of our posts, one on each line. But what if we want to access the type-specific data (link_url, content, etc.)? 
One way is simply to use the :attr:`objects` attribute of a subclass of :class:`Post`:: for post in TextPost.objects: - print post.content + print(post.content) Using TextPost's :attr:`objects` attribute only returns documents that were created using :class:`TextPost`. Actually, there is a more general rule here: @@ -259,16 +261,14 @@ instances of :class:`Post` --- they were instances of the subclass of practice:: for post in Post.objects: - print post.title - print '=' * len(post.title) + print(post.title) + print('=' * len(post.title)) if isinstance(post, TextPost): - print post.content + print(post.content) if isinstance(post, LinkPost): - print 'Link:', post.link_url - - print + print('Link: {}'.format(post.link_url)) This would print the title of each post, followed by the content if it was a text post, and "Link: " if it was a link post. @@ -283,7 +283,7 @@ your query. Let's adjust our query so that only posts with the tag "mongodb" are returned:: for post in Post.objects(tags='mongodb'): - print post.title + print(post.title) There are also methods available on :class:`~mongoengine.queryset.QuerySet` objects that allow different results to be returned, for example, calling @@ -292,11 +292,11 @@ the first matched by the query you provide. Aggregation functions may also be used on :class:`~mongoengine.queryset.QuerySet` objects:: num_posts = Post.objects(tags='mongodb').count() - print 'Found %d posts with tag "mongodb"' % num_posts + print('Found {} posts with tag "mongodb"'.format(num_posts)) -Learning more about mongoengine +Learning more about MongoEngine ------------------------------- -If you got this far you've made a great start, so well done! The next step on -your mongoengine journey is the `full user guide `_, where you -can learn indepth about how to use mongoengine and mongodb. +If you got this far you've made a great start, so well done! 
The next step on +your MongoEngine journey is the `full user guide `_, where +you can learn in-depth about how to use MongoEngine and MongoDB. diff --git a/docs/upgrade.rst b/docs/upgrade.rst index c0ae7205..65d13359 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -2,6 +2,46 @@ Upgrading ######### +Development +*********** +(Fill this out whenever you introduce breaking changes to MongoEngine) + +0.14.0 +****** +This release includes a few bug fixes and a significant code cleanup. The most +important change is that `QuerySet.as_pymongo` no longer supports a +`coerce_types` mode. If you used it in the past, a) please let us know of your +use case, b) you'll need to override `as_pymongo` to get the desired outcome. + +This release also makes the EmbeddedDocument not hashable by default. If you +use embedded documents in sets or dictionaries, you might have to override +`__hash__` and implement a hashing logic specific to your use case. See #1528 +for the reason behind this change. + +0.13.0 +****** +This release adds Unicode support to the `EmailField` and changes its +structure significantly. Previously, email addresses containing Unicode +characters didn't work at all. Starting with v0.13.0, domains with Unicode +characters are supported out of the box, meaning some emails that previously +didn't pass validation now do. Make sure the rest of your application can +accept such email addresses. Additionally, if you subclassed the `EmailField` +in your application and overrode `EmailField.EMAIL_REGEX`, you will have to +adjust your code to override `EmailField.USER_REGEX`, `EmailField.DOMAIN_REGEX`, +and potentially `EmailField.UTF8_USER_REGEX`. + +0.12.0 +****** +This release includes various fixes for the `BaseQuerySet` methods and how they +are chained together. Since version 0.10.1 applying limit/skip/hint/batch_size +to an already-existing queryset wouldn't modify the underlying PyMongo cursor. 
+This has been fixed now, so you'll need to make sure that your code didn't rely +on the broken implementation. + +Additionally, a public `BaseQuerySet.clone_into` has been renamed to a private +`_clone_into`. If you directly used that method in your code, you'll need to +rename its occurrences. + 0.11.0 ****** This release includes a major rehaul of MongoEngine's code quality and diff --git a/mongoengine/__init__.py b/mongoengine/__init__.py index f8969592..e6dc6b9d 100644 --- a/mongoengine/__init__.py +++ b/mongoengine/__init__.py @@ -23,7 +23,7 @@ __all__ = (list(document.__all__) + list(fields.__all__) + list(signals.__all__) + list(errors.__all__)) -VERSION = (0, 11, 0) +VERSION = (0, 15, 3) def get_version(): diff --git a/mongoengine/base/__init__.py b/mongoengine/base/__init__.py index da31b922..e069a147 100644 --- a/mongoengine/base/__init__.py +++ b/mongoengine/base/__init__.py @@ -15,7 +15,7 @@ __all__ = ( 'UPDATE_OPERATORS', '_document_registry', 'get_document', # datastructures - 'BaseDict', 'BaseList', 'EmbeddedDocumentList', + 'BaseDict', 'BaseList', 'EmbeddedDocumentList', 'LazyReference', # document 'BaseDocument', diff --git a/mongoengine/base/common.py b/mongoengine/base/common.py index da2b8b68..dd177920 100644 --- a/mongoengine/base/common.py +++ b/mongoengine/base/common.py @@ -3,9 +3,10 @@ from mongoengine.errors import NotRegistered __all__ = ('UPDATE_OPERATORS', 'get_document', '_document_registry') -UPDATE_OPERATORS = set(['set', 'unset', 'inc', 'dec', 'pop', 'push', - 'push_all', 'pull', 'pull_all', 'add_to_set', - 'set_on_insert', 'min', 'max']) +UPDATE_OPERATORS = {'set', 'unset', 'inc', 'dec', 'mul', + 'pop', 'push', 'push_all', 'pull', + 'pull_all', 'add_to_set', 'set_on_insert', + 'min', 'max', 'rename'} _document_registry = {} diff --git a/mongoengine/base/datastructures.py b/mongoengine/base/datastructures.py index 5e90a2e5..0197ad10 100644 --- a/mongoengine/base/datastructures.py +++ b/mongoengine/base/datastructures.py @@ -1,12 
+1,13 @@ import itertools import weakref +from bson import DBRef import six from mongoengine.common import _import_class from mongoengine.errors import DoesNotExist, MultipleObjectsReturned -__all__ = ('BaseDict', 'BaseList', 'EmbeddedDocumentList') +__all__ = ('BaseDict', 'BaseList', 'EmbeddedDocumentList', 'LazyReference') class BaseDict(dict): @@ -127,8 +128,8 @@ class BaseList(list): return value def __iter__(self): - for i in xrange(self.__len__()): - yield self[i] + for v in super(BaseList, self).__iter__(): + yield v def __setitem__(self, key, value, *args, **kwargs): if isinstance(key, slice): @@ -137,11 +138,8 @@ class BaseList(list): self._mark_as_changed(key) return super(BaseList, self).__setitem__(key, value) - def __delitem__(self, key, *args, **kwargs): - if isinstance(key, slice): - self._mark_as_changed() - else: - self._mark_as_changed(key) + def __delitem__(self, key): + self._mark_as_changed() return super(BaseList, self).__delitem__(key) def __setslice__(self, *args, **kwargs): @@ -189,7 +187,7 @@ class BaseList(list): self._mark_as_changed() return super(BaseList, self).remove(*args, **kwargs) - def reverse(self, *args, **kwargs): + def reverse(self): self._mark_as_changed() return super(BaseList, self).reverse() @@ -236,6 +234,9 @@ class EmbeddedDocumentList(BaseList): Filters the list by only including embedded documents with the given keyword arguments. + This method only supports simple comparison (e.g: .filter(name='John Doe')) + and does not support operators like __gte, __lte, __icontains like queryset.filter does + :param kwargs: The keyword arguments corresponding to the fields to filter on. *Multiple arguments are treated as if they are ANDed together.* @@ -353,7 +354,8 @@ class EmbeddedDocumentList(BaseList): def update(self, **update): """ - Updates the embedded documents with the given update values. + Updates the embedded documents with the given replacement values. 
This + function does not support mongoDB update operators such as ``inc__``. .. note:: The embedded document changes are not automatically saved @@ -375,7 +377,7 @@ class EmbeddedDocumentList(BaseList): class StrictDict(object): __slots__ = () - _special_fields = set(['get', 'pop', 'iteritems', 'items', 'keys', 'create']) + _special_fields = {'get', 'pop', 'iteritems', 'items', 'keys', 'create'} _classes = {} def __init__(self, **kwargs): @@ -432,7 +434,7 @@ class StrictDict(object): def __eq__(self, other): return self.items() == other.items() - def __neq__(self, other): + def __ne__(self, other): return self.items() != other.items() @classmethod @@ -450,40 +452,40 @@ class StrictDict(object): return cls._classes[allowed_keys] -class SemiStrictDict(StrictDict): - __slots__ = ('_extras', ) - _classes = {} +class LazyReference(DBRef): + __slots__ = ('_cached_doc', 'passthrough', 'document_type') - def __getattr__(self, attr): - try: - super(SemiStrictDict, self).__getattr__(attr) - except AttributeError: - try: - return self.__getattribute__('_extras')[attr] - except KeyError as e: - raise AttributeError(e) + def fetch(self, force=False): + if not self._cached_doc or force: + self._cached_doc = self.document_type.objects.get(pk=self.pk) + if not self._cached_doc: + raise DoesNotExist('Trying to dereference unknown document %s' % (self)) + return self._cached_doc - def __setattr__(self, attr, value): - try: - super(SemiStrictDict, self).__setattr__(attr, value) - except AttributeError: - try: - self._extras[attr] = value - except AttributeError: - self._extras = {attr: value} + @property + def pk(self): + return self.id - def __delattr__(self, attr): - try: - super(SemiStrictDict, self).__delattr__(attr) - except AttributeError: - try: - del self._extras[attr] - except KeyError as e: - raise AttributeError(e) + def __init__(self, document_type, pk, cached_doc=None, passthrough=False): + self.document_type = document_type + self._cached_doc = cached_doc + 
self.passthrough = passthrough + super(LazyReference, self).__init__(self.document_type._get_collection_name(), pk) - def __iter__(self): + def __getitem__(self, name): + if not self.passthrough: + raise KeyError() + document = self.fetch() + return document[name] + + def __getattr__(self, name): + if not object.__getattribute__(self, 'passthrough'): + raise AttributeError() + document = self.fetch() try: - extras_iter = iter(self.__getattribute__('_extras')) - except AttributeError: - extras_iter = () - return itertools.chain(super(SemiStrictDict, self).__iter__(), extras_iter) + return document[name] + except KeyError: + raise AttributeError() + + def __repr__(self): + return "" % (self.document_type, self.pk) diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index 03dc7562..84acb5a2 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -13,13 +13,14 @@ from mongoengine import signals from mongoengine.base.common import get_document from mongoengine.base.datastructures import (BaseDict, BaseList, EmbeddedDocumentList, - SemiStrictDict, StrictDict) + LazyReference, + StrictDict) from mongoengine.base.fields import ComplexBaseField from mongoengine.common import _import_class from mongoengine.errors import (FieldDoesNotExist, InvalidDocumentError, LookUpError, OperationError, ValidationError) -__all__ = ('BaseDocument',) +__all__ = ('BaseDocument', 'NON_FIELD_ERRORS') NON_FIELD_ERRORS = '__all__' @@ -79,8 +80,7 @@ class BaseDocument(object): if self.STRICT and not self._dynamic: self._data = StrictDict.create(allowed_keys=self._fields_ordered)() else: - self._data = SemiStrictDict.create( - allowed_keys=self._fields_ordered)() + self._data = {} self._dynamic_fields = SON() @@ -100,13 +100,11 @@ class BaseDocument(object): for key, value in values.iteritems(): if key in self._fields or key == '_id': setattr(self, key, value) - elif self._dynamic: + else: dynamic_data[key] = value else: FileField = 
_import_class('FileField') for key, value in values.iteritems(): - if key == '__auto_convert': - continue key = self._reverse_db_field_map.get(key, key) if key in self._fields or key in ('id', 'pk', '_cls'): if __auto_convert and value is not None: @@ -147,7 +145,7 @@ class BaseDocument(object): if not hasattr(self, name) and not name.startswith('_'): DynamicField = _import_class('DynamicField') - field = DynamicField(db_field=name) + field = DynamicField(db_field=name, null=True) field.name = name self._dynamic_fields[name] = field self._fields_ordered += (name,) @@ -272,13 +270,6 @@ class BaseDocument(object): def __ne__(self, other): return not self.__eq__(other) - def __hash__(self): - if getattr(self, 'pk', None) is None: - # For new object - return super(BaseDocument, self).__hash__() - else: - return hash(self.pk) - def clean(self): """ Hook for doing document level data cleaning before validation is run. @@ -311,7 +302,7 @@ class BaseDocument(object): data['_cls'] = self._class_name # only root fields ['test1.a', 'test2'] => ['test1', 'test2'] - root_fields = set([f.split('.')[0] for f in fields]) + root_fields = {f.split('.')[0] for f in fields} for field_name in self: if root_fields and field_name not in root_fields: @@ -344,7 +335,7 @@ class BaseDocument(object): value = field.generate() self._data[field_name] = value - if value is not None: + if (value is not None) or (field.null): if use_db_field: data[field.db_field] = value else: @@ -402,16 +393,26 @@ class BaseDocument(object): raise ValidationError(message, errors=errors) def to_json(self, *args, **kwargs): - """Converts a document to JSON. - :param use_db_field: Set to True by default but enables the output of the json structure with the field names - and not the mongodb store db_names in case of set to False + """Convert this document to JSON. + + :param use_db_field: Serialize field names as they appear in + MongoDB (as opposed to attribute names on this document). + Defaults to True. 
""" use_db_field = kwargs.pop('use_db_field', True) return json_util.dumps(self.to_mongo(use_db_field), *args, **kwargs) @classmethod def from_json(cls, json_data, created=False): - """Converts json data to an unsaved document instance""" + """Converts json data to a Document instance + + :param json_data: The json data to load into the Document + :param created: If True, the document will be considered as a brand new document + If False and an id is provided, it will consider that the data being + loaded corresponds to what's already in the database (This has an impact of subsequent call to .save()) + If False and no id is provided, it will consider the data as a new document + (default ``False``) + """ return cls._from_son(json_util.loads(json_data), created=created) def __expand_dynamic_values(self, name, value): @@ -494,7 +495,7 @@ class BaseDocument(object): else: data = getattr(data, part, None) - if hasattr(data, '_changed_fields'): + if not isinstance(data, LazyReference) and hasattr(data, '_changed_fields'): if getattr(data, '_is_document', False): continue @@ -566,7 +567,7 @@ class BaseDocument(object): continue elif isinstance(field, SortedListField) and field._ordering: # if ordering is affected whole list is changed - if any(map(lambda d: field._ordering in d._changed_fields, data)): + if any(field._ordering in d._changed_fields for d in data): changed_fields.append(db_field_name) continue @@ -675,12 +676,20 @@ class BaseDocument(object): if not only_fields: only_fields = [] + if son and not isinstance(son, dict): + raise ValueError("The source SON object needs to be of type 'dict'") + # Get the class name from the document, falling back to the given # class if unavailable class_name = son.get('_cls', cls._class_name) - # Convert SON to a dict, making sure each key is a string - data = {str(key): value for key, value in son.iteritems()} + # Convert SON to a data dict, making sure each key is a string and + # corresponds to the right db field. 
+ data = {} + for key, value in son.iteritems(): + key = str(key) + key = cls._db_field_map.get(key, key) + data[key] = value # Return correct subclass for document type if class_name != cls._class_name: @@ -1077,5 +1086,11 @@ class BaseDocument(object): """Return the display value for a choice field""" value = getattr(self, field.name) if field.choices and isinstance(field.choices[0], (list, tuple)): - return dict(field.choices).get(value, value) + if value is None: + return None + sep = getattr(field, 'display_sep', ' ') + values = value if field.__class__.__name__ in ('ListField', 'SortedListField') else [value] + return sep.join([ + six.text_type(dict(field.choices).get(val, val)) + for val in values or []]) return value diff --git a/mongoengine/base/fields.py b/mongoengine/base/fields.py index b0644cec..a0726aa6 100644 --- a/mongoengine/base/fields.py +++ b/mongoengine/base/fields.py @@ -41,7 +41,7 @@ class BaseField(object): """ :param db_field: The database field to store this field in (defaults to the name of the field) - :param name: Depreciated - use db_field + :param name: Deprecated - use db_field :param required: If the field is required. Whether it has to have a value or not. Defaults to False. :param default: (optional) The default value for this field if no value @@ -55,7 +55,7 @@ class BaseField(object): field. Generally this is deprecated in favour of the `FIELD.validate` method :param choices: (optional) The valid choices - :param null: (optional) Is the field value can be null. If no and there is a default value + :param null: (optional) If the field value can be null. If no and there is a default value then the default value is set :param sparse: (optional) `sparse=True` combined with `unique=True` and `required=False` means that uniqueness won't be enforced for `None` values @@ -81,6 +81,24 @@ class BaseField(object): self.sparse = sparse self._owner_document = None + # Make sure db_field is a string (if it's explicitly defined). 
+ if ( + self.db_field is not None and + not isinstance(self.db_field, six.string_types) + ): + raise TypeError('db_field should be a string.') + + # Make sure db_field doesn't contain any forbidden characters. + if isinstance(self.db_field, six.string_types) and ( + '.' in self.db_field or + '\0' in self.db_field or + self.db_field.startswith('$') + ): + raise ValueError( + 'field names cannot contain dots (".") or null characters ' + '("\\0"), and they must not start with a dollar sign ("$").' + ) + # Detect and report conflicts between metadata and base properties. conflicts = set(dir(self)) & set(kwargs) if conflicts: @@ -112,7 +130,6 @@ class BaseField(object): def __set__(self, instance, value): """Descriptor for assigning a value to a field in a document. """ - # If setting to None and there is a default # Then set the value to the default value if value is None: @@ -182,7 +199,8 @@ class BaseField(object): EmbeddedDocument = _import_class('EmbeddedDocument') choice_list = self.choices - if isinstance(choice_list[0], (list, tuple)): + if isinstance(next(iter(choice_list)), (list, tuple)): + # next(iter) is useful for sets choice_list = [k for k, _ in choice_list] # Choices which are other types of Documents @@ -194,8 +212,10 @@ class BaseField(object): ) ) # Choices which are types other than Documents - elif value not in choice_list: - self.error('Value must be one of %s' % six.text_type(choice_list)) + else: + values = value if isinstance(value, (list, tuple)) else [value] + if len(set(values) - set(choice_list)): + self.error('Value must be one of %s' % six.text_type(choice_list)) def _validate(self, value, **kwargs): # Check the Choices Constraint @@ -481,7 +501,7 @@ class GeoJsonBaseField(BaseField): def validate(self, value): """Validate the GeoJson object based on its type.""" if isinstance(value, dict): - if set(value.keys()) == set(['type', 'coordinates']): + if set(value.keys()) == {'type', 'coordinates'}: if value['type'] != self._type: 
self.error('%s type must be "%s"' % (self._name, self._type)) diff --git a/mongoengine/base/utils.py b/mongoengine/base/utils.py new file mode 100644 index 00000000..288c2f3e --- /dev/null +++ b/mongoengine/base/utils.py @@ -0,0 +1,22 @@ +import re + + +class LazyRegexCompiler(object): + """Descriptor to allow lazy compilation of regex""" + + def __init__(self, pattern, flags=0): + self._pattern = pattern + self._flags = flags + self._compiled_regex = None + + @property + def compiled_regex(self): + if self._compiled_regex is None: + self._compiled_regex = re.compile(self._pattern, self._flags) + return self._compiled_regex + + def __get__(self, obj, objtype): + return self.compiled_regex + + def __set__(self, instance, value): + raise AttributeError("Can not set attribute LazyRegexCompiler") diff --git a/mongoengine/common.py b/mongoengine/common.py index 3e63e98e..bde7e78c 100644 --- a/mongoengine/common.py +++ b/mongoengine/common.py @@ -34,7 +34,10 @@ def _import_class(cls_name): queryset_classes = ('OperationError',) deref_classes = ('DeReference',) - if cls_name in doc_classes: + if cls_name == 'BaseDocument': + from mongoengine.base import document as module + import_classes = ['BaseDocument'] + elif cls_name in doc_classes: from mongoengine import document as module import_classes = doc_classes elif cls_name in field_classes: diff --git a/mongoengine/connection.py b/mongoengine/connection.py index bb353cff..38ebb243 100644 --- a/mongoengine/connection.py +++ b/mongoengine/connection.py @@ -28,7 +28,7 @@ _connections = {} _dbs = {} -def register_connection(alias, name=None, host=None, port=None, +def register_connection(alias, db=None, name=None, host=None, port=None, read_preference=READ_PREFERENCE, username=None, password=None, authentication_source=None, @@ -39,6 +39,7 @@ def register_connection(alias, name=None, host=None, port=None, :param alias: the name that will be used to refer to this connection throughout MongoEngine :param name: the name of the 
specific database to use + :param db: the name of the database to use, for compatibility with connect :param host: the host name of the :program:`mongod` instance to connect to :param port: the port that the :program:`mongod` instance is running on :param read_preference: The read preference for the collection @@ -51,12 +52,14 @@ def register_connection(alias, name=None, host=None, port=None, MONGODB-CR (MongoDB Challenge Response protocol) for older servers. :param is_mock: explicitly use mongomock for this connection (can also be done by using `mongomock://` as db host prefix) - :param kwargs: allow ad-hoc parameters to be passed into the pymongo driver + :param kwargs: ad-hoc parameters to be passed into the pymongo driver, + for example maxpoolsize, tz_aware, etc. See the documentation + for pymongo's `MongoClient` for a full list. .. versionchanged:: 0.10.6 - added mongomock support """ conn_settings = { - 'name': name or 'test', + 'name': name or db or 'test', 'host': host or 'localhost', 'port': port or 27017, 'read_preference': read_preference, @@ -66,9 +69,9 @@ def register_connection(alias, name=None, host=None, port=None, 'authentication_mechanism': authentication_mechanism } - # Handle uri style connections conn_host = conn_settings['host'] - # host can be a list or a string, so if string, force to a list + + # Host can be a list or a string, so if string, force to a list. 
if isinstance(conn_host, six.string_types): conn_host = [conn_host] @@ -96,11 +99,23 @@ def register_connection(alias, name=None, host=None, port=None, uri_options = uri_dict['options'] if 'replicaset' in uri_options: - conn_settings['replicaSet'] = True + conn_settings['replicaSet'] = uri_options['replicaset'] if 'authsource' in uri_options: conn_settings['authentication_source'] = uri_options['authsource'] if 'authmechanism' in uri_options: conn_settings['authentication_mechanism'] = uri_options['authmechanism'] + if IS_PYMONGO_3 and 'readpreference' in uri_options: + read_preferences = ( + ReadPreference.NEAREST, + ReadPreference.PRIMARY, + ReadPreference.PRIMARY_PREFERRED, + ReadPreference.SECONDARY, + ReadPreference.SECONDARY_PREFERRED) + read_pf_mode = uri_options['readpreference'].lower() + for preference in read_preferences: + if preference.name.lower() == read_pf_mode: + conn_settings['read_preference'] = preference + break else: resolved_hosts.append(entity) conn_settings['host'] = resolved_hosts @@ -144,13 +159,14 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): raise MongoEngineConnectionError(msg) def _clean_settings(settings_dict): - irrelevant_fields = set([ - 'name', 'username', 'password', 'authentication_source', - 'authentication_mechanism' - ]) + # set literal more efficient than calling set function + irrelevant_fields_set = { + 'name', 'username', 'password', + 'authentication_source', 'authentication_mechanism' + } return { k: v for k, v in settings_dict.items() - if k not in irrelevant_fields + if k not in irrelevant_fields_set } # Retrieve a copy of the connection settings associated with the requested @@ -170,23 +186,22 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): else: connection_class = MongoClient - # Handle replica set connections - if 'replicaSet' in conn_settings: + # For replica set connections with PyMongo 2.x, use + # MongoReplicaSetClient. 
+ # TODO remove this once we stop supporting PyMongo 2.x. + if 'replicaSet' in conn_settings and not IS_PYMONGO_3: + connection_class = MongoReplicaSetClient + conn_settings['hosts_or_uri'] = conn_settings.pop('host', None) + + # hosts_or_uri has to be a string, so if 'host' was provided + # as a list, join its parts and separate them by ',' + if isinstance(conn_settings['hosts_or_uri'], list): + conn_settings['hosts_or_uri'] = ','.join( + conn_settings['hosts_or_uri']) # Discard port since it can't be used on MongoReplicaSetClient conn_settings.pop('port', None) - # Discard replicaSet if it's not a string - if not isinstance(conn_settings['replicaSet'], six.string_types): - del conn_settings['replicaSet'] - - # For replica set connections with PyMongo 2.x, use - # MongoReplicaSetClient. - # TODO remove this once we stop supporting PyMongo 2.x. - if not IS_PYMONGO_3: - connection_class = MongoReplicaSetClient - conn_settings['hosts_or_uri'] = conn_settings.pop('host', None) - # Iterate over all of the connection settings and if a connection with # the same parameters is already established, use it instead of creating # a new one. @@ -242,9 +257,12 @@ def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs): running on the default port on localhost. If authentication is needed, provide username and password arguments as well. - Multiple databases are supported by using aliases. Provide a separate + Multiple databases are supported by using aliases. Provide a separate `alias` to connect to a different instance of :program:`mongod`. + See the docstring for `register_connection` for more details about all + supported kwargs. + .. versionchanged:: 0.6 - added multiple database support. 
""" if alias not in _connections: diff --git a/mongoengine/context_managers.py b/mongoengine/context_managers.py index c477575e..ee1f5e01 100644 --- a/mongoengine/context_managers.py +++ b/mongoengine/context_managers.py @@ -1,9 +1,11 @@ +from contextlib import contextmanager +from pymongo.write_concern import WriteConcern from mongoengine.common import _import_class from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db __all__ = ('switch_db', 'switch_collection', 'no_dereference', - 'no_sub_classes', 'query_counter') + 'no_sub_classes', 'query_counter', 'set_write_concern') class switch_db(object): @@ -143,66 +145,83 @@ class no_sub_classes(object): :param cls: the class to turn querying sub classes on """ self.cls = cls + self.cls_initial_subclasses = None def __enter__(self): """Change the objects default and _auto_dereference values.""" - self.cls._all_subclasses = self.cls._subclasses - self.cls._subclasses = (self.cls,) + self.cls_initial_subclasses = self.cls._subclasses + self.cls._subclasses = (self.cls._class_name,) return self.cls def __exit__(self, t, value, traceback): """Reset the default and _auto_dereference values.""" - self.cls._subclasses = self.cls._all_subclasses - delattr(self.cls, '_all_subclasses') - return self.cls + self.cls._subclasses = self.cls_initial_subclasses class query_counter(object): - """Query_counter context manager to get the number of queries.""" + """Query_counter context manager to get the number of queries. + This works by updating the `profiling_level` of the database so that all queries get logged, + resetting the db.system.profile collection at the beginnig of the context and counting the new entries. + + This was designed for debugging purpose. 
In fact it is a global counter so queries issued by other threads/processes + can interfere with it + + Be aware that: + - Iterating over large amount of documents (>101) makes pymongo issue `getmore` queries to fetch the next batch of + documents (https://docs.mongodb.com/manual/tutorial/iterate-a-cursor/#cursor-batches) + - Some queries are ignored by default by the counter (killcursors, db.system.indexes) + """ def __init__(self): - """Construct the query_counter.""" - self.counter = 0 + """Construct the query_counter + """ self.db = get_db() + self.initial_profiling_level = None + self._ctx_query_counter = 0 # number of queries issued by the context - def __enter__(self): - """On every with block we need to drop the profile collection.""" + self._ignored_query = { + 'ns': + {'$ne': '%s.system.indexes' % self.db.name}, + 'op': + {'$ne': 'killcursors'} + } + + def _turn_on_profiling(self): + self.initial_profiling_level = self.db.profiling_level() self.db.set_profiling_level(0) self.db.system.profile.drop() self.db.set_profiling_level(2) + + def _resets_profiling(self): + self.db.set_profiling_level(self.initial_profiling_level) + + def __enter__(self): + self._turn_on_profiling() return self def __exit__(self, t, value, traceback): - """Reset the profiling level.""" - self.db.set_profiling_level(0) + self._resets_profiling() def __eq__(self, value): - """== Compare querycounter.""" counter = self._get_count() return value == counter def __ne__(self, value): - """!= Compare querycounter.""" return not self.__eq__(value) def __lt__(self, value): - """< Compare querycounter.""" return self._get_count() < value def __le__(self, value): - """<= Compare querycounter.""" return self._get_count() <= value def __gt__(self, value): - """> Compare querycounter.""" return self._get_count() > value def __ge__(self, value): - """>= Compare querycounter.""" return self._get_count() >= value def __int__(self): - """int representation.""" return self._get_count() def 
__repr__(self): @@ -210,8 +229,17 @@ class query_counter(object): return u"%s" % self._get_count() def _get_count(self): - """Get the number of queries.""" - ignore_query = {'ns': {'$ne': '%s.system.indexes' % self.db.name}} - count = self.db.system.profile.find(ignore_query).count() - self.counter - self.counter += 1 + """Get the number of queries by counting the current number of entries in db.system.profile + and substracting the queries issued by this context. In fact everytime this is called, 1 query is + issued so we need to balance that + """ + count = self.db.system.profile.find(self._ignored_query).count() - self._ctx_query_counter + self._ctx_query_counter += 1 # Account for the query we just issued to gather the information return count + + +@contextmanager +def set_write_concern(collection, write_concerns): + combined_concerns = dict(collection.write_concern.document.items()) + combined_concerns.update(write_concerns) + yield collection.with_options(write_concern=WriteConcern(**combined_concerns)) diff --git a/mongoengine/dereference.py b/mongoengine/dereference.py index 59204d4d..6c993223 100644 --- a/mongoengine/dereference.py +++ b/mongoengine/dereference.py @@ -3,6 +3,7 @@ import six from mongoengine.base import (BaseDict, BaseList, EmbeddedDocumentList, TopLevelDocumentMetaclass, get_document) +from mongoengine.base.datastructures import LazyReference from mongoengine.connection import get_db from mongoengine.document import Document, EmbeddedDocument from mongoengine.fields import DictField, ListField, MapField, ReferenceField @@ -99,7 +100,10 @@ class DeReference(object): if isinstance(item, (Document, EmbeddedDocument)): for field_name, field in item._fields.iteritems(): v = item._data.get(field_name, None) - if isinstance(v, DBRef): + if isinstance(v, LazyReference): + # LazyReference inherits DBRef but should not be dereferenced here ! 
+ continue + elif isinstance(v, DBRef): reference_map.setdefault(field.document_type, set()).add(v.id) elif isinstance(v, (dict, SON)) and '_ref' in v: reference_map.setdefault(get_document(v['_cls']), set()).add(v['_ref'].id) @@ -110,6 +114,9 @@ class DeReference(object): if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)): key = field_cls reference_map.setdefault(key, set()).update(refs) + elif isinstance(item, LazyReference): + # LazyReference inherits DBRef but should not be dereferenced here ! + continue elif isinstance(item, DBRef): reference_map.setdefault(item.collection, set()).add(item.id) elif isinstance(item, (dict, SON)) and '_ref' in item: @@ -126,7 +133,12 @@ class DeReference(object): """ object_map = {} for collection, dbrefs in self.reference_map.iteritems(): - if hasattr(collection, 'objects'): # We have a document class for the refs + + # we use getattr instead of hasattr because as hasattr swallows any exception under python2 + # so it could hide nasty things without raising exceptions (cfr bug #1688)) + ref_document_cls_exists = (getattr(collection, 'objects', None) is not None) + + if ref_document_cls_exists: col_name = collection._get_collection_name() refs = [dbref for dbref in dbrefs if (col_name, dbref) not in object_map] @@ -134,7 +146,7 @@ class DeReference(object): for key, doc in references.iteritems(): object_map[(col_name, key)] = doc else: # Generic reference: use the refs data to convert to document - if isinstance(doc_type, (ListField, DictField, MapField,)): + if isinstance(doc_type, (ListField, DictField, MapField)): continue refs = [dbref for dbref in dbrefs @@ -230,7 +242,7 @@ class DeReference(object): elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: item_name = '%s.%s' % (name, k) if name else name data[k] = self._attach_objects(v, depth - 1, instance=instance, name=item_name) - elif hasattr(v, 'id'): + elif isinstance(v, DBRef) and hasattr(v, 'id'): data[k] = 
self.object_map.get((v.collection, v.id), v) if instance and name: diff --git a/mongoengine/document.py b/mongoengine/document.py index e86a45d9..cdeed4c6 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -39,7 +39,7 @@ class InvalidCollectionError(Exception): pass -class EmbeddedDocument(BaseDocument): +class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): """A :class:`~mongoengine.Document` that isn't stored in its own collection. :class:`~mongoengine.EmbeddedDocument`\ s should be used as fields on :class:`~mongoengine.Document`\ s through the @@ -58,7 +58,12 @@ class EmbeddedDocument(BaseDocument): # The __metaclass__ attribute is removed by 2to3 when running with Python3 # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 my_metaclass = DocumentMetaclass - __metaclass__ = DocumentMetaclass + + # A generic embedded document doesn't have any immutable properties + # that describe it uniquely, hence it shouldn't be hashable. You can + # define your own __hash__ method on a subclass if you need your + # embedded documents to be hashable. + __hash__ = None def __init__(self, *args, **kwargs): super(EmbeddedDocument, self).__init__(*args, **kwargs) @@ -89,7 +94,7 @@ class EmbeddedDocument(BaseDocument): self._instance.reload(*args, **kwargs) -class Document(BaseDocument): +class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): """The base class used for defining the structure and properties of collections of documents stored in MongoDB. Inherit from this class, and add fields as class attributes to define a document's structure. 
@@ -144,7 +149,6 @@ class Document(BaseDocument): # The __metaclass__ attribute is removed by 2to3 when running with Python3 # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 my_metaclass = TopLevelDocumentMetaclass - __metaclass__ = TopLevelDocumentMetaclass __slots__ = ('__objects',) @@ -160,6 +164,15 @@ class Document(BaseDocument): """Set the primary key.""" return setattr(self, self._meta['id_field'], value) + def __hash__(self): + """Return the hash based on the PK of this document. If it's new + and doesn't have a PK yet, return the default object hash instead. + """ + if self.pk is None: + return super(BaseDocument, self).__hash__() + else: + return hash(self.pk) + @classmethod def _get_db(cls): """Some Model using other db_alias""" @@ -167,45 +180,66 @@ class Document(BaseDocument): @classmethod def _get_collection(cls): - """Returns the collection for the document.""" - # TODO: use new get_collection() with PyMongo3 ? + """Return a PyMongo collection for the document.""" if not hasattr(cls, '_collection') or cls._collection is None: - db = cls._get_db() - collection_name = cls._get_collection_name() - # Create collection as a capped collection if specified - if cls._meta.get('max_size') or cls._meta.get('max_documents'): - # Get max document limit and max byte size from meta - max_size = cls._meta.get('max_size') or 10 * 2 ** 20 # 10MB default - max_documents = cls._meta.get('max_documents') - # Round up to next 256 bytes as MongoDB would do it to avoid exception - if max_size % 256: - max_size = (max_size // 256 + 1) * 256 - if collection_name in db.collection_names(): - cls._collection = db[collection_name] - # The collection already exists, check if its capped - # options match the specified capped options - options = cls._collection.options() - if options.get('max') != max_documents or \ - options.get('size') != max_size: - msg = (('Cannot create collection "%s" as a capped ' - 'collection as it already exists') - % 
cls._collection) - raise InvalidCollectionError(msg) - else: - # Create the collection as a capped collection - opts = {'capped': True, 'size': max_size} - if max_documents: - opts['max'] = max_documents - cls._collection = db.create_collection( - collection_name, **opts - ) + # Get the collection, either capped or regular. + if cls._meta.get('max_size') or cls._meta.get('max_documents'): + cls._collection = cls._get_capped_collection() else: + db = cls._get_db() + collection_name = cls._get_collection_name() cls._collection = db[collection_name] - if cls._meta.get('auto_create_index', True): + + # Ensure indexes on the collection unless auto_create_index was + # set to False. + # Also there is no need to ensure indexes on slave. + db = cls._get_db() + if cls._meta.get('auto_create_index', True) and\ + db.client.is_primary: cls.ensure_indexes() + return cls._collection + @classmethod + def _get_capped_collection(cls): + """Create a new or get an existing capped PyMongo collection.""" + db = cls._get_db() + collection_name = cls._get_collection_name() + + # Get max document limit and max byte size from meta. + max_size = cls._meta.get('max_size') or 10 * 2 ** 20 # 10MB default + max_documents = cls._meta.get('max_documents') + + # MongoDB will automatically raise the size to make it a multiple of + # 256 bytes. We raise it here ourselves to be able to reliably compare + # the options below. + if max_size % 256: + max_size = (max_size // 256 + 1) * 256 + + # If the collection already exists and has different options + # (i.e. isn't capped or has different max/size), raise an error. 
+ if collection_name in db.collection_names(): + collection = db[collection_name] + options = collection.options() + if ( + options.get('max') != max_documents or + options.get('size') != max_size + ): + raise InvalidCollectionError( + 'Cannot create collection "{}" as a capped ' + 'collection as it already exists'.format(cls._collection) + ) + + return collection + + # Create a new capped collection. + opts = {'capped': True, 'size': max_size} + if max_documents: + opts['max'] = max_documents + + return db.create_collection(collection_name, **opts) + def to_mongo(self, *args, **kwargs): data = super(Document, self).to_mongo(*args, **kwargs) @@ -247,6 +281,9 @@ class Document(BaseDocument): elif query[id_field] != self.pk: raise InvalidQueryError('Invalid document modify query: it must modify only this document.') + # Need to add shard key to query, or you get an error + query.update(self._object_key) + updated = self._qs(**query).modify(new=True, **update) if updated is None: return False @@ -267,7 +304,7 @@ class Document(BaseDocument): created. :param force_insert: only try to create a new document, don't allow - updates of existing documents + updates of existing documents. :param validate: validates the document; set to ``False`` to skip. :param clean: call the document clean method, requires `validate` to be True. @@ -287,7 +324,7 @@ class Document(BaseDocument): :param save_condition: only perform save if matching record in db satisfies condition(s) (e.g. version number). Raises :class:`OperationError` if the conditions are not satisfied - :parm signal_kwargs: (optional) kwargs dictionary to be passed to + :param signal_kwargs: (optional) kwargs dictionary to be passed to the signal calls. .. versionchanged:: 0.5 @@ -313,6 +350,9 @@ class Document(BaseDocument): .. 
versionchanged:: 0.10.7 Add signal_kwargs argument """ + if self._meta.get('abstract'): + raise InvalidDocumentError('Cannot save an abstract document.') + signal_kwargs = signal_kwargs or {} signals.pre_save.send(self.__class__, document=self, **signal_kwargs) @@ -329,68 +369,20 @@ class Document(BaseDocument): signals.pre_save_post_validation.send(self.__class__, document=self, created=created, **signal_kwargs) + if self._meta.get('auto_create_index', True): + self.ensure_indexes() + try: - collection = self._get_collection() - if self._meta.get('auto_create_index', True): - self.ensure_indexes() + # Save a new document or update an existing one if created: - if force_insert: - object_id = collection.insert(doc, **write_concern) - else: - object_id = collection.save(doc, **write_concern) - # In PyMongo 3.0, the save() call calls internally the _update() call - # but they forget to return the _id value passed back, therefore getting it back here - # Correct behaviour in 2.X and in 3.0.1+ versions - if not object_id and pymongo.version_tuple == (3, 0): - pk_as_mongo_obj = self._fields.get(self._meta['id_field']).to_mongo(self.pk) - object_id = ( - self._qs.filter(pk=pk_as_mongo_obj).first() and - self._qs.filter(pk=pk_as_mongo_obj).first().pk - ) # TODO doesn't this make 2 queries? 
+ object_id = self._save_create(doc, force_insert, write_concern) else: - object_id = doc['_id'] - updates, removals = self._delta() - # Need to add shard key to query, or you get an error - if save_condition is not None: - select_dict = transform.query(self.__class__, - **save_condition) - else: - select_dict = {} - select_dict['_id'] = object_id - shard_key = self._meta.get('shard_key', tuple()) - for k in shard_key: - path = self._lookup_field(k.split('.')) - actual_key = [p.db_field for p in path] - val = doc - for ak in actual_key: - val = val[ak] - select_dict['.'.join(actual_key)] = val - - def is_new_object(last_error): - if last_error is not None: - updated = last_error.get('updatedExisting') - if updated is not None: - return not updated - return created - - update_query = {} - - if updates: - update_query['$set'] = updates - if removals: - update_query['$unset'] = removals - if updates or removals: - upsert = save_condition is None - last_error = collection.update(select_dict, update_query, - upsert=upsert, **write_concern) - if not upsert and last_error['n'] == 0: - raise SaveConditionError('Race condition preventing' - ' document update detected') - created = is_new_object(last_error) + object_id, created = self._save_update(doc, save_condition, + write_concern) if cascade is None: - cascade = self._meta.get( - 'cascade', False) or cascade_kwargs is not None + cascade = (self._meta.get('cascade', False) or + cascade_kwargs is not None) if cascade: kwargs = { @@ -403,6 +395,7 @@ class Document(BaseDocument): kwargs.update(cascade_kwargs) kwargs['_refs'] = _refs self.cascade_save(**kwargs) + except pymongo.errors.DuplicateKeyError as err: message = u'Tried to save duplicate unique keys (%s)' raise NotUniqueError(message % six.text_type(err)) @@ -415,16 +408,101 @@ class Document(BaseDocument): raise NotUniqueError(message % six.text_type(err)) raise OperationError(message % six.text_type(err)) + # Make sure we store the PK on this document now that it's 
saved id_field = self._meta['id_field'] if created or id_field not in self._meta.get('shard_key', []): self[id_field] = self._fields[id_field].to_python(object_id) signals.post_save.send(self.__class__, document=self, created=created, **signal_kwargs) + self._clear_changed_fields() self._created = False + return self + def _save_create(self, doc, force_insert, write_concern): + """Save a new document. + + Helper method, should only be used inside save(). + """ + collection = self._get_collection() + + if force_insert: + return collection.insert(doc, **write_concern) + + object_id = collection.save(doc, **write_concern) + + # In PyMongo 3.0, the save() call calls internally the _update() call + # but they forget to return the _id value passed back, therefore getting it back here + # Correct behaviour in 2.X and in 3.0.1+ versions + if not object_id and pymongo.version_tuple == (3, 0): + pk_as_mongo_obj = self._fields.get(self._meta['id_field']).to_mongo(self.pk) + object_id = ( + self._qs.filter(pk=pk_as_mongo_obj).first() and + self._qs.filter(pk=pk_as_mongo_obj).first().pk + ) # TODO doesn't this make 2 queries? + + return object_id + + def _get_update_doc(self): + """Return a dict containing all the $set and $unset operations + that should be sent to MongoDB based on the changes made to this + Document. + """ + updates, removals = self._delta() + + update_doc = {} + if updates: + update_doc['$set'] = updates + if removals: + update_doc['$unset'] = removals + + return update_doc + + def _save_update(self, doc, save_condition, write_concern): + """Update an existing document. + + Helper method, should only be used inside save(). 
+ """ + collection = self._get_collection() + object_id = doc['_id'] + created = False + + select_dict = {} + if save_condition is not None: + select_dict = transform.query(self.__class__, **save_condition) + + select_dict['_id'] = object_id + + # Need to add shard key to query, or you get an error + shard_key = self._meta.get('shard_key', tuple()) + for k in shard_key: + path = self._lookup_field(k.split('.')) + actual_key = [p.db_field for p in path] + val = doc + for ak in actual_key: + val = val[ak] + select_dict['.'.join(actual_key)] = val + + update_doc = self._get_update_doc() + if update_doc: + upsert = save_condition is None + last_error = collection.update(select_dict, update_doc, + upsert=upsert, **write_concern) + if not upsert and last_error['n'] == 0: + raise SaveConditionError('Race condition preventing' + ' document update detected') + if last_error is not None: + updated_existing = last_error.get('updatedExisting') + if updated_existing is False: + created = True + # !!! This is bad, means we accidentally created a new, + # potentially corrupted document. See + # https://github.com/MongoEngine/mongoengine/issues/564 + + return object_id, created + def cascade_save(self, **kwargs): """Recursively save any references and generic references on the document. @@ -502,12 +580,11 @@ class Document(BaseDocument): """Delete the :class:`~mongoengine.Document` from the database. This will only take effect if the document has been previously saved. - :parm signal_kwargs: (optional) kwargs dictionary to be passed to + :param signal_kwargs: (optional) kwargs dictionary to be passed to the signal calls. :param write_concern: Extra keyword arguments are passed down which - will be used as options for the resultant - ``getLastError`` command. For example, - ``save(..., write_concern={w: 2, fsync: True}, ...)`` will + will be used as options for the resultant ``getLastError`` command. 
+ For example, ``save(..., w: 2, fsync: True)`` will wait until at least two servers have recorded the write and will force an fsync on the primary server. @@ -628,7 +705,6 @@ class Document(BaseDocument): obj = obj[0] else: raise self.DoesNotExist('Document does not exist') - for field in obj._data: if not fields or field in fields: try: @@ -636,7 +712,7 @@ class Document(BaseDocument): except (KeyError, AttributeError): try: # If field is a special field, e.g. items is stored as _reserved_items, - # an KeyError is thrown. So try to retrieve the field from _data + # a KeyError is thrown. So try to retrieve the field from _data setattr(self, field, self._reload(field, obj._data.get(field))) except KeyError: # If field is removed from the database while the object @@ -644,7 +720,9 @@ class Document(BaseDocument): # i.e. obj.update(unset__field=1) followed by obj.reload() delattr(self, field) - self._changed_fields = obj._changed_fields + self._changed_fields = list( + set(self._changed_fields) - set(fields) + ) if fields else obj._changed_fields self._created = False return self @@ -828,7 +906,6 @@ class Document(BaseDocument): """ Lists all of the indexes that should be created for given collection. It includes all the indexes from super- and sub-classes. 
""" - if cls._meta.get('abstract'): return [] @@ -891,8 +968,16 @@ class Document(BaseDocument): """ required = cls.list_indexes() - existing = [info['key'] - for info in cls._get_collection().index_information().values()] + + existing = [] + for info in cls._get_collection().index_information().values(): + if '_fts' in info['key'][0]: + index_type = info['key'][0][1] + text_index_fields = info.get('weights').keys() + existing.append( + [(key, index_type) for key in text_index_fields]) + else: + existing.append(info['key']) missing = [index for index in required if index not in existing] extra = [index for index in existing if index not in required] @@ -909,10 +994,10 @@ class Document(BaseDocument): return {'missing': missing, 'extra': extra} -class DynamicDocument(Document): +class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)): """A Dynamic Document class allowing flexible, expandable and uncontrolled schemas. As a :class:`~mongoengine.Document` subclass, acts in the same - way as an ordinary document but has expando style properties. Any data + way as an ordinary document but has expanded style properties. 
Any data passed or set against the :class:`~mongoengine.DynamicDocument` that is not a field is automatically converted into a :class:`~mongoengine.fields.DynamicField` and data can be attributed to that @@ -926,7 +1011,6 @@ class DynamicDocument(Document): # The __metaclass__ attribute is removed by 2to3 when running with Python3 # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 my_metaclass = TopLevelDocumentMetaclass - __metaclass__ = TopLevelDocumentMetaclass _dynamic = True @@ -937,11 +1021,12 @@ class DynamicDocument(Document): field_name = args[0] if field_name in self._dynamic_fields: setattr(self, field_name, None) + self._dynamic_fields[field_name].null = False else: super(DynamicDocument, self).__delattr__(*args, **kwargs) -class DynamicEmbeddedDocument(EmbeddedDocument): +class DynamicEmbeddedDocument(six.with_metaclass(DocumentMetaclass, EmbeddedDocument)): """A Dynamic Embedded Document class allowing flexible, expandable and uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more information about dynamic documents. @@ -950,7 +1035,6 @@ class DynamicEmbeddedDocument(EmbeddedDocument): # The __metaclass__ attribute is removed by 2to3 when running with Python3 # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 my_metaclass = DocumentMetaclass - __metaclass__ = DocumentMetaclass _dynamic = True diff --git a/mongoengine/errors.py b/mongoengine/errors.py index 2549e822..131596d1 100644 --- a/mongoengine/errors.py +++ b/mongoengine/errors.py @@ -50,8 +50,8 @@ class FieldDoesNotExist(Exception): or an :class:`~mongoengine.EmbeddedDocument`. To avoid this behavior on data loading, - you should the :attr:`strict` to ``False`` - in the :attr:`meta` dictionnary. + you should set the :attr:`strict` to ``False`` + in the :attr:`meta` dictionary. 
""" diff --git a/mongoengine/fields.py b/mongoengine/fields.py index eec31829..7732da2d 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -2,9 +2,9 @@ import datetime import decimal import itertools import re +import socket import time import uuid -import warnings from operator import itemgetter from bson import Binary, DBRef, ObjectId, SON @@ -24,11 +24,15 @@ try: except ImportError: Int64 = long + from mongoengine.base import (BaseDocument, BaseField, ComplexBaseField, - GeoJsonBaseField, ObjectIdField, get_document) + GeoJsonBaseField, LazyReference, ObjectIdField, + get_document) +from mongoengine.base.utils import LazyRegexCompiler +from mongoengine.common import _import_class from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db from mongoengine.document import Document, EmbeddedDocument -from mongoengine.errors import DoesNotExist, ValidationError +from mongoengine.errors import DoesNotExist, InvalidQueryError, ValidationError from mongoengine.python_support import StringIO from mongoengine.queryset import DO_NOTHING, QuerySet @@ -38,13 +42,20 @@ except ImportError: Image = None ImageOps = None +if six.PY3: + # Useless as long as 2to3 gets executed + # as it turns `long` into `int` blindly + long = int + + __all__ = ( 'StringField', 'URLField', 'EmailField', 'IntField', 'LongField', - 'FloatField', 'DecimalField', 'BooleanField', 'DateTimeField', + 'FloatField', 'DecimalField', 'BooleanField', 'DateTimeField', 'DateField', 'ComplexDateTimeField', 'EmbeddedDocumentField', 'ObjectIdField', 'GenericEmbeddedDocumentField', 'DynamicField', 'ListField', 'SortedListField', 'EmbeddedDocumentListField', 'DictField', 'MapField', 'ReferenceField', 'CachedReferenceField', + 'LazyReferenceField', 'GenericLazyReferenceField', 'GenericReferenceField', 'BinaryField', 'GridFSError', 'GridFSProxy', 'FileField', 'ImageGridFsProxy', 'ImproperlyConfigured', 'ImageField', 'GeoPointField', 'PointField', 'LineStringField', 'PolygonField', @@ -119,7 
+130,7 @@ class URLField(StringField): .. versionadded:: 0.3 """ - _URL_REGEX = re.compile( + _URL_REGEX = LazyRegexCompiler( r'^(?:[a-z0-9\.\-]*)://' # scheme is validated separately r'(?:(?:[A-Z0-9](?:[A-Z0-9-_]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}(?>> ComplexDateTimeField()._convert_from_string(a) datetime.datetime(2011, 6, 8, 20, 26, 24, 92284) """ - values = map(int, data.split(self.separator)) + values = [int(d) for d in data.split(self.separator)] return datetime.datetime(*values) def __get__(self, instance, owner): + if instance is None: + return self + data = super(ComplexDateTimeField, self).__get__(instance, owner) - if data is None: - return None if self.null else datetime.datetime.now() - if isinstance(data, datetime.datetime): + + if isinstance(data, datetime.datetime) or data is None: return data return self._convert_from_string(data) def __set__(self, instance, value): - value = self._convert_from_datetime(value) if value else value - return super(ComplexDateTimeField, self).__set__(instance, value) + super(ComplexDateTimeField, self).__set__(instance, value) + value = instance._data[self.name] + if value is not None: + instance._data[self.name] = self._convert_from_datetime(value) def validate(self, value): value = self.to_python(value) @@ -522,9 +650,10 @@ class EmbeddedDocumentField(BaseField): """ def __init__(self, document_type, **kwargs): - if ( - not isinstance(document_type, six.string_types) and - not issubclass(document_type, EmbeddedDocument) + # XXX ValidationError raised outside of the "validate" method. 
+ if not ( + isinstance(document_type, six.string_types) or + issubclass(document_type, EmbeddedDocument) ): self.error('Invalid embedded document class provided to an ' 'EmbeddedDocumentField') @@ -536,9 +665,17 @@ class EmbeddedDocumentField(BaseField): def document_type(self): if isinstance(self.document_type_obj, six.string_types): if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: - self.document_type_obj = self.owner_document + resolved_document_type = self.owner_document else: - self.document_type_obj = get_document(self.document_type_obj) + resolved_document_type = get_document(self.document_type_obj) + + if not issubclass(resolved_document_type, EmbeddedDocument): + # Due to the late resolution of the document_type + # There is a chance that it won't be an EmbeddedDocument (#1661) + self.error('Invalid embedded document class provided to an ' + 'EmbeddedDocumentField') + self.document_type_obj = resolved_document_type + return self.document_type_obj def to_python(self, value): @@ -566,7 +703,11 @@ class EmbeddedDocumentField(BaseField): def prepare_query_value(self, op, value): if value is not None and not isinstance(value, self.document_type): - value = self.document_type._from_son(value) + try: + value = self.document_type._from_son(value) + except ValueError: + raise InvalidQueryError("Querying the embedded document '%s' failed, due to an invalid query value" % + (self.document_type._class_name,)) super(EmbeddedDocumentField, self).prepare_query_value(op, value) return self.to_mongo(value) @@ -593,16 +734,28 @@ class GenericEmbeddedDocumentField(BaseField): return value def validate(self, value, clean=True): + if self.choices and isinstance(value, SON): + for choice in self.choices: + if value['_cls'] == choice._class_name: + return True + if not isinstance(value, EmbeddedDocument): self.error('Invalid embedded document instance provided to an ' 'GenericEmbeddedDocumentField') value.validate(clean=clean) + def lookup_member(self, member_name): + 
if self.choices: + for choice in self.choices: + field = choice._fields.get(member_name) + if field: + return field + return None + def to_mongo(self, document, use_db_field=True, fields=None): if document is None: return None - data = document.to_mongo(use_db_field, fields) if '_cls' not in data: data['_cls'] = document._class_name @@ -686,6 +839,17 @@ class ListField(ComplexBaseField): kwargs.setdefault('default', lambda: []) super(ListField, self).__init__(**kwargs) + def __get__(self, instance, owner): + if instance is None: + # Document class being used rather than a document object + return self + value = instance._data.get(self.name) + LazyReferenceField = _import_class('LazyReferenceField') + GenericLazyReferenceField = _import_class('GenericLazyReferenceField') + if isinstance(self.field, (LazyReferenceField, GenericLazyReferenceField)) and value: + instance._data[self.name] = [self.field.build_lazyref(x) for x in value] + return super(ListField, self).__get__(instance, owner) + def validate(self, value): """Make sure that a list of valid fields is being used.""" if (not isinstance(value, (list, tuple, QuerySet)) or @@ -796,12 +960,10 @@ class DictField(ComplexBaseField): .. 
versionchanged:: 0.5 - Can now handle complex / varying types of data """ - def __init__(self, basecls=None, field=None, *args, **kwargs): + def __init__(self, field=None, *args, **kwargs): self.field = field self._auto_dereference = False - self.basecls = basecls or BaseField - if not issubclass(self.basecls, BaseField): - self.error('DictField only accepts dict values') + kwargs.setdefault('default', lambda: {}) super(DictField, self).__init__(*args, **kwargs) @@ -820,7 +982,7 @@ class DictField(ComplexBaseField): super(DictField, self).validate(value) def lookup_member(self, member_name): - return DictField(basecls=self.basecls, db_field=member_name) + return DictField(db_field=member_name) def prepare_query_value(self, op, value): match_operators = ['contains', 'icontains', 'startswith', @@ -850,6 +1012,7 @@ class MapField(DictField): """ def __init__(self, field=None, *args, **kwargs): + # XXX ValidationError raised outside of the "validate" method. if not isinstance(field, BaseField): self.error('Argument to MapField constructor must be a valid ' 'field') @@ -860,6 +1023,15 @@ class ReferenceField(BaseField): """A reference to a document that will be automatically dereferenced on access (lazily). + Note this means you will get a database I/O access everytime you access + this field. This is necessary because the field returns a :class:`~mongoengine.Document` + which precise type can depend of the value of the `_cls` field present in the + document in database. + In short, using this type of field can lead to poor performances (especially + if you access this field only to retrieve it `pk` field which is already + known before dereference). To solve this you should consider using the + :class:`~mongoengine.fields.LazyReferenceField`. + Use the `reverse_delete_rule` to handle what should happen if the document the field is referencing is deleted. 
EmbeddedDocuments, DictFields and MapFields does not support reverse_delete_rule and an `InvalidDocumentError` @@ -878,15 +1050,13 @@ class ReferenceField(BaseField): .. code-block:: python - class Bar(Document): - content = StringField() - foo = ReferenceField('Foo') + class Org(Document): + owner = ReferenceField('User') - Foo.register_delete_rule(Bar, 'foo', NULLIFY) + class User(Document): + org = ReferenceField('Org', reverse_delete_rule=CASCADE) - .. note :: - `reverse_delete_rule` does not trigger pre / post delete signals to be - triggered. + User.register_delete_rule(Org, 'owner', DENY) .. versionchanged:: 0.5 added `reverse_delete_rule` """ @@ -904,6 +1074,7 @@ class ReferenceField(BaseField): A reference to an abstract document type is always stored as a :class:`~pymongo.dbref.DBRef`, regardless of the value of `dbref`. """ + # XXX ValidationError raised outside of the "validate" method. if ( not isinstance(document_type, six.string_types) and not issubclass(document_type, Document) @@ -958,6 +1129,8 @@ class ReferenceField(BaseField): if isinstance(document, Document): # We need the id from the saved object to create the DBRef id_ = document.pk + + # XXX ValidationError raised outside of the "validate" method. 
if id_ is None: self.error('You can only reference documents once they have' ' been saved to the database') @@ -997,19 +1170,20 @@ class ReferenceField(BaseField): return self.to_mongo(value) def validate(self, value): - - if not isinstance(value, (self.document_type, DBRef)): - self.error('A ReferenceField only accepts DBRef or documents') + if not isinstance(value, (self.document_type, LazyReference, DBRef, ObjectId)): + self.error('A ReferenceField only accepts DBRef, LazyReference, ObjectId or documents') if isinstance(value, Document) and value.id is None: self.error('You can only reference documents once they have been ' 'saved to the database') - if self.document_type._meta.get('abstract') and \ - not isinstance(value, self.document_type): + if ( + self.document_type._meta.get('abstract') and + not isinstance(value, self.document_type) + ): self.error( '%s is not an instance of abstract reference type %s' % ( - self.document_type._class_name) + value, self.document_type._class_name) ) def lookup_member(self, member_name): @@ -1032,6 +1206,7 @@ class CachedReferenceField(BaseField): if fields is None: fields = [] + # XXX ValidationError raised outside of the "validate" method. if ( not isinstance(document_type, six.string_types) and not issubclass(document_type, Document) @@ -1106,6 +1281,7 @@ class CachedReferenceField(BaseField): id_field_name = self.document_type._meta['id_field'] id_field = self.document_type._fields[id_field_name] + # XXX ValidationError raised outside of the "validate" method. 
if isinstance(document, Document): # We need the id from the saved object to create the DBRef id_ = document.pk @@ -1114,7 +1290,6 @@ class CachedReferenceField(BaseField): ' been saved to the database') else: self.error('Only accept a document object') - # TODO: should raise here or will fail next statement value = SON(( ('_id', id_field.to_mongo(id_)), @@ -1132,16 +1307,20 @@ class CachedReferenceField(BaseField): if value is None: return None + # XXX ValidationError raised outside of the "validate" method. if isinstance(value, Document): if value.pk is None: self.error('You can only reference documents once they have' ' been saved to the database') - return {'_id': value.pk} + value_dict = {'_id': value.pk} + for field in self.fields: + value_dict.update({field: value[field]}) + + return value_dict raise NotImplementedError def validate(self, value): - if not isinstance(value, self.document_type): self.error('A CachedReferenceField only accepts documents') @@ -1174,6 +1353,12 @@ class GenericReferenceField(BaseField): """A reference to *any* :class:`~mongoengine.document.Document` subclass that will be automatically dereferenced on access (lazily). + Note this field works the same way as :class:`~mongoengine.document.ReferenceField`, + doing database I/O access the first time it is accessed (even if it's to access + it ``pk`` or ``id`` field). + To solve this you should consider using the + :class:`~mongoengine.fields.GenericLazyReferenceField`. + .. note :: * Any documents used as a generic reference must be registered in the document registry. Importing the model will automatically register @@ -1196,6 +1381,8 @@ class GenericReferenceField(BaseField): elif isinstance(choice, type) and issubclass(choice, Document): self.choices.append(choice._class_name) else: + # XXX ValidationError raised outside of the "validate" + # method. 
self.error('Invalid choices provided: must be a list of' 'Document subclasses and/or six.string_typess') @@ -1259,6 +1446,7 @@ class GenericReferenceField(BaseField): # We need the id from the saved object to create the DBRef id_ = document.id if id_ is None: + # XXX ValidationError raised outside of the "validate" method. self.error('You can only reference documents once they have' ' been saved to the database') else: @@ -1344,9 +1532,11 @@ class GridFSProxy(object): def __get__(self, instance, value): return self - def __nonzero__(self): + def __bool__(self): return bool(self.grid_id) + __nonzero__ = __bool__ # For Py2 support + def __getstate__(self): self_dict = self.__dict__ self_dict['_fs'] = None @@ -1364,9 +1554,9 @@ class GridFSProxy(object): return '<%s: %s>' % (self.__class__.__name__, self.grid_id) def __str__(self): - name = getattr( - self.get(), 'filename', self.grid_id) if self.get() else '(no file)' - return '<%s: %s>' % (self.__class__.__name__, name) + gridout = self.get() + filename = getattr(gridout, 'filename') if gridout else '' + return '<%s: %s (%s)>' % (self.__class__.__name__, filename, self.grid_id) def __eq__(self, other): if isinstance(other, GridFSProxy): @@ -1376,6 +1566,9 @@ class GridFSProxy(object): else: return False + def __ne__(self, other): + return not self == other + @property def fs(self): if not self._fs: @@ -2049,3 +2242,201 @@ class MultiPolygonField(GeoJsonBaseField): .. versionadded:: 0.9 """ _type = 'MultiPolygon' + + +class LazyReferenceField(BaseField): + """A really lazy reference to a document. + Unlike the :class:`~mongoengine.fields.ReferenceField` it will + **not** be automatically (lazily) dereferenced on access. + Instead, access will return a :class:`~mongoengine.base.LazyReference` class + instance, allowing access to `pk` or manual dereference by using + ``fetch()`` method. + + .. 
versionadded:: 0.15 + """ + + def __init__(self, document_type, passthrough=False, dbref=False, + reverse_delete_rule=DO_NOTHING, **kwargs): + """Initialises the Reference Field. + + :param dbref: Store the reference as :class:`~pymongo.dbref.DBRef` + or as the :class:`~pymongo.objectid.ObjectId`.id . + :param reverse_delete_rule: Determines what to do when the referring + object is deleted + :param passthrough: When trying to access unknown fields, the + :class:`~mongoengine.base.datastructure.LazyReference` instance will + automatically call `fetch()` and try to retrive the field on the fetched + document. Note this only work getting field (not setting or deleting). + """ + # XXX ValidationError raised outside of the "validate" method. + if ( + not isinstance(document_type, six.string_types) and + not issubclass(document_type, Document) + ): + self.error('Argument to LazyReferenceField constructor must be a ' + 'document class or a string') + + self.dbref = dbref + self.passthrough = passthrough + self.document_type_obj = document_type + self.reverse_delete_rule = reverse_delete_rule + super(LazyReferenceField, self).__init__(**kwargs) + + @property + def document_type(self): + if isinstance(self.document_type_obj, six.string_types): + if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: + self.document_type_obj = self.owner_document + else: + self.document_type_obj = get_document(self.document_type_obj) + return self.document_type_obj + + def build_lazyref(self, value): + if isinstance(value, LazyReference): + if value.passthrough != self.passthrough: + value = LazyReference(value.document_type, value.pk, passthrough=self.passthrough) + elif value is not None: + if isinstance(value, self.document_type): + value = LazyReference(self.document_type, value.pk, passthrough=self.passthrough) + elif isinstance(value, DBRef): + value = LazyReference(self.document_type, value.id, passthrough=self.passthrough) + else: + # value is the primary key of the referenced 
document + value = LazyReference(self.document_type, value, passthrough=self.passthrough) + return value + + def __get__(self, instance, owner): + """Descriptor to allow lazy dereferencing.""" + if instance is None: + # Document class being used rather than a document object + return self + + value = self.build_lazyref(instance._data.get(self.name)) + if value: + instance._data[self.name] = value + + return super(LazyReferenceField, self).__get__(instance, owner) + + def to_mongo(self, value): + if isinstance(value, LazyReference): + pk = value.pk + elif isinstance(value, self.document_type): + pk = value.pk + elif isinstance(value, DBRef): + pk = value.id + else: + # value is the primary key of the referenced document + pk = value + id_field_name = self.document_type._meta['id_field'] + id_field = self.document_type._fields[id_field_name] + pk = id_field.to_mongo(pk) + if self.dbref: + return DBRef(self.document_type._get_collection_name(), pk) + else: + return pk + + def validate(self, value): + if isinstance(value, LazyReference): + if value.collection != self.document_type._get_collection_name(): + self.error('Reference must be on a `%s` document.' % self.document_type) + pk = value.pk + elif isinstance(value, self.document_type): + pk = value.pk + elif isinstance(value, DBRef): + # TODO: check collection ? + collection = self.document_type._get_collection_name() + if value.collection != collection: + self.error("DBRef on bad collection (must be on `%s`)" % collection) + pk = value.id + else: + # value is the primary key of the referenced document + id_field_name = self.document_type._meta['id_field'] + id_field = getattr(self.document_type, id_field_name) + pk = value + try: + id_field.validate(pk) + except ValidationError: + self.error( + "value should be `{0}` document, LazyReference or DBRef on `{0}` " + "or `{0}`'s primary key (i.e. 
`{1}`)".format( + self.document_type.__name__, type(id_field).__name__)) + + if pk is None: + self.error('You can only reference documents once they have been ' + 'saved to the database') + + def prepare_query_value(self, op, value): + if value is None: + return None + super(LazyReferenceField, self).prepare_query_value(op, value) + return self.to_mongo(value) + + def lookup_member(self, member_name): + return self.document_type._fields.get(member_name) + + +class GenericLazyReferenceField(GenericReferenceField): + """A reference to *any* :class:`~mongoengine.document.Document` subclass. + Unlike the :class:`~mongoengine.fields.GenericReferenceField` it will + **not** be automatically (lazily) dereferenced on access. + Instead, access will return a :class:`~mongoengine.base.LazyReference` class + instance, allowing access to `pk` or manual dereference by using + ``fetch()`` method. + + .. note :: + * Any documents used as a generic reference must be registered in the + document registry. Importing the model will automatically register + it. + + * You can use the choices param to limit the acceptable Document types + + .. 
versionadded:: 0.15 + """ + + def __init__(self, *args, **kwargs): + self.passthrough = kwargs.pop('passthrough', False) + super(GenericLazyReferenceField, self).__init__(*args, **kwargs) + + def _validate_choices(self, value): + if isinstance(value, LazyReference): + value = value.document_type._class_name + super(GenericLazyReferenceField, self)._validate_choices(value) + + def build_lazyref(self, value): + if isinstance(value, LazyReference): + if value.passthrough != self.passthrough: + value = LazyReference(value.document_type, value.pk, passthrough=self.passthrough) + elif value is not None: + if isinstance(value, (dict, SON)): + value = LazyReference(get_document(value['_cls']), value['_ref'].id, passthrough=self.passthrough) + elif isinstance(value, Document): + value = LazyReference(type(value), value.pk, passthrough=self.passthrough) + return value + + def __get__(self, instance, owner): + if instance is None: + return self + + value = self.build_lazyref(instance._data.get(self.name)) + if value: + instance._data[self.name] = value + + return super(GenericLazyReferenceField, self).__get__(instance, owner) + + def validate(self, value): + if isinstance(value, LazyReference) and value.pk is None: + self.error('You can only reference documents once they have been' + ' saved to the database') + return super(GenericLazyReferenceField, self).validate(value) + + def to_mongo(self, document): + if document is None: + return None + + if isinstance(document, LazyReference): + return SON(( + ('_cls', document.document_type._class_name), + ('_ref', DBRef(document.document_type._get_collection_name(), document.pk)) + )) + else: + return super(GenericLazyReferenceField, self).to_mongo(document) diff --git a/mongoengine/python_support.py b/mongoengine/python_support.py index e51e1bc9..e884b4ea 100644 --- a/mongoengine/python_support.py +++ b/mongoengine/python_support.py @@ -6,11 +6,7 @@ import pymongo import six -if pymongo.version_tuple[0] < 3: - IS_PYMONGO_3 = False 
-else: - IS_PYMONGO_3 = True - +IS_PYMONGO_3 = pymongo.version_tuple[0] >= 3 # six.BytesIO resolves to StringIO.StringIO in Py2 and io.BytesIO in Py3. StringIO = six.BytesIO diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index 3ee978b8..0be48654 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -2,7 +2,6 @@ from __future__ import absolute_import import copy import itertools -import operator import pprint import re import warnings @@ -18,7 +17,7 @@ from mongoengine import signals from mongoengine.base import get_document from mongoengine.common import _import_class from mongoengine.connection import get_db -from mongoengine.context_managers import switch_db +from mongoengine.context_managers import set_write_concern, switch_db from mongoengine.errors import (InvalidQueryError, LookUpError, NotUniqueError, OperationError) from mongoengine.python_support import IS_PYMONGO_3 @@ -67,7 +66,6 @@ class BaseQuerySet(object): self._scalar = [] self._none = False self._as_pymongo = False - self._as_pymongo_coerce = False self._search_text = None # If inheritance is allowed, only return instances and instances of @@ -86,6 +84,7 @@ class BaseQuerySet(object): self._batch_size = None self.only_fields = [] self._max_time_ms = None + self._comment = None def __call__(self, q_obj=None, class_check=True, read_preference=None, **query): @@ -157,44 +156,49 @@ class BaseQuerySet(object): # self._cursor def __getitem__(self, key): - """Support skip and limit using getitem and slicing syntax.""" + """Return a document instance corresponding to a given index if + the key is an integer. If the key is a slice, translate its + bounds into a skip and a limit, and return a cloned queryset + with that skip/limit applied. 
For example: + + >>> User.objects[0] + + >>> User.objects[1:3] + [, ] + """ queryset = self.clone() - # Slice provided + # Handle a slice if isinstance(key, slice): - try: - queryset._cursor_obj = queryset._cursor[key] - queryset._skip, queryset._limit = key.start, key.stop - if key.start and key.stop: - queryset._limit = key.stop - key.start - except IndexError as err: - # PyMongo raises an error if key.start == key.stop, catch it, - # bin it, kill it. - start = key.start or 0 - if start >= 0 and key.stop >= 0 and key.step is None: - if start == key.stop: - queryset.limit(0) - queryset._skip = key.start - queryset._limit = key.stop - start - return queryset - raise err + queryset._cursor_obj = queryset._cursor[key] + queryset._skip, queryset._limit = key.start, key.stop + if key.start and key.stop: + queryset._limit = key.stop - key.start + # Allow further QuerySet modifications to be performed return queryset - # Integer index provided + + # Handle an index elif isinstance(key, int): if queryset._scalar: return queryset._get_scalar( - queryset._document._from_son(queryset._cursor[key], - _auto_dereference=self._auto_dereference, - only_fields=self.only_fields)) + queryset._document._from_son( + queryset._cursor[key], + _auto_dereference=self._auto_dereference, + only_fields=self.only_fields + ) + ) if queryset._as_pymongo: return queryset._get_as_pymongo(queryset._cursor[key]) - return queryset._document._from_son(queryset._cursor[key], - _auto_dereference=self._auto_dereference, - only_fields=self.only_fields) - raise AttributeError + return queryset._document._from_son( + queryset._cursor[key], + _auto_dereference=self._auto_dereference, + only_fields=self.only_fields + ) + + raise AttributeError('Provide a slice or an integer index') def __iter__(self): raise NotImplementedError @@ -204,14 +208,12 @@ class BaseQuerySet(object): queryset = self.order_by() return False if queryset.first() is None else True - def __nonzero__(self): - """Avoid to open all records 
in an if stmt in Py2.""" - return self._has_data() - def __bool__(self): """Avoid to open all records in an if stmt in Py3.""" return self._has_data() + __nonzero__ = __bool__ # For Py2 support + # Core functions def all(self): @@ -264,13 +266,13 @@ class BaseQuerySet(object): queryset = queryset.filter(*q_objs, **query) try: - result = queryset.next() + result = six.next(queryset) except StopIteration: msg = ('%s matching query does not exist.' % queryset._document._class_name) raise queryset._document.DoesNotExist(msg) try: - queryset.next() + six.next(queryset) except StopIteration: return result @@ -285,7 +287,7 @@ class BaseQuerySet(object): .. versionadded:: 0.4 """ - return self._document(**kwargs).save() + return self._document(**kwargs).save(force_insert=True) def first(self): """Retrieve the first object matching the query.""" @@ -345,11 +347,24 @@ class BaseQuerySet(object): documents=docs, **signal_kwargs) raw = [doc.to_mongo() for doc in docs] + + with set_write_concern(self._collection, write_concern) as collection: + insert_func = collection.insert_many + if return_one: + raw = raw[0] + insert_func = collection.insert_one + try: - ids = self._collection.insert(raw, **write_concern) + inserted_result = insert_func(raw) + ids = return_one and [inserted_result.inserted_id] or inserted_result.inserted_ids except pymongo.errors.DuplicateKeyError as err: message = 'Could not save document (%s)' raise NotUniqueError(message % six.text_type(err)) + except pymongo.errors.BulkWriteError as err: + # inserting documents that already have an _id field will + # give huge performance debt or raise + message = u'Document must not have _id value before bulk write (%s)' + raise NotUniqueError(message % six.text_type(err)) except pymongo.errors.OperationFailure as err: message = 'Could not save document (%s)' if re.match('^E1100[01] duplicate key', six.text_type(err)): @@ -363,7 +378,6 @@ class BaseQuerySet(object): signals.post_bulk_insert.send( self._document, 
documents=docs, loaded=False, **signal_kwargs) return return_one and ids[0] or ids - documents = self.in_bulk(ids) results = [] for obj_id in ids: @@ -379,7 +393,7 @@ class BaseQuerySet(object): :meth:`skip` that has been applied to this cursor into account when getting the count """ - if self._limit == 0 and with_limit_and_skip or self._none: + if self._limit == 0 and with_limit_and_skip is False or self._none: return 0 return self._cursor.count(with_limit_and_skip=with_limit_and_skip) @@ -481,8 +495,9 @@ class BaseQuerySet(object): ``save(..., write_concern={w: 2, fsync: True}, ...)`` will wait until at least two servers have recorded the write and will force an fsync on the primary server. - :param full_result: Return the full result rather than just the number - updated. + :param full_result: Return the full result dictionary rather than just the number + updated, e.g. return + ``{'n': 2, 'nModified': 2, 'ok': 1.0, 'updatedExisting': True}``. :param update: Django-style update keyword arguments .. 
versionadded:: 0.2 @@ -505,12 +520,15 @@ class BaseQuerySet(object): else: update['$set'] = {'_cls': queryset._document._class_name} try: - result = queryset._collection.update(query, update, multi=multi, - upsert=upsert, **write_concern) + with set_write_concern(queryset._collection, write_concern) as collection: + update_func = collection.update_one + if multi: + update_func = collection.update_many + result = update_func(query, update, upsert=upsert) if full_result: return result - elif result: - return result['n'] + elif result.raw_result: + return result.raw_result['n'] except pymongo.errors.DuplicateKeyError as err: raise NotUniqueError(u'Update failed (%s)' % six.text_type(err)) except pymongo.errors.OperationFailure as err: @@ -539,10 +557,10 @@ class BaseQuerySet(object): write_concern=write_concern, full_result=True, **update) - if atomic_update['updatedExisting']: + if atomic_update.raw_result['updatedExisting']: document = self.get() else: - document = self._document.objects.with_id(atomic_update['upserted']) + document = self._document.objects.with_id(atomic_update.upserted_id) return document def update_one(self, upsert=False, write_concern=None, **update): @@ -706,39 +724,37 @@ class BaseQuerySet(object): with switch_db(self._document, alias) as cls: collection = cls._get_collection() - return self.clone_into(self.__class__(self._document, collection)) + return self._clone_into(self.__class__(self._document, collection)) def clone(self): - """Creates a copy of the current - :class:`~mongoengine.queryset.QuerySet` + """Create a copy of the current queryset.""" + return self._clone_into(self.__class__(self._document, self._collection_obj)) - .. versionadded:: 0.5 + def _clone_into(self, new_qs): + """Copy all of the relevant properties of this queryset to + a new queryset (which has to be an instance of + :class:`~mongoengine.queryset.base.BaseQuerySet`). 
""" - return self.clone_into(self.__class__(self._document, self._collection_obj)) - - def clone_into(self, cls): - """Creates a copy of the current - :class:`~mongoengine.queryset.base.BaseQuerySet` into another child class - """ - if not isinstance(cls, BaseQuerySet): + if not isinstance(new_qs, BaseQuerySet): raise OperationError( - '%s is not a subclass of BaseQuerySet' % cls.__name__) + '%s is not a subclass of BaseQuerySet' % new_qs.__name__) copy_props = ('_mongo_query', '_initial_query', '_none', '_query_obj', - '_where_clause', '_loaded_fields', '_ordering', '_snapshot', - '_timeout', '_class_check', '_slave_okay', '_read_preference', - '_iter', '_scalar', '_as_pymongo', '_as_pymongo_coerce', + '_where_clause', '_loaded_fields', '_ordering', + '_snapshot', '_timeout', '_class_check', '_slave_okay', + '_read_preference', '_iter', '_scalar', '_as_pymongo', '_limit', '_skip', '_hint', '_auto_dereference', - '_search_text', 'only_fields', '_max_time_ms') + '_search_text', 'only_fields', '_max_time_ms', + '_comment') for prop in copy_props: val = getattr(self, prop) - setattr(cls, prop, copy.copy(val)) + setattr(new_qs, prop, copy.copy(val)) if self._cursor_obj: - cls._cursor_obj = self._cursor_obj.clone() + new_qs._cursor_obj = self._cursor_obj.clone() - return cls + return new_qs def select_related(self, max_depth=1): """Handles dereferencing of :class:`~bson.dbref.DBRef` objects or @@ -756,11 +772,16 @@ class BaseQuerySet(object): """Limit the number of returned documents to `n`. This may also be achieved using array-slicing syntax (e.g. ``User.objects[:5]``). - :param n: the maximum number of objects to return + :param n: the maximum number of objects to return if n is greater than 0. + When 0 is passed, returns all the documents in the cursor """ queryset = self.clone() - queryset._limit = n if n != 0 else 1 - # Return self to allow chaining + queryset._limit = n + + # If a cursor object has already been created, apply the limit to it. 
+ if queryset._cursor_obj: + queryset._cursor_obj.limit(queryset._limit) + return queryset def skip(self, n): @@ -771,6 +792,11 @@ class BaseQuerySet(object): """ queryset = self.clone() queryset._skip = n + + # If a cursor object has already been created, apply the skip to it. + if queryset._cursor_obj: + queryset._cursor_obj.skip(queryset._skip) + return queryset def hint(self, index=None): @@ -788,6 +814,11 @@ class BaseQuerySet(object): """ queryset = self.clone() queryset._hint = index + + # If a cursor object has already been created, apply the hint to it. + if queryset._cursor_obj: + queryset._cursor_obj.hint(queryset._hint) + return queryset def batch_size(self, size): @@ -801,6 +832,11 @@ class BaseQuerySet(object): """ queryset = self.clone() queryset._batch_size = size + + # If a cursor object has already been created, apply the batch size to it. + if queryset._cursor_obj: + queryset._cursor_obj.batch_size(queryset._batch_size) + return queryset def distinct(self, field): @@ -900,18 +936,25 @@ class BaseQuerySet(object): return self.fields(**fields) def fields(self, _only_called=False, **kwargs): - """Manipulate how you load this document's fields. Used by `.only()` - and `.exclude()` to manipulate which fields to retrieve. Fields also - allows for a greater level of control for example: + """Manipulate how you load this document's fields. Used by `.only()` + and `.exclude()` to manipulate which fields to retrieve. If called + directly, use a set of kwargs similar to the MongoDB projection + document. For example: - Retrieving a Subrange of Array Elements: + Include only a subset of fields: - You can use the $slice operator to retrieve a subrange of elements in - an array. 
For example to get the first 5 comments:: + posts = BlogPost.objects(...).fields(author=1, title=1) - post = BlogPost.objects(...).fields(slice__comments=5) + Exclude a specific field: - :param kwargs: A dictionary identifying what to include + posts = BlogPost.objects(...).fields(comments=0) + + To retrieve a subrange of array elements: + + posts = BlogPost.objects(...).fields(slice__comments=5) + + :param kwargs: A set of keyword arguments identifying what to + include, exclude, or slice. .. versionadded:: 0.5 """ @@ -927,7 +970,20 @@ class BaseQuerySet(object): key = '.'.join(parts) cleaned_fields.append((key, value)) - fields = sorted(cleaned_fields, key=operator.itemgetter(1)) + # Sort fields by their values, explicitly excluded fields first, then + # explicitly included, and then more complicated operators such as + # $slice. + def _sort_key(field_tuple): + key, value = field_tuple + if isinstance(value, (int)): + return value # 0 for exclusion, 1 for inclusion + else: + return 2 # so that complex values appear last + + fields = sorted(cleaned_fields, key=_sort_key) + + # Clone the queryset, group all fields by their value, convert + # each of them to db_fields, and set the queryset's _loaded_fields queryset = self.clone() for value, group in itertools.groupby(fields, lambda x: x[1]): fields = [field for field, value in group] @@ -953,13 +1009,31 @@ class BaseQuerySet(object): def order_by(self, *keys): """Order the :class:`~mongoengine.queryset.QuerySet` by the keys. The order may be specified by prepending each of the keys by a + or a -. - Ascending order is assumed. + Ascending order is assumed. If no keys are passed, existing ordering + is cleared instead. 
:param keys: fields to order the query results by; keys may be prefixed with **+** or **-** to determine the ordering direction """ queryset = self.clone() - queryset._ordering = queryset._get_order_by(keys) + + old_ordering = queryset._ordering + new_ordering = queryset._get_order_by(keys) + + if queryset._cursor_obj: + + # If a cursor object has already been created, apply the sort to it + if new_ordering: + queryset._cursor_obj.sort(new_ordering) + + # If we're trying to clear a previous explicit ordering, we need + # to clear the cursor entirely (because PyMongo doesn't allow + # clearing an existing sort on a cursor). + elif old_ordering: + queryset._cursor_obj = None + + queryset._ordering = new_ordering + return queryset def comment(self, text): @@ -1069,16 +1143,15 @@ class BaseQuerySet(object): """An alias for scalar""" return self.scalar(*fields) - def as_pymongo(self, coerce_types=False): + def as_pymongo(self): """Instead of returning Document instances, return raw values from pymongo. - :param coerce_types: Field types (if applicable) would be use to - coerce types. + This method is particularly useful if you don't need dereferencing + and care primarily about the speed of data retrieval. """ queryset = self.clone() queryset._as_pymongo = True - queryset._as_pymongo_coerce = coerce_types return queryset def max_time_ms(self, ms): @@ -1123,6 +1196,10 @@ class BaseQuerySet(object): pipeline = initial_pipeline + list(pipeline) + if IS_PYMONGO_3 and self._read_preference is not None: + return self._collection.with_options(read_preference=self._read_preference) \ + .aggregate(pipeline, cursor={}, **kwargs) + return self._collection.aggregate(pipeline, cursor={}, **kwargs) # JS functionality @@ -1398,27 +1475,31 @@ class BaseQuerySet(object): # Iterator helpers - def next(self): + def __next__(self): """Wrap the result in a :class:`~mongoengine.Document` object. 
""" if self._limit == 0 or self._none: raise StopIteration - raw_doc = self._cursor.next() + raw_doc = six.next(self._cursor) + if self._as_pymongo: return self._get_as_pymongo(raw_doc) - doc = self._document._from_son(raw_doc, - _auto_dereference=self._auto_dereference, only_fields=self.only_fields) + + doc = self._document._from_son( + raw_doc, _auto_dereference=self._auto_dereference, + only_fields=self.only_fields) if self._scalar: return self._get_scalar(doc) return doc + next = __next__ # For Python2 support + def rewind(self): """Rewind the cursor to its unevaluated state. - .. versionadded:: 0.3 """ self._iter = False @@ -1468,43 +1549,57 @@ class BaseQuerySet(object): @property def _cursor(self): - if self._cursor_obj is None: + """Return a PyMongo cursor object corresponding to this queryset.""" - # In PyMongo 3+, we define the read preference on a collection - # level, not a cursor level. Thus, we need to get a cloned - # collection object using `with_options` first. - if IS_PYMONGO_3 and self._read_preference is not None: - self._cursor_obj = self._collection\ - .with_options(read_preference=self._read_preference)\ - .find(self._query, **self._cursor_args) - else: - self._cursor_obj = self._collection.find(self._query, - **self._cursor_args) - # Apply where clauses to cursor - if self._where_clause: - where_clause = self._sub_js_fields(self._where_clause) - self._cursor_obj.where(where_clause) + # If _cursor_obj already exists, return it immediately. + if self._cursor_obj is not None: + return self._cursor_obj - if self._ordering: - # Apply query ordering - self._cursor_obj.sort(self._ordering) - elif self._ordering is None and self._document._meta['ordering']: - # Otherwise, apply the ordering from the document model, unless - # it's been explicitly cleared via order_by with no arguments - order = self._get_order_by(self._document._meta['ordering']) - self._cursor_obj.sort(order) + # Create a new PyMongo cursor. 
+ # XXX In PyMongo 3+, we define the read preference on a collection + # level, not a cursor level. Thus, we need to get a cloned collection + # object using `with_options` first. + if IS_PYMONGO_3 and self._read_preference is not None: + self._cursor_obj = self._collection\ + .with_options(read_preference=self._read_preference)\ + .find(self._query, **self._cursor_args) + else: + self._cursor_obj = self._collection.find(self._query, + **self._cursor_args) + # Apply "where" clauses to cursor + if self._where_clause: + where_clause = self._sub_js_fields(self._where_clause) + self._cursor_obj.where(where_clause) - if self._limit is not None: - self._cursor_obj.limit(self._limit) + # Apply ordering to the cursor. + # XXX self._ordering can be equal to: + # * None if we didn't explicitly call order_by on this queryset. + # * A list of PyMongo-style sorting tuples. + # * An empty list if we explicitly called order_by() without any + # arguments. This indicates that we want to clear the default + # ordering. 
+ if self._ordering: + # explicit ordering + self._cursor_obj.sort(self._ordering) + elif self._ordering is None and self._document._meta['ordering']: + # default ordering + order = self._get_order_by(self._document._meta['ordering']) + self._cursor_obj.sort(order) - if self._skip is not None: - self._cursor_obj.skip(self._skip) + if self._limit is not None: + self._cursor_obj.limit(self._limit) - if self._hint != -1: - self._cursor_obj.hint(self._hint) + if self._skip is not None: + self._cursor_obj.skip(self._skip) - if self._batch_size is not None: - self._cursor_obj.batch_size(self._batch_size) + if self._hint != -1: + self._cursor_obj.hint(self._hint) + + if self._batch_size is not None: + self._cursor_obj.batch_size(self._batch_size) + + if self._comment is not None: + self._cursor_obj.comment(self._comment) return self._cursor_obj @@ -1650,25 +1745,33 @@ class BaseQuerySet(object): return frequencies def _fields_to_dbfields(self, fields): - """Translate fields paths to its db equivalents""" - ret = [] + """Translate fields' paths to their db equivalents.""" subclasses = [] - document = self._document - if document._meta['allow_inheritance']: + if self._document._meta['allow_inheritance']: subclasses = [get_document(x) - for x in document._subclasses][1:] + for x in self._document._subclasses][1:] + + db_field_paths = [] for field in fields: + field_parts = field.split('.') try: - field = '.'.join(f.db_field for f in - document._lookup_field(field.split('.'))) - ret.append(field) + field = '.'.join( + f if isinstance(f, six.string_types) else f.db_field + for f in self._document._lookup_field(field_parts) + ) + db_field_paths.append(field) except LookUpError as err: found = False + + # If a field path wasn't found on the main document, go + # through its subclasses and see if it exists on any of them. 
for subdoc in subclasses: try: - subfield = '.'.join(f.db_field for f in - subdoc._lookup_field(field.split('.'))) - ret.append(subfield) + subfield = '.'.join( + f if isinstance(f, six.string_types) else f.db_field + for f in subdoc._lookup_field(field_parts) + ) + db_field_paths.append(subfield) found = True break except LookUpError: @@ -1676,10 +1779,17 @@ class BaseQuerySet(object): if not found: raise err - return ret + + return db_field_paths def _get_order_by(self, keys): - """Creates a list of order by fields""" + """Given a list of MongoEngine-style sort keys, return a list + of sorting tuples that can be applied to a PyMongo cursor. For + example: + + >>> qs._get_order_by(['-last_name', 'first_name']) + [('last_name', -1), ('first_name', 1)] + """ key_list = [] for key in keys: if not key: @@ -1692,17 +1802,19 @@ class BaseQuerySet(object): direction = pymongo.ASCENDING if key[0] == '-': direction = pymongo.DESCENDING + if key[0] in ('-', '+'): key = key[1:] + key = key.replace('__', '.') try: key = self._document._translate_field_name(key) except Exception: + # TODO this exception should be more specific pass + key_list.append((key, direction)) - if self._cursor_obj and key_list: - self._cursor_obj.sort(key_list) return key_list def _get_scalar(self, doc): @@ -1719,59 +1831,25 @@ class BaseQuerySet(object): return tuple(data) - def _get_as_pymongo(self, row): - # Extract which fields paths we should follow if .fields(...) was - # used. If not, handle all fields. - if not getattr(self, '__as_pymongo_fields', None): - self.__as_pymongo_fields = [] + def _get_as_pymongo(self, doc): + """Clean up a PyMongo doc, removing fields that were only fetched + for the sake of MongoEngine's implementation, and return it. + """ + # Always remove _cls as a MongoEngine's implementation detail. + if '_cls' in doc: + del doc['_cls'] - for field in self._loaded_fields.fields - set(['_cls']): - self.__as_pymongo_fields.append(field) - while '.' 
in field: - field, _ = field.rsplit('.', 1) - self.__as_pymongo_fields.append(field) + # If the _id was not included in a .only or was excluded in a .exclude, + # remove it from the doc (we always fetch it so that we can properly + # construct documents). + fields = self._loaded_fields + if fields and '_id' in doc and ( + (fields.value == QueryFieldList.ONLY and '_id' not in fields.fields) or + (fields.value == QueryFieldList.EXCLUDE and '_id' in fields.fields) + ): + del doc['_id'] - all_fields = not self.__as_pymongo_fields - - def clean(data, path=None): - path = path or '' - - if isinstance(data, dict): - new_data = {} - for key, value in data.iteritems(): - new_path = '%s.%s' % (path, key) if path else key - - if all_fields: - include_field = True - elif self._loaded_fields.value == QueryFieldList.ONLY: - include_field = new_path in self.__as_pymongo_fields - else: - include_field = new_path not in self.__as_pymongo_fields - - if include_field: - new_data[key] = clean(value, path=new_path) - data = new_data - elif isinstance(data, list): - data = [clean(d, path=path) for d in data] - else: - if self._as_pymongo_coerce: - # If we need to coerce types, we need to determine the - # type of this field and use the corresponding - # .to_python(...) - EmbeddedDocumentField = _import_class('EmbeddedDocumentField') - - obj = self._document - for chunk in path.split('.'): - obj = getattr(obj, chunk, None) - if obj is None: - break - elif isinstance(obj, EmbeddedDocumentField): - obj = obj.document_type - if obj and data is not None: - data = obj.to_python(data) - return data - - return clean(row) + return doc def _sub_js_fields(self, code): """When fields are specified with [~fieldname] syntax, where @@ -1800,10 +1878,21 @@ class BaseQuerySet(object): return code def _chainable_method(self, method_name, val): + """Call a particular method on the PyMongo cursor call a particular chainable method + with the provided value. 
+ """ queryset = self.clone() - method = getattr(queryset._cursor, method_name) - method(val) + + # Get an existing cursor object or create a new one + cursor = queryset._cursor + + # Find the requested method on the cursor and call it with the + # provided value + getattr(cursor, method_name)(val) + + # Cache the value on the queryset._{method_name} setattr(queryset, '_' + method_name, val) + return queryset # Deprecated diff --git a/mongoengine/queryset/field_list.py b/mongoengine/queryset/field_list.py index 0524c3bb..dba724af 100644 --- a/mongoengine/queryset/field_list.py +++ b/mongoengine/queryset/field_list.py @@ -63,9 +63,11 @@ class QueryFieldList(object): self._only_called = True return self - def __nonzero__(self): + def __bool__(self): return bool(self.fields) + __nonzero__ = __bool__ # For Py2 support + def as_dict(self): field_list = {field: self.value for field in self.fields} if self.slice: diff --git a/mongoengine/queryset/manager.py b/mongoengine/queryset/manager.py index 199205e9..f93dbb43 100644 --- a/mongoengine/queryset/manager.py +++ b/mongoengine/queryset/manager.py @@ -36,7 +36,7 @@ class QuerySetManager(object): queryset_class = owner._meta.get('queryset_class', self.default) queryset = queryset_class(owner, owner._get_collection()) if self.get_queryset: - arg_count = self.get_queryset.func_code.co_argcount + arg_count = self.get_queryset.__code__.co_argcount if arg_count == 1: queryset = self.get_queryset(queryset) elif arg_count == 2: diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 9c1f24e1..f9fed7b7 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -1,3 +1,5 @@ +import six + from mongoengine.errors import OperationError from mongoengine.queryset.base import (BaseQuerySet, CASCADE, DENY, DO_NOTHING, NULLIFY, PULL) @@ -87,10 +89,10 @@ class QuerySet(BaseQuerySet): yield self._result_cache[pos] pos += 1 - # Raise StopIteration if we already established there were 
no more + # return if we already established there were no more # docs in the db cursor. if not self._has_more: - raise StopIteration + return # Otherwise, populate more of the cache and repeat. if len(self._result_cache) <= pos: @@ -112,8 +114,8 @@ class QuerySet(BaseQuerySet): # Pull in ITER_CHUNK_SIZE docs from the database and store them in # the result cache. try: - for _ in xrange(ITER_CHUNK_SIZE): - self._result_cache.append(self.next()) + for _ in six.moves.range(ITER_CHUNK_SIZE): + self._result_cache.append(six.next(self)) except StopIteration: # Getting this exception means there are no more docs in the # db cursor. Set _has_more to False so that we can use that @@ -136,13 +138,15 @@ class QuerySet(BaseQuerySet): return self._len def no_cache(self): - """Convert to a non_caching queryset + """Convert to a non-caching queryset .. versionadded:: 0.8.3 Convert to non caching queryset """ if self._result_cache is not None: raise OperationError('QuerySet already cached') - return self.clone_into(QuerySetNoCache(self._document, self._collection)) + + return self._clone_into(QuerySetNoCache(self._document, + self._collection)) class QuerySetNoCache(BaseQuerySet): @@ -153,7 +157,7 @@ class QuerySetNoCache(BaseQuerySet): .. versionadded:: 0.8.3 Convert to caching queryset """ - return self.clone_into(QuerySet(self._document, self._collection)) + return self._clone_into(QuerySet(self._document, self._collection)) def __repr__(self): """Provides the string representation of the QuerySet @@ -164,9 +168,9 @@ class QuerySetNoCache(BaseQuerySet): return '.. queryset mid-iteration ..' 
data = [] - for _ in xrange(REPR_OUTPUT_SIZE + 1): + for _ in six.moves.range(REPR_OUTPUT_SIZE + 1): try: - data.append(self.next()) + data.append(six.next(self)) except StopIteration: break diff --git a/mongoengine/queryset/transform.py b/mongoengine/queryset/transform.py index af59917c..2effa249 100644 --- a/mongoengine/queryset/transform.py +++ b/mongoengine/queryset/transform.py @@ -101,8 +101,8 @@ def query(_doc_cls=None, **kwargs): value = value['_id'] elif op in ('in', 'nin', 'all', 'near') and not isinstance(value, dict): - # 'in', 'nin' and 'all' require a list of values - value = [field.prepare_query_value(op, v) for v in value] + # Raise an error if the in/nin/all/near param is not iterable. + value = _prepare_query_for_iterable(field, op, value) # If we're querying a GenericReferenceField, we need to alter the # key depending on the value: @@ -147,7 +147,7 @@ def query(_doc_cls=None, **kwargs): if op is None or key not in mongo_query: mongo_query[key] = value elif key in mongo_query: - if isinstance(mongo_query[key], dict): + if isinstance(mongo_query[key], dict) and isinstance(value, dict): mongo_query[key].update(value) # $max/minDistance needs to come last - convert to SON value_dict = mongo_query[key] @@ -201,31 +201,37 @@ def update(_doc_cls=None, **update): format. 
""" mongo_update = {} + for key, value in update.items(): if key == '__raw__': mongo_update.update(value) continue + parts = key.split('__') + # if there is no operator, default to 'set' if len(parts) < 3 and parts[0] not in UPDATE_OPERATORS: parts.insert(0, 'set') + # Check for an operator and transform to mongo-style if there is op = None if parts[0] in UPDATE_OPERATORS: op = parts.pop(0) # Convert Pythonic names to Mongo equivalents - if op in ('push_all', 'pull_all'): - op = op.replace('_all', 'All') - elif op == 'dec': + operator_map = { + 'push_all': 'pushAll', + 'pull_all': 'pullAll', + 'dec': 'inc', + 'add_to_set': 'addToSet', + 'set_on_insert': 'setOnInsert' + } + if op == 'dec': # Support decrement by flipping a positive value's sign # and using 'inc' - op = 'inc' - if value > 0: - value = -value - elif op == 'add_to_set': - op = 'addToSet' - elif op == 'set_on_insert': - op = 'setOnInsert' + value = -value + # If the operator doesn't found from operator map, the op value + # will stay unchanged + op = operator_map.get(op, op) match = None if parts[-1] in COMPARISON_OPERATORS: @@ -272,7 +278,15 @@ def update(_doc_cls=None, **update): if isinstance(field, GeoJsonBaseField): value = field.to_mongo(value) - if op in (None, 'set', 'push', 'pull'): + if op == 'pull': + if field.required or value is not None: + if match == 'in' and not isinstance(value, dict): + value = _prepare_query_for_iterable(field, op, value) + else: + value = field.prepare_query_value(op, value) + elif op == 'push' and isinstance(value, (list, tuple, set)): + value = [field.prepare_query_value(op, v) for v in value] + elif op in (None, 'set', 'push'): if field.required or value is not None: value = field.prepare_query_value(op, value) elif op in ('pushAll', 'pullAll'): @@ -284,6 +298,8 @@ def update(_doc_cls=None, **update): value = field.prepare_query_value(op, value) elif op == 'unset': value = 1 + elif op == 'inc': + value = field.prepare_query_value(op, value) if match: match = '$' + 
match @@ -307,11 +323,17 @@ def update(_doc_cls=None, **update): field_classes = [c.__class__ for c in cleaned_fields] field_classes.reverse() ListField = _import_class('ListField') - if ListField in field_classes: - # Join all fields via dot notation to the last ListField + EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField') + if ListField in field_classes or EmbeddedDocumentListField in field_classes: + # Join all fields via dot notation to the last ListField or EmbeddedDocumentListField # Then process as normal + if ListField in field_classes: + _check_field = ListField + else: + _check_field = EmbeddedDocumentListField + last_listField = len( - cleaned_fields) - field_classes.index(ListField) + cleaned_fields) - field_classes.index(_check_field) key = '.'.join(parts[:last_listField]) parts = parts[last_listField:] parts.insert(0, key) @@ -321,10 +343,26 @@ def update(_doc_cls=None, **update): value = {key: value} elif op == 'addToSet' and isinstance(value, list): value = {key: {'$each': value}} + elif op in ('push', 'pushAll'): + if parts[-1].isdigit(): + key = parts[0] + position = int(parts[-1]) + # $position expects an iterable. If pushing a single value, + # wrap it in a list. + if not isinstance(value, (set, tuple, list)): + value = [value] + value = {key: {'$each': value, '$position': position}} + else: + if op == 'pushAll': + op = 'push' # convert to non-deprecated keyword + if not isinstance(value, (set, tuple, list)): + value = [value] + value = {key: {'$each': value}} + else: + value = {key: value} else: value = {key: value} key = '$' + op - if key not in mongo_update: mongo_update[key] = value elif key in mongo_update and isinstance(mongo_update[key], dict): @@ -413,3 +451,22 @@ def _infer_geometry(value): raise InvalidQueryError('Invalid $geometry data. 
Can be either a ' 'dictionary or (nested) lists of coordinate(s)') + + +def _prepare_query_for_iterable(field, op, value): + # We need a special check for BaseDocument, because - although it's iterable - using + # it as such in the context of this method is most definitely a mistake. + BaseDocument = _import_class('BaseDocument') + + if isinstance(value, BaseDocument): + raise TypeError("When using the `in`, `nin`, `all`, or " + "`near`-operators you can\'t use a " + "`Document`, you must wrap your object " + "in a list (object -> [object]).") + + if not hasattr(value, '__iter__'): + raise TypeError("The `in`, `nin`, `all`, or " + "`near`-operators must be applied to an " + "iterable (e.g. a list).") + + return [field.prepare_query_value(op, v) for v in value] diff --git a/requirements.txt b/requirements.txt index 854ed26d..4e3ea940 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,3 +3,5 @@ pymongo>=2.7.1 six==1.10.0 flake8 flake8-import-order +Sphinx==1.5.5 +sphinx-rtd-theme==0.2.4 diff --git a/setup.cfg b/setup.cfg index 1887c476..fd6192b8 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,11 +1,11 @@ [nosetests] verbosity=2 detailed-errors=1 -tests=tests +#tests=tests cover-package=mongoengine [flake8] -ignore=E501,F401,F403,F405,I201 +ignore=E501,F401,F403,F405,I201,I202 exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests -max-complexity=45 +max-complexity=47 application-import-names=mongoengine,tests diff --git a/setup.py b/setup.py index fa682d20..c7632ce3 100644 --- a/setup.py +++ b/setup.py @@ -44,9 +44,8 @@ CLASSIFIERS = [ "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.3", - "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", 'Topic :: Database', @@ 
-70,9 +69,9 @@ setup( name='mongoengine', version=VERSION, author='Harry Marr', - author_email='harry.marr@{nospam}gmail.com', - maintainer="Ross Lawley", - maintainer_email="ross.lawley@{nospam}gmail.com", + author_email='harry.marr@gmail.com', + maintainer="Stefan Wojcik", + maintainer_email="wojcikstefan@gmail.com", url='http://mongoengine.org/', download_url='https://github.com/MongoEngine/mongoengine/tarball/master', license='MIT', diff --git a/tests/__init__.py b/tests/__init__.py index eab0ddc7..08db7186 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,4 +1,4 @@ -from all_warnings import AllWarnings -from document import * -from queryset import * -from fields import * +from .all_warnings import AllWarnings +from .document import * +from .queryset import * +from .fields import * diff --git a/tests/document/__init__.py b/tests/document/__init__.py index f71376ea..dc35c969 100644 --- a/tests/document/__init__.py +++ b/tests/document/__init__.py @@ -1,13 +1,13 @@ import unittest -from class_methods import * -from delta import * -from dynamic import * -from indexes import * -from inheritance import * -from instance import * -from json_serialisation import * -from validation import * +from .class_methods import * +from .delta import * +from .dynamic import * +from .indexes import * +from .inheritance import * +from .instance import * +from .json_serialisation import * +from .validation import * if __name__ == '__main__': unittest.main() diff --git a/tests/document/class_methods.py b/tests/document/class_methods.py index dd3addb7..5289e483 100644 --- a/tests/document/class_methods.py +++ b/tests/document/class_methods.py @@ -5,6 +5,7 @@ from mongoengine import * from mongoengine.queryset import NULLIFY, PULL from mongoengine.connection import get_db +from tests.utils import needs_mongodb_v26 __all__ = ("ClassMethodsTest", ) @@ -65,10 +66,10 @@ class ClassMethodsTest(unittest.TestCase): """ collection_name = 'person' self.Person(name='Test').save() - 
self.assertTrue(collection_name in self.db.collection_names()) + self.assertIn(collection_name, self.db.collection_names()) self.Person.drop_collection() - self.assertFalse(collection_name in self.db.collection_names()) + self.assertNotIn(collection_name, self.db.collection_names()) def test_register_delete_rule(self): """Ensure that register delete rule adds a delete rule to the document @@ -187,6 +188,26 @@ class ClassMethodsTest(unittest.TestCase): self.assertEqual(BlogPostWithTags.compare_indexes(), { 'missing': [], 'extra': [] }) self.assertEqual(BlogPostWithCustomField.compare_indexes(), { 'missing': [], 'extra': [] }) + @needs_mongodb_v26 + def test_compare_indexes_for_text_indexes(self): + """ Ensure that compare_indexes behaves correctly for text indexes """ + + class Doc(Document): + a = StringField() + b = StringField() + meta = {'indexes': [ + {'fields': ['$a', "$b"], + 'default_language': 'english', + 'weights': {'a': 10, 'b': 2} + } + ]} + + Doc.drop_collection() + Doc.ensure_indexes() + actual = Doc.compare_indexes() + expected = {'missing': [], 'extra': []} + self.assertEqual(actual, expected) + def test_list_indexes_inheritance(self): """ ensure that all of the indexes are listed regardless of the super- or sub-class that we call it from @@ -319,7 +340,7 @@ class ClassMethodsTest(unittest.TestCase): meta = {'collection': collection_name} Person(name="Test User").save() - self.assertTrue(collection_name in self.db.collection_names()) + self.assertIn(collection_name, self.db.collection_names()) user_obj = self.db[collection_name].find_one() self.assertEqual(user_obj['name'], "Test User") @@ -328,7 +349,7 @@ class ClassMethodsTest(unittest.TestCase): self.assertEqual(user_obj.name, "Test User") Person.drop_collection() - self.assertFalse(collection_name in self.db.collection_names()) + self.assertNotIn(collection_name, self.db.collection_names()) def test_collection_name_and_primary(self): """Ensure that a collection with a specified name may be used. 
diff --git a/tests/document/delta.py b/tests/document/delta.py index add4fe8d..30296956 100644 --- a/tests/document/delta.py +++ b/tests/document/delta.py @@ -694,7 +694,7 @@ class DeltaTest(unittest.TestCase): organization.employees.append(person) updates, removals = organization._delta() self.assertEqual({}, removals) - self.assertTrue('employees' in updates) + self.assertIn('employees', updates) def test_delta_with_dbref_false(self): person, organization, employee = self.circular_reference_deltas_2(Document, Document, False) @@ -709,7 +709,7 @@ class DeltaTest(unittest.TestCase): organization.employees.append(person) updates, removals = organization._delta() self.assertEqual({}, removals) - self.assertTrue('employees' in updates) + self.assertIn('employees', updates) def test_nested_nested_fields_mark_as_changed(self): class EmbeddedDoc(EmbeddedDocument): diff --git a/tests/document/dynamic.py b/tests/document/dynamic.py index a478df42..94cea134 100644 --- a/tests/document/dynamic.py +++ b/tests/document/dynamic.py @@ -174,8 +174,8 @@ class DynamicTest(unittest.TestCase): Employee.drop_collection() - self.assertTrue('name' in Employee._fields) - self.assertTrue('salary' in Employee._fields) + self.assertIn('name', Employee._fields) + self.assertIn('salary', Employee._fields) self.assertEqual(Employee._get_collection_name(), self.Person._get_collection_name()) @@ -189,7 +189,7 @@ class DynamicTest(unittest.TestCase): self.assertEqual(1, Employee.objects(age=20).count()) joe_bloggs = self.Person.objects.first() - self.assertTrue(isinstance(joe_bloggs, Employee)) + self.assertIsInstance(joe_bloggs, Employee) def test_embedded_dynamic_document(self): """Test dynamic embedded documents""" diff --git a/tests/document/indexes.py b/tests/document/indexes.py index af93e7db..1cbb4ec3 100644 --- a/tests/document/indexes.py +++ b/tests/document/indexes.py @@ -2,14 +2,14 @@ import unittest import sys - -import pymongo - from nose.plugins.skip import SkipTest from datetime 
import datetime +import pymongo from mongoengine import * -from mongoengine.connection import get_db, get_connection +from mongoengine.connection import get_db + +from tests.utils import get_mongodb_version, needs_mongodb_v26 __all__ = ("IndexesTest", ) @@ -70,7 +70,7 @@ class IndexesTest(unittest.TestCase): self.assertEqual(len(info), 4) info = [value['key'] for key, value in info.iteritems()] for expected in expected_specs: - self.assertTrue(expected['fields'] in info) + self.assertIn(expected['fields'], info) def _index_test_inheritance(self, InheritFrom): @@ -102,7 +102,7 @@ class IndexesTest(unittest.TestCase): self.assertEqual(len(info), 4) info = [value['key'] for key, value in info.iteritems()] for expected in expected_specs: - self.assertTrue(expected['fields'] in info) + self.assertIn(expected['fields'], info) class ExtendedBlogPost(BlogPost): title = StringField() @@ -117,7 +117,7 @@ class IndexesTest(unittest.TestCase): info = ExtendedBlogPost.objects._collection.index_information() info = [value['key'] for key, value in info.iteritems()] for expected in expected_specs: - self.assertTrue(expected['fields'] in info) + self.assertIn(expected['fields'], info) def test_indexes_document_inheritance(self): """Ensure that indexes are used when meta[indexes] is specified for @@ -226,7 +226,7 @@ class IndexesTest(unittest.TestCase): list(Person.objects) info = Person.objects._collection.index_information() info = [value['key'] for key, value in info.iteritems()] - self.assertTrue([('rank.title', 1)] in info) + self.assertIn([('rank.title', 1)], info) def test_explicit_geo2d_index(self): """Ensure that geo2d indexes work when created via meta[indexes] @@ -246,7 +246,7 @@ class IndexesTest(unittest.TestCase): Place.ensure_indexes() info = Place._get_collection().index_information() info = [value['key'] for key, value in info.iteritems()] - self.assertTrue([('location.point', '2d')] in info) + self.assertIn([('location.point', '2d')], info) def 
test_explicit_geo2d_index_embedded(self): """Ensure that geo2d indexes work when created via meta[indexes] @@ -269,7 +269,7 @@ class IndexesTest(unittest.TestCase): Place.ensure_indexes() info = Place._get_collection().index_information() info = [value['key'] for key, value in info.iteritems()] - self.assertTrue([('current.location.point', '2d')] in info) + self.assertIn([('current.location.point', '2d')], info) def test_explicit_geosphere_index(self): """Ensure that geosphere indexes work when created via meta[indexes] @@ -289,7 +289,7 @@ class IndexesTest(unittest.TestCase): Place.ensure_indexes() info = Place._get_collection().index_information() info = [value['key'] for key, value in info.iteritems()] - self.assertTrue([('location.point', '2dsphere')] in info) + self.assertIn([('location.point', '2dsphere')], info) def test_explicit_geohaystack_index(self): """Ensure that geohaystack indexes work when created via meta[indexes] @@ -311,7 +311,7 @@ class IndexesTest(unittest.TestCase): Place.ensure_indexes() info = Place._get_collection().index_information() info = [value['key'] for key, value in info.iteritems()] - self.assertTrue([('location.point', 'geoHaystack')] in info) + self.assertIn([('location.point', 'geoHaystack')], info) def test_create_geohaystack_index(self): """Ensure that geohaystack indexes can be created @@ -323,7 +323,7 @@ class IndexesTest(unittest.TestCase): Place.create_index({'fields': (')location.point', 'name')}, bucketSize=10) info = Place._get_collection().index_information() info = [value['key'] for key, value in info.iteritems()] - self.assertTrue([('location.point', 'geoHaystack'), ('name', 1)] in info) + self.assertIn([('location.point', 'geoHaystack'), ('name', 1)], info) def test_dictionary_indexes(self): """Ensure that indexes are used when meta[indexes] contains @@ -356,7 +356,7 @@ class IndexesTest(unittest.TestCase): value.get('unique', False), value.get('sparse', False)) for key, value in info.iteritems()] - 
self.assertTrue(([('addDate', -1)], True, True) in info) + self.assertIn(([('addDate', -1)], True, True), info) BlogPost.drop_collection() @@ -412,7 +412,6 @@ class IndexesTest(unittest.TestCase): User.ensure_indexes() info = User.objects._collection.index_information() self.assertEqual(sorted(info.keys()), ['_cls_1_user_guid_1', '_id_']) - User.drop_collection() def test_embedded_document_index(self): """Tests settings an index on an embedded document @@ -434,7 +433,6 @@ class IndexesTest(unittest.TestCase): info = BlogPost.objects._collection.index_information() self.assertEqual(sorted(info.keys()), ['_id_', 'date.yr_-1']) - BlogPost.drop_collection() def test_list_embedded_document_index(self): """Ensure list embedded documents can be indexed @@ -461,7 +459,6 @@ class IndexesTest(unittest.TestCase): post1 = BlogPost(title="Embedded Indexes tests in place", tags=[Tag(name="about"), Tag(name="time")]) post1.save() - BlogPost.drop_collection() def test_recursive_embedded_objects_dont_break_indexes(self): @@ -494,8 +491,7 @@ class IndexesTest(unittest.TestCase): obj = Test(a=1) obj.save() - connection = get_connection() - IS_MONGODB_3 = connection.server_info()['versionArray'][0] >= 3 + IS_MONGODB_3 = get_mongodb_version()[0] >= 3 # Need to be explicit about covered indexes as mongoDB doesn't know if # the documents returned might have more keys in that here. @@ -623,8 +619,6 @@ class IndexesTest(unittest.TestCase): post3 = BlogPost(title='test3', date=Date(year=2010), slug='test') self.assertRaises(OperationError, post3.save) - BlogPost.drop_collection() - def test_unique_embedded_document(self): """Ensure that uniqueness constraints are applied to fields on embedded documents. 
""" @@ -652,8 +646,6 @@ class IndexesTest(unittest.TestCase): sub=SubDocument(year=2010, slug='test')) self.assertRaises(NotUniqueError, post3.save) - BlogPost.drop_collection() - def test_unique_embedded_document_in_list(self): """ Ensure that the uniqueness constraints are applied to fields in @@ -684,8 +676,6 @@ class IndexesTest(unittest.TestCase): self.assertRaises(NotUniqueError, post2.save) - BlogPost.drop_collection() - def test_unique_with_embedded_document_and_embedded_unique(self): """Ensure that uniqueness constraints are applied to fields on embedded documents. And work with unique_with as well. @@ -719,8 +709,6 @@ class IndexesTest(unittest.TestCase): sub=SubDocument(year=2009, slug='test-1')) self.assertRaises(NotUniqueError, post3.save) - BlogPost.drop_collection() - def test_ttl_indexes(self): class Log(Document): @@ -733,14 +721,6 @@ class IndexesTest(unittest.TestCase): Log.drop_collection() - if pymongo.version_tuple[0] < 2 and pymongo.version_tuple[1] < 3: - raise SkipTest('pymongo needs to be 2.3 or higher for this test') - - connection = get_connection() - version_array = connection.server_info()['versionArray'] - if version_array[0] < 2 and version_array[1] < 2: - raise SkipTest('MongoDB needs to be 2.2 or higher for this test') - # Indexes are lazy so use list() to perform query list(Log.objects) info = Log.objects._collection.index_information() @@ -768,13 +748,11 @@ class IndexesTest(unittest.TestCase): raise AssertionError("We saved a dupe!") except NotUniqueError: pass - Customer.drop_collection() def test_unique_and_primary(self): """If you set a field as primary, then unexpected behaviour can occur. You won't create a duplicate but you will update an existing document. 
""" - class User(Document): name = StringField(primary_key=True, unique=True) password = StringField() @@ -790,8 +768,23 @@ class IndexesTest(unittest.TestCase): self.assertEqual(User.objects.count(), 1) self.assertEqual(User.objects.get().password, 'secret2') + def test_unique_and_primary_create(self): + """Create a new record with a duplicate primary key + throws an exception + """ + class User(Document): + name = StringField(primary_key=True) + password = StringField() + User.drop_collection() + User.objects.create(name='huangz', password='secret') + with self.assertRaises(NotUniqueError): + User.objects.create(name='huangz', password='secret2') + + self.assertEqual(User.objects.count(), 1) + self.assertEqual(User.objects.get().password, 'secret') + def test_index_with_pk(self): """Ensure you can use `pk` as part of a query""" @@ -810,7 +803,7 @@ class IndexesTest(unittest.TestCase): info = BlogPost.objects._collection.index_information() info = [value['key'] for key, value in info.iteritems()] index_item = [('_id', 1), ('comments.comment_id', 1)] - self.assertTrue(index_item in info) + self.assertIn(index_item, info) def test_compound_key_embedded(self): @@ -857,8 +850,8 @@ class IndexesTest(unittest.TestCase): info = MyDoc.objects._collection.index_information() info = [value['key'] for key, value in info.iteritems()] - self.assertTrue([('provider_ids.foo', 1)] in info) - self.assertTrue([('provider_ids.bar', 1)] in info) + self.assertIn([('provider_ids.foo', 1)], info) + self.assertIn([('provider_ids.bar', 1)], info) def test_sparse_compound_indexes(self): @@ -874,8 +867,8 @@ class IndexesTest(unittest.TestCase): info['provider_ids.foo_1_provider_ids.bar_1']['key']) self.assertTrue(info['provider_ids.foo_1_provider_ids.bar_1']['sparse']) + @needs_mongodb_v26 def test_text_indexes(self): - class Book(Document): title = DictField() meta = { @@ -883,9 +876,9 @@ class IndexesTest(unittest.TestCase): } indexes = Book.objects._collection.index_information() - 
self.assertTrue("title_text" in indexes) + self.assertIn("title_text", indexes) key = indexes["title_text"]["key"] - self.assertTrue(('_fts', 'text') in key) + self.assertIn(('_fts', 'text'), key) def test_hashed_indexes(self): @@ -896,8 +889,8 @@ class IndexesTest(unittest.TestCase): } indexes = Book.objects._collection.index_information() - self.assertTrue("ref_id_hashed" in indexes) - self.assertTrue(('ref_id', 'hashed') in indexes["ref_id_hashed"]["key"]) + self.assertIn("ref_id_hashed", indexes) + self.assertIn(('ref_id', 'hashed'), indexes["ref_id_hashed"]["key"]) def test_indexes_after_database_drop(self): """ @@ -1020,7 +1013,7 @@ class IndexesTest(unittest.TestCase): TestDoc.ensure_indexes() index_info = TestDoc._get_collection().index_information() - self.assertTrue('shard_1_1__cls_1_txt_1_1' in index_info) + self.assertIn('shard_1_1__cls_1_txt_1_1', index_info) if __name__ == '__main__': diff --git a/tests/document/inheritance.py b/tests/document/inheritance.py index 2897e1d1..b2ab1b52 100644 --- a/tests/document/inheritance.py +++ b/tests/document/inheritance.py @@ -268,7 +268,7 @@ class InheritanceTest(unittest.TestCase): collection = self.db[Animal._get_collection_name()] obj = collection.find_one() - self.assertFalse('_cls' in obj) + self.assertNotIn('_cls', obj) def test_cant_turn_off_inheritance_on_subclass(self): """Ensure if inheritance is on in a subclass you cant turn it off. 
@@ -298,7 +298,7 @@ class InheritanceTest(unittest.TestCase): # Check that _cls isn't present in simple documents doc = Animal(name='dog') - self.assertFalse('_cls' in doc.to_mongo()) + self.assertNotIn('_cls', doc.to_mongo()) def test_abstract_handle_ids_in_metaclass_properly(self): @@ -374,14 +374,14 @@ class InheritanceTest(unittest.TestCase): pass doc = Comment(content='test') - self.assertFalse('_cls' in doc.to_mongo()) + self.assertNotIn('_cls', doc.to_mongo()) class Comment(EmbeddedDocument): content = StringField() meta = {'allow_inheritance': True} doc = Comment(content='test') - self.assertTrue('_cls' in doc.to_mongo()) + self.assertIn('_cls', doc.to_mongo()) def test_document_inheritance(self): """Ensure mutliple inheritance of abstract documents @@ -434,8 +434,8 @@ class InheritanceTest(unittest.TestCase): for cls in [Animal, Fish, Guppy]: self.assertEqual(cls._meta[k], v) - self.assertFalse('collection' in Animal._meta) - self.assertFalse('collection' in Mammal._meta) + self.assertNotIn('collection', Animal._meta) + self.assertNotIn('collection', Mammal._meta) self.assertEqual(Animal._get_collection_name(), None) self.assertEqual(Mammal._get_collection_name(), None) diff --git a/tests/document/instance.py b/tests/document/instance.py index b92bafa9..e637b3e6 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -8,9 +8,12 @@ import weakref from datetime import datetime from bson import DBRef, ObjectId +from pymongo.errors import DuplicateKeyError + from tests import fixtures from tests.fixtures import (PickleEmbedded, PickleTest, PickleSignalsTest, PickleDynamicEmbedded, PickleDynamicTest) +from tests.utils import MongoDBTestCase from mongoengine import * from mongoengine.base import get_document, _document_registry @@ -22,20 +25,17 @@ from mongoengine.queryset import NULLIFY, Q from mongoengine.context_managers import switch_db, query_counter from mongoengine import signals +from tests.utils import needs_mongodb_v26 + 
TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), '../fields/mongoengine.png') __all__ = ("InstanceTest",) - - -class InstanceTest(unittest.TestCase): +class InstanceTest(MongoDBTestCase): def setUp(self): - connect(db='mongoenginetest') - self.db = get_db() - class Job(EmbeddedDocument): name = StringField() years = IntField() @@ -72,8 +72,7 @@ class InstanceTest(unittest.TestCase): self.assertEqual(field._instance, instance) def test_capped_collection(self): - """Ensure that capped collections work properly. - """ + """Ensure that capped collections work properly.""" class Log(Document): date = DateTimeField(default=datetime.now) meta = { @@ -181,8 +180,7 @@ class InstanceTest(unittest.TestCase): self.assertEqual('', repr(doc)) def test_repr_none(self): - """Ensure None values handled correctly - """ + """Ensure None values are handled correctly.""" class Article(Document): title = StringField() @@ -190,25 +188,23 @@ class InstanceTest(unittest.TestCase): return None doc = Article(title=u'привет мир') - self.assertEqual('', repr(doc)) def test_queryset_resurrects_dropped_collection(self): self.Person.drop_collection() - self.assertEqual([], list(self.Person.objects())) + # Ensure works correctly with inhertited classes class Actor(self.Person): pass - # Ensure works correctly with inhertited classes Actor.objects() self.Person.drop_collection() self.assertEqual([], list(Actor.objects())) def test_polymorphic_references(self): - """Ensure that the correct subclasses are returned from a query when - using references / generic references + """Ensure that the correct subclasses are returned from a query + when using references / generic references """ class Animal(Document): meta = {'allow_inheritance': True} @@ -248,7 +244,7 @@ class InstanceTest(unittest.TestCase): Zoo.drop_collection() class Zoo(Document): - animals = ListField(GenericReferenceField(Animal)) + animals = ListField(GenericReferenceField()) # Save a reference to each animal zoo = 
Zoo(animals=Animal.objects) @@ -258,9 +254,6 @@ class InstanceTest(unittest.TestCase): classes = [a.__class__ for a in Zoo.objects.first().animals] self.assertEqual(classes, [Animal, Fish, Mammal, Dog, Human]) - Zoo.drop_collection() - Animal.drop_collection() - def test_reference_inheritance(self): class Stats(Document): created = DateTimeField(default=datetime.now) @@ -287,8 +280,7 @@ class InstanceTest(unittest.TestCase): self.assertEqual(list_stats, CompareStats.objects.first().stats) def test_db_field_load(self): - """Ensure we load data correctly - """ + """Ensure we load data correctly from the right db field.""" class Person(Document): name = StringField(required=True) _rank = StringField(required=False, db_field="rank") @@ -307,8 +299,7 @@ class InstanceTest(unittest.TestCase): self.assertEqual(Person.objects.get(name="Fred").rank, "Private") def test_db_embedded_doc_field_load(self): - """Ensure we load embedded document data correctly - """ + """Ensure we load embedded document data correctly.""" class Rank(EmbeddedDocument): title = StringField(required=True) @@ -333,8 +324,7 @@ class InstanceTest(unittest.TestCase): self.assertEqual(Person.objects.get(name="Fred").rank, "Private") def test_custom_id_field(self): - """Ensure that documents may be created with custom primary keys. 
- """ + """Ensure that documents may be created with custom primary keys.""" class User(Document): username = StringField(primary_key=True) name = StringField() @@ -367,7 +357,7 @@ class InstanceTest(unittest.TestCase): user_son = User.objects._collection.find_one() self.assertEqual(user_son['_id'], 'test') - self.assertTrue('username' not in user_son['_id']) + self.assertNotIn('username', user_son['_id']) User.drop_collection() @@ -380,12 +370,9 @@ class InstanceTest(unittest.TestCase): user_son = User.objects._collection.find_one() self.assertEqual(user_son['_id'], 'mongo') - self.assertTrue('username' not in user_son['_id']) - - User.drop_collection() + self.assertNotIn('username', user_son['_id']) def test_document_not_registered(self): - class Place(Document): name = StringField() @@ -407,7 +394,6 @@ class InstanceTest(unittest.TestCase): list(Place.objects.all()) def test_document_registry_regressions(self): - class Location(Document): name = StringField() meta = {'allow_inheritance': True} @@ -421,23 +407,29 @@ class InstanceTest(unittest.TestCase): self.assertEqual(Area, get_document("Location.Area")) def test_creation(self): - """Ensure that document may be created using keyword arguments. - """ + """Ensure that document may be created using keyword arguments.""" person = self.Person(name="Test User", age=30) self.assertEqual(person.name, "Test User") self.assertEqual(person.age, 30) def test_to_dbref(self): - """Ensure that you can get a dbref of a document""" + """Ensure that you can get a dbref of a document.""" person = self.Person(name="Test User", age=30) self.assertRaises(OperationError, person.to_dbref) person.save() - person.to_dbref() + def test_save_abstract_document(self): + """Saving an abstract document should fail.""" + class Doc(Document): + name = StringField() + meta = {'abstract': True} + + with self.assertRaises(InvalidDocumentError): + Doc(name='aaa').save() + def test_reload(self): - """Ensure that attributes may be reloaded. 
- """ + """Ensure that attributes may be reloaded.""" person = self.Person(name="Test User", age=20) person.save() @@ -470,7 +462,6 @@ class InstanceTest(unittest.TestCase): doc = Animal(superphylum='Deuterostomia') doc.save() doc.reload() - Animal.drop_collection() def test_reload_sharded_nested(self): class SuperPhylum(EmbeddedDocument): @@ -484,11 +475,27 @@ class InstanceTest(unittest.TestCase): doc = Animal(superphylum=SuperPhylum(name='Deuterostomia')) doc.save() doc.reload() - Animal.drop_collection() + + def test_reload_with_changed_fields(self): + """Ensures reloading will not affect changed fields""" + class User(Document): + name = StringField() + number = IntField() + User.drop_collection() + + user = User(name="Bob", number=1).save() + user.name = "John" + user.number = 2 + + self.assertEqual(user._get_changed_fields(), ['name', 'number']) + user.reload('number') + self.assertEqual(user._get_changed_fields(), ['name']) + user.save() + user.reload() + self.assertEqual(user.name, "John") def test_reload_referencing(self): - """Ensures reloading updates weakrefs correctly - """ + """Ensures reloading updates weakrefs correctly.""" class Embedded(EmbeddedDocument): dict_field = DictField() list_field = ListField() @@ -532,7 +539,7 @@ class InstanceTest(unittest.TestCase): doc.save() doc.dict_field['extra'] = 1 doc = doc.reload(10, 'list_field') - self.assertEqual(doc._get_changed_fields(), []) + self.assertEqual(doc._get_changed_fields(), ['dict_field.extra']) self.assertEqual(len(doc.list_field), 5) self.assertEqual(len(doc.dict_field), 3) self.assertEqual(len(doc.embedded_field.list_field), 4) @@ -543,25 +550,17 @@ class InstanceTest(unittest.TestCase): pass f = Foo() - try: + with self.assertRaises(Foo.DoesNotExist): f.reload() - except Foo.DoesNotExist: - pass - except Exception: - self.assertFalse("Threw wrong exception") f.save() f.delete() - try: + + with self.assertRaises(Foo.DoesNotExist): f.reload() - except Foo.DoesNotExist: - pass - except 
Exception: - self.assertFalse("Threw wrong exception") def test_reload_of_non_strict_with_special_field_name(self): - """Ensures reloading works for documents with meta strict == False - """ + """Ensures reloading works for documents with meta strict == False.""" class Post(Document): meta = { 'strict': False @@ -582,8 +581,7 @@ class InstanceTest(unittest.TestCase): self.assertEqual(post.items, ["more lorem", "even more ipsum"]) def test_dictionary_access(self): - """Ensure that dictionary-style field access works properly. - """ + """Ensure that dictionary-style field access works properly.""" person = self.Person(name='Test User', age=30, job=self.Job()) self.assertEqual(person['name'], 'Test User') @@ -596,10 +594,10 @@ class InstanceTest(unittest.TestCase): # Length = length(assigned fields + id) self.assertEqual(len(person), 5) - self.assertTrue('age' in person) + self.assertIn('age', person) person.age = None - self.assertFalse('age' in person) - self.assertFalse('nationality' in person) + self.assertNotIn('age', person) + self.assertNotIn('nationality', person) def test_embedded_document_to_mongo(self): class Person(EmbeddedDocument): @@ -625,17 +623,15 @@ class InstanceTest(unittest.TestCase): self.assertEqual(sub_doc.to_mongo().keys(), ['id']) def test_embedded_document(self): - """Ensure that embedded documents are set up correctly. 
- """ + """Ensure that embedded documents are set up correctly.""" class Comment(EmbeddedDocument): content = StringField() - self.assertTrue('content' in Comment._fields) - self.assertFalse('id' in Comment._fields) + self.assertIn('content', Comment._fields) + self.assertNotIn('id', Comment._fields) def test_embedded_document_instance(self): - """Ensure that embedded documents can reference parent instance - """ + """Ensure that embedded documents can reference parent instance.""" class Embedded(EmbeddedDocument): string = StringField() @@ -643,6 +639,7 @@ class InstanceTest(unittest.TestCase): embedded_field = EmbeddedDocumentField(Embedded) Doc.drop_collection() + doc = Doc(embedded_field=Embedded(string="Hi")) self.assertHasInstance(doc.embedded_field, doc) @@ -652,7 +649,8 @@ class InstanceTest(unittest.TestCase): def test_embedded_document_complex_instance(self): """Ensure that embedded documents in complex fields can reference - parent instance""" + parent instance. + """ class Embedded(EmbeddedDocument): string = StringField() @@ -668,8 +666,7 @@ class InstanceTest(unittest.TestCase): self.assertHasInstance(doc.embedded_field[0], doc) def test_embedded_document_complex_instance_no_use_db_field(self): - """Ensure that use_db_field is propagated to list of Emb Docs - """ + """Ensure that use_db_field is propagated to list of Emb Docs.""" class Embedded(EmbeddedDocument): string = StringField(db_field='s') @@ -681,7 +678,6 @@ class InstanceTest(unittest.TestCase): self.assertEqual(d['embedded_field'], [{'string': 'Hi'}]) def test_instance_is_set_on_setattr(self): - class Email(EmbeddedDocument): email = EmailField() @@ -689,6 +685,7 @@ class InstanceTest(unittest.TestCase): email = EmbeddedDocumentField(Email) Account.drop_collection() + acc = Account() acc.email = Email(email='test@example.com') self.assertHasInstance(acc._data["email"], acc) @@ -698,7 +695,6 @@ class InstanceTest(unittest.TestCase): self.assertHasInstance(acc1._data["email"], acc1) def 
test_instance_is_set_on_setattr_on_embedded_document_list(self): - class Email(EmbeddedDocument): email = EmailField() @@ -731,12 +727,12 @@ class InstanceTest(unittest.TestCase): t = TestDocument(status="draft", pub_date=datetime.now()) - try: + with self.assertRaises(ValidationError) as cm: t.save() - except ValidationError as e: - expect_msg = "Draft entries may not have a publication date." - self.assertTrue(expect_msg in e.message) - self.assertEqual(e.to_dict(), {'__all__': expect_msg}) + + expected_msg = "Draft entries may not have a publication date." + self.assertIn(expected_msg, cm.exception.message) + self.assertEqual(cm.exception.to_dict(), {'__all__': expected_msg}) t = TestDocument(status="published") t.save(clean=False) @@ -770,12 +766,13 @@ class InstanceTest(unittest.TestCase): TestDocument.drop_collection() t = TestDocument(doc=TestEmbeddedDocument(x=10, y=25, z=15)) - try: + + with self.assertRaises(ValidationError) as cm: t.save() - except ValidationError as e: - expect_msg = "Value of z != x + y" - self.assertTrue(expect_msg in e.message) - self.assertEqual(e.to_dict(), {'doc': {'__all__': expect_msg}}) + + expected_msg = "Value of z != x + y" + self.assertIn(expected_msg, cm.exception.message) + self.assertEqual(cm.exception.to_dict(), {'doc': {'__all__': expected_msg}}) t = TestDocument(doc=TestEmbeddedDocument(x=10, y=25)).save() self.assertEqual(t.doc.z, 35) @@ -843,33 +840,45 @@ class InstanceTest(unittest.TestCase): self.assertDbEqual([dict(other_doc.to_mongo()), dict(doc.to_mongo())]) + @needs_mongodb_v26 + def test_modify_with_positional_push(self): + class BlogPost(Document): + tags = ListField(StringField()) + + post = BlogPost.objects.create(tags=['python']) + self.assertEqual(post.tags, ['python']) + post.modify(push__tags__0=['code', 'mongo']) + self.assertEqual(post.tags, ['code', 'mongo', 'python']) + + # Assert same order of the list items is maintained in the db + self.assertEqual( + BlogPost._get_collection().find_one({'_id': 
post.pk})['tags'], + ['code', 'mongo', 'python'] + ) + def test_save(self): - """Ensure that a document may be saved in the database. - """ + """Ensure that a document may be saved in the database.""" + # Create person object and save it to the database person = self.Person(name='Test User', age=30) person.save() + # Ensure that the object is in the database collection = self.db[self.Person._get_collection_name()] person_obj = collection.find_one({'name': 'Test User'}) self.assertEqual(person_obj['name'], 'Test User') self.assertEqual(person_obj['age'], 30) self.assertEqual(person_obj['_id'], person.id) - # Test skipping validation on save + # Test skipping validation on save class Recipient(Document): email = EmailField(required=True) - recipient = Recipient(email='root@localhost') + recipient = Recipient(email='not-an-email') self.assertRaises(ValidationError, recipient.save) - - try: - recipient.save(validate=False) - except ValidationError: - self.fail() + recipient.save(validate=False) def test_save_to_a_value_that_equates_to_false(self): - class Thing(EmbeddedDocument): count = IntField() @@ -889,7 +898,6 @@ class InstanceTest(unittest.TestCase): self.assertEqual(user.thing.count, 0) def test_save_max_recursion_not_hit(self): - class Person(Document): name = StringField() parent = ReferenceField('self') @@ -915,7 +923,6 @@ class InstanceTest(unittest.TestCase): p0.save() def test_save_max_recursion_not_hit_with_file_field(self): - class Foo(Document): name = StringField() picture = FileField() @@ -939,7 +946,6 @@ class InstanceTest(unittest.TestCase): self.assertEqual(b.picture, b.bar.picture, b.bar.bar.picture) def test_save_cascades(self): - class Person(Document): name = StringField() parent = ReferenceField('self') @@ -962,7 +968,6 @@ class InstanceTest(unittest.TestCase): self.assertEqual(p1.name, p.parent.name) def test_save_cascade_kwargs(self): - class Person(Document): name = StringField() parent = ReferenceField('self') @@ -983,7 +988,6 @@ class 
InstanceTest(unittest.TestCase): self.assertEqual(p1.name, p2.parent.name) def test_save_cascade_meta_false(self): - class Person(Document): name = StringField() parent = ReferenceField('self') @@ -1012,7 +1016,6 @@ class InstanceTest(unittest.TestCase): self.assertEqual(p1.name, p.parent.name) def test_save_cascade_meta_true(self): - class Person(Document): name = StringField() parent = ReferenceField('self') @@ -1037,7 +1040,6 @@ class InstanceTest(unittest.TestCase): self.assertNotEqual(p1.name, p.parent.name) def test_save_cascades_generically(self): - class Person(Document): name = StringField() parent = GenericReferenceField() @@ -1063,7 +1065,6 @@ class InstanceTest(unittest.TestCase): self.assertEqual(p1.name, p.parent.name) def test_save_atomicity_condition(self): - class Widget(Document): toggle = BooleanField(default=False) count = IntField(default=0) @@ -1141,7 +1142,8 @@ class InstanceTest(unittest.TestCase): def test_update(self): """Ensure that an existing document is updated instead of be - overwritten.""" + overwritten. 
+ """ # Create person object and save it to the database person = self.Person(name='Test User', age=30) person.save() @@ -1223,6 +1225,19 @@ class InstanceTest(unittest.TestCase): self.assertEqual(person.name, None) self.assertEqual(person.age, None) + def test_update_rename_operator(self): + """Test the $rename operator.""" + coll = self.Person._get_collection() + doc = self.Person(name='John').save() + raw_doc = coll.find_one({'_id': doc.pk}) + self.assertEqual(set(raw_doc.keys()), set(['_id', '_cls', 'name'])) + + doc.update(rename__name='first_name') + raw_doc = coll.find_one({'_id': doc.pk}) + self.assertEqual(set(raw_doc.keys()), + set(['_id', '_cls', 'first_name'])) + self.assertEqual(raw_doc['first_name'], 'John') + def test_inserts_if_you_set_the_pk(self): p1 = self.Person(name='p1', id=bson.ObjectId()).save() p2 = self.Person(name='p2') @@ -1232,7 +1247,6 @@ class InstanceTest(unittest.TestCase): self.assertEqual(2, self.Person.objects.count()) def test_can_save_if_not_included(self): - class EmbeddedDoc(EmbeddedDocument): pass @@ -1319,10 +1333,7 @@ class InstanceTest(unittest.TestCase): doc2.update(set__name=doc1.name) def test_embedded_update(self): - """ - Test update on `EmbeddedDocumentField` fields - """ - + """Test update on `EmbeddedDocumentField` fields.""" class Page(EmbeddedDocument): log_message = StringField(verbose_name="Log message", required=True) @@ -1342,12 +1353,27 @@ class InstanceTest(unittest.TestCase): site = Site.objects.first() self.assertEqual(site.page.log_message, "Error: Dummy message") - def test_embedded_update_db_field(self): - """ - Test update on `EmbeddedDocumentField` fields when db_field is other - than default. + def test_update_list_field(self): + """Test update on `ListField` with $pull + $in. 
""" + class Doc(Document): + foo = ListField(StringField()) + Doc.drop_collection() + doc = Doc(foo=['a', 'b', 'c']) + doc.save() + + # Update + doc = Doc.objects.first() + doc.update(pull__foo__in=['a', 'c']) + + doc = Doc.objects.first() + self.assertEqual(doc.foo, ['b']) + + def test_embedded_update_db_field(self): + """Test update on `EmbeddedDocumentField` fields when db_field + is other than default. + """ class Page(EmbeddedDocument): log_message = StringField(verbose_name="Log message", db_field="page_log_message", @@ -1370,9 +1396,7 @@ class InstanceTest(unittest.TestCase): self.assertEqual(site.page.log_message, "Error: Dummy message") def test_save_only_changed_fields(self): - """Ensure save only sets / unsets changed fields - """ - + """Ensure save only sets / unsets changed fields.""" class User(self.Person): active = BooleanField(default=True) @@ -1431,9 +1455,9 @@ class InstanceTest(unittest.TestCase): user = User.objects.first() # Even if stored as ObjectId's internally mongoengine uses DBRefs # As ObjectId's aren't automatically derefenced - self.assertTrue(isinstance(user._data['orgs'][0], DBRef)) - self.assertTrue(isinstance(user.orgs[0], Organization)) - self.assertTrue(isinstance(user._data['orgs'][0], Organization)) + self.assertIsInstance(user._data['orgs'][0], DBRef) + self.assertIsInstance(user.orgs[0], Organization) + self.assertIsInstance(user._data['orgs'][0], Organization) # Changing a value with query_counter() as q: @@ -1492,8 +1516,8 @@ class InstanceTest(unittest.TestCase): self.assertEqual(q, 3) def test_set_unset_one_operation(self): - """Ensure that $set and $unset actions are performed in the same - operation. + """Ensure that $set and $unset actions are performed in the + same operation. 
""" class FooBar(Document): foo = StringField(default=None) @@ -1514,9 +1538,7 @@ class InstanceTest(unittest.TestCase): self.assertEqual(1, q) def test_save_only_changed_fields_recursive(self): - """Ensure save only sets / unsets changed fields - """ - + """Ensure save only sets / unsets changed fields.""" class Comment(EmbeddedDocument): published = BooleanField(default=True) @@ -1556,8 +1578,7 @@ class InstanceTest(unittest.TestCase): self.assertFalse(person.comments_dict['first_post'].published) def test_delete(self): - """Ensure that document may be deleted using the delete method. - """ + """Ensure that document may be deleted using the delete method.""" person = self.Person(name="Test User", age=30) person.save() self.assertEqual(self.Person.objects.count(), 1) @@ -1565,33 +1586,34 @@ class InstanceTest(unittest.TestCase): self.assertEqual(self.Person.objects.count(), 0) def test_save_custom_id(self): - """Ensure that a document may be saved with a custom _id. - """ + """Ensure that a document may be saved with a custom _id.""" + # Create person object and save it to the database person = self.Person(name='Test User', age=30, id='497ce96f395f2f052a494fd4') person.save() + # Ensure that the object is in the database with the correct _id collection = self.db[self.Person._get_collection_name()] person_obj = collection.find_one({'name': 'Test User'}) self.assertEqual(str(person_obj['_id']), '497ce96f395f2f052a494fd4') def test_save_custom_pk(self): - """ - Ensure that a document may be saved with a custom _id using pk alias. + """Ensure that a document may be saved with a custom _id using + pk alias. 
""" # Create person object and save it to the database person = self.Person(name='Test User', age=30, pk='497ce96f395f2f052a494fd4') person.save() + # Ensure that the object is in the database with the correct _id collection = self.db[self.Person._get_collection_name()] person_obj = collection.find_one({'name': 'Test User'}) self.assertEqual(str(person_obj['_id']), '497ce96f395f2f052a494fd4') def test_save_list(self): - """Ensure that a list field may be properly saved. - """ + """Ensure that a list field may be properly saved.""" class Comment(EmbeddedDocument): content = StringField() @@ -1614,8 +1636,6 @@ class InstanceTest(unittest.TestCase): for comment_obj, comment in zip(post_obj['comments'], comments): self.assertEqual(comment_obj['content'], comment['content']) - BlogPost.drop_collection() - def test_list_search_by_embedded(self): class User(Document): username = StringField(required=True) @@ -1675,8 +1695,8 @@ class InstanceTest(unittest.TestCase): list(Page.objects.filter(comments__user=u3))) def test_save_embedded_document(self): - """Ensure that a document with an embedded document field may be - saved in the database. + """Ensure that a document with an embedded document field may + be saved in the database. """ class EmployeeDetails(EmbeddedDocument): position = StringField() @@ -1695,13 +1715,13 @@ class InstanceTest(unittest.TestCase): employee_obj = collection.find_one({'name': 'Test Employee'}) self.assertEqual(employee_obj['name'], 'Test Employee') self.assertEqual(employee_obj['age'], 50) + # Ensure that the 'details' embedded object saved correctly self.assertEqual(employee_obj['details']['position'], 'Developer') def test_embedded_update_after_save(self): - """ - Test update of `EmbeddedDocumentField` attached to a newly saved - document. + """Test update of `EmbeddedDocumentField` attached to a newly + saved document. 
""" class Page(EmbeddedDocument): log_message = StringField(verbose_name="Log message", @@ -1722,8 +1742,8 @@ class InstanceTest(unittest.TestCase): self.assertEqual(site.page.log_message, "Error: Dummy message") def test_updating_an_embedded_document(self): - """Ensure that a document with an embedded document field may be - saved in the database. + """Ensure that a document with an embedded document field may + be saved in the database. """ class EmployeeDetails(EmbeddedDocument): position = StringField() @@ -1758,7 +1778,6 @@ class InstanceTest(unittest.TestCase): self.assertEqual(promoted_employee.details, None) def test_object_mixins(self): - class NameMixin(object): name = StringField() @@ -1797,9 +1816,9 @@ class InstanceTest(unittest.TestCase): self.assertEqual(t.count, 12) def test_save_reference(self): - """Ensure that a document reference field may be saved in the database. + """Ensure that a document reference field may be saved in the + database. """ - class BlogPost(Document): meta = {'collection': 'blogpost_1'} content = StringField() @@ -1818,9 +1837,8 @@ class InstanceTest(unittest.TestCase): post_obj = BlogPost.objects.first() # Test laziness - self.assertTrue(isinstance(post_obj._data['author'], - bson.DBRef)) - self.assertTrue(isinstance(post_obj.author, self.Person)) + self.assertIsInstance(post_obj._data['author'], bson.DBRef) + self.assertIsInstance(post_obj.author, self.Person) self.assertEqual(post_obj.author.name, 'Test User') # Ensure that the dereferenced object may be changed and saved @@ -1830,8 +1848,6 @@ class InstanceTest(unittest.TestCase): author = list(self.Person.objects(name='Test User'))[-1] self.assertEqual(author.age, 25) - BlogPost.drop_collection() - def test_duplicate_db_fields_raise_invalid_document_error(self): """Ensure a InvalidDocumentError is thrown if duplicate fields declare the same db_field. 
@@ -1842,7 +1858,7 @@ class InstanceTest(unittest.TestCase): name2 = StringField(db_field='name') def test_invalid_son(self): - """Raise an error if loading invalid data""" + """Raise an error if loading invalid data.""" class Occurrence(EmbeddedDocument): number = IntField() @@ -1860,10 +1876,14 @@ class InstanceTest(unittest.TestCase): 'occurs': {"hello": None} }) - def test_reverse_delete_rule_cascade_and_nullify(self): - """Ensure that a referenced document is also deleted upon deletion. - """ + # Tests for issue #1438: https://github.com/MongoEngine/mongoengine/issues/1438 + with self.assertRaises(ValueError): + Word._from_son('this is not a valid SON dict') + def test_reverse_delete_rule_cascade_and_nullify(self): + """Ensure that a referenced document is also deleted upon + deletion. + """ class BlogPost(Document): content = StringField() author = ReferenceField(self.Person, reverse_delete_rule=CASCADE) @@ -1892,6 +1912,25 @@ class InstanceTest(unittest.TestCase): author.delete() self.assertEqual(BlogPost.objects.count(), 0) + def test_reverse_delete_rule_pull(self): + """Ensure that a referenced document is also deleted with + pull. + """ + class Record(Document): + name = StringField() + children = ListField(ReferenceField('self', reverse_delete_rule=PULL)) + + Record.drop_collection() + + parent_record = Record(name='parent').save() + child_record = Record(name='child').save() + parent_record.children.append(child_record) + parent_record.save() + + child_record.delete() + self.assertEqual(Record.objects(name='parent').get().children, []) + + def test_reverse_delete_rule_with_custom_id_field(self): """Ensure that a referenced document with custom primary key is also deleted upon deletion. @@ -1918,7 +1957,8 @@ class InstanceTest(unittest.TestCase): self.assertEqual(Book.objects.count(), 0) def test_reverse_delete_rule_with_shared_id_among_collections(self): - """Ensure that cascade delete rule doesn't mix id among collections. 
+ """Ensure that cascade delete rule doesn't mix id among + collections. """ class User(Document): id = IntField(primary_key=True) @@ -1949,10 +1989,9 @@ class InstanceTest(unittest.TestCase): self.assertEqual(Book.objects.get(), book_2) def test_reverse_delete_rule_with_document_inheritance(self): - """Ensure that a referenced document is also deleted upon deletion - of a child document. + """Ensure that a referenced document is also deleted upon + deletion of a child document. """ - class Writer(self.Person): pass @@ -1984,10 +2023,9 @@ class InstanceTest(unittest.TestCase): self.assertEqual(BlogPost.objects.count(), 0) def test_reverse_delete_rule_cascade_and_nullify_complex_field(self): - """Ensure that a referenced document is also deleted upon deletion for - complex fields. + """Ensure that a referenced document is also deleted upon + deletion for complex fields. """ - class BlogPost(Document): content = StringField() authors = ListField(ReferenceField( @@ -1996,7 +2034,6 @@ class InstanceTest(unittest.TestCase): self.Person, reverse_delete_rule=NULLIFY)) self.Person.drop_collection() - BlogPost.drop_collection() author = self.Person(name='Test User') @@ -2020,10 +2057,10 @@ class InstanceTest(unittest.TestCase): self.assertEqual(BlogPost.objects.count(), 0) def test_reverse_delete_rule_cascade_triggers_pre_delete_signal(self): - """ ensure the pre_delete signal is triggered upon a cascading deletion - setup a blog post with content, an author and editor - delete the author which triggers deletion of blogpost via cascade - blog post's pre_delete signal alters an editor attribute + """Ensure the pre_delete signal is triggered upon a cascading + deletion setup a blog post with content, an author and editor + delete the author which triggers deletion of blogpost via + cascade blog post's pre_delete signal alters an editor attribute. 
""" class Editor(self.Person): review_queue = IntField(default=0) @@ -2051,6 +2088,7 @@ class InstanceTest(unittest.TestCase): # delete the author, the post is also deleted due to the CASCADE rule author.delete() + # the pre-delete signal should have decremented the editor's queue editor = Editor.objects(name='Max P.').get() self.assertEqual(editor.review_queue, 0) @@ -2059,7 +2097,6 @@ class InstanceTest(unittest.TestCase): """Ensure that Bi-Directional relationships work with reverse_delete_rule """ - class Bar(Document): content = StringField() foo = ReferenceField('Foo') @@ -2105,8 +2142,8 @@ class InstanceTest(unittest.TestCase): mother = ReferenceField('Person', reverse_delete_rule=DENY) def test_reverse_delete_rule_cascade_recurs(self): - """Ensure that a chain of documents is also deleted upon cascaded - deletion. + """Ensure that a chain of documents is also deleted upon + cascaded deletion. """ class BlogPost(Document): content = StringField() @@ -2136,15 +2173,10 @@ class InstanceTest(unittest.TestCase): author.delete() self.assertEqual(Comment.objects.count(), 0) - self.Person.drop_collection() - BlogPost.drop_collection() - Comment.drop_collection() - def test_reverse_delete_rule_deny(self): - """Ensure that a document cannot be referenced if there are still - documents referring to it. + """Ensure that a document cannot be referenced if there are + still documents referring to it. 
""" - class BlogPost(Document): content = StringField() author = ReferenceField(self.Person, reverse_delete_rule=DENY) @@ -2172,11 +2204,7 @@ class InstanceTest(unittest.TestCase): author.delete() self.assertEqual(self.Person.objects.count(), 1) - self.Person.drop_collection() - BlogPost.drop_collection() - def subclasses_and_unique_keys_works(self): - class A(Document): pass @@ -2192,19 +2220,16 @@ class InstanceTest(unittest.TestCase): self.assertEqual(A.objects.count(), 2) self.assertEqual(B.objects.count(), 1) - A.drop_collection() - B.drop_collection() def test_document_hash(self): - """Test document in list, dict, set - """ + """Test document in list, dict, set.""" class User(Document): pass class BlogPost(Document): pass - # Clear old datas + # Clear old data User.drop_collection() BlogPost.drop_collection() @@ -2216,17 +2241,18 @@ class InstanceTest(unittest.TestCase): b1 = BlogPost.objects.create() b2 = BlogPost.objects.create() - # in List + # Make sure docs are properly identified in a list (__eq__ is used + # for the comparison). all_user_list = list(User.objects.all()) + self.assertIn(u1, all_user_list) + self.assertIn(u2, all_user_list) + self.assertIn(u3, all_user_list) + self.assertNotIn(u4, all_user_list) # New object + self.assertNotIn(b1, all_user_list) # Other object + self.assertNotIn(b2, all_user_list) # Other object - self.assertTrue(u1 in all_user_list) - self.assertTrue(u2 in all_user_list) - self.assertTrue(u3 in all_user_list) - self.assertFalse(u4 in all_user_list) # New object - self.assertFalse(b1 in all_user_list) # Other object - self.assertFalse(b2 in all_user_list) # Other object - - # in Dict + # Make sure docs can be used as keys in a dict (__hash__ is used + # for hashing the docs). 
all_user_dic = {} for u in User.objects.all(): all_user_dic[u] = "OK" @@ -2238,13 +2264,22 @@ class InstanceTest(unittest.TestCase): self.assertEqual(all_user_dic.get(b1, False), False) # Other object self.assertEqual(all_user_dic.get(b2, False), False) # Other object - # in Set + # Make sure docs are properly identified in a set (__hash__ is used + # for hashing the docs). all_user_set = set(User.objects.all()) + self.assertIn(u1, all_user_set) + self.assertNotIn(u4, all_user_set) + self.assertNotIn(b1, all_user_list) + self.assertNotIn(b2, all_user_list) - self.assertTrue(u1 in all_user_set) + # Make sure duplicate docs aren't accepted in the set + self.assertEqual(len(all_user_set), 3) + all_user_set.add(u1) + all_user_set.add(u2) + all_user_set.add(u3) + self.assertEqual(len(all_user_set), 3) def test_picklable(self): - pickle_doc = PickleTest(number=1, string="One", lists=['1', '2']) pickle_doc.embedded = PickleEmbedded() pickled_doc = pickle.dumps(pickle_doc) # make sure pickling works even before the doc is saved @@ -2270,7 +2305,6 @@ class InstanceTest(unittest.TestCase): self.assertEqual(pickle_doc.lists, ["1", "2", "3"]) def test_regular_document_pickle(self): - pickle_doc = PickleTest(number=1, string="One", lists=['1', '2']) pickled_doc = pickle.dumps(pickle_doc) # make sure pickling works even before the doc is saved pickle_doc.save() @@ -2293,7 +2327,6 @@ class InstanceTest(unittest.TestCase): fixtures.PickleTest = PickleTest def test_dynamic_document_pickle(self): - pickle_doc = PickleDynamicTest( name="test", number=1, string="One", lists=['1', '2']) pickle_doc.embedded = PickleDynamicEmbedded(foo="Bar") @@ -2332,7 +2365,6 @@ class InstanceTest(unittest.TestCase): validate = DictField() def test_mutating_documents(self): - class B(EmbeddedDocument): field1 = StringField(default='field1') @@ -2340,6 +2372,7 @@ class InstanceTest(unittest.TestCase): b = EmbeddedDocumentField(B, default=lambda: B()) A.drop_collection() + a = A() a.save() a.reload() @@ 
-2363,12 +2396,13 @@ class InstanceTest(unittest.TestCase): self.assertEqual(a.b.field2.c_field, 'new value') def test_can_save_false_values(self): - """Ensures you can save False values on save""" + """Ensures you can save False values on save.""" class Doc(Document): foo = StringField() archived = BooleanField(default=False, required=True) Doc.drop_collection() + d = Doc() d.save() d.archived = False @@ -2377,11 +2411,12 @@ class InstanceTest(unittest.TestCase): self.assertEqual(Doc.objects(archived=False).count(), 1) def test_can_save_false_values_dynamic(self): - """Ensures you can save False values on dynamic docs""" + """Ensures you can save False values on dynamic docs.""" class Doc(DynamicDocument): foo = StringField() Doc.drop_collection() + d = Doc() d.save() d.archived = False @@ -2421,7 +2456,7 @@ class InstanceTest(unittest.TestCase): Collection.update = orig_update def test_db_alias_tests(self): - """ DB Alias tests """ + """DB Alias tests.""" # mongoenginetest - Is default connection alias from setUp() # Register Aliases register_connection('testdb-1', 'mongoenginetest2') @@ -2483,8 +2518,7 @@ class InstanceTest(unittest.TestCase): get_db("testdb-3")[AuthorBooks._get_collection_name()]) def test_db_alias_overrides(self): - """db_alias can be overriden - """ + """Test db_alias can be overriden.""" # Register a connection with db_alias testdb-2 register_connection('testdb-2', 'mongoenginetest2') @@ -2508,8 +2542,7 @@ class InstanceTest(unittest.TestCase): B._get_collection().database.name) def test_db_alias_propagates(self): - """db_alias propagates? 
- """ + """db_alias propagates?""" register_connection('testdb-1', 'mongoenginetest2') class A(Document): @@ -2522,8 +2555,7 @@ class InstanceTest(unittest.TestCase): self.assertEqual('testdb-1', B._meta.get('db_alias')) def test_db_ref_usage(self): - """ DB Ref usage in dict_fields""" - + """DB Ref usage in dict_fields.""" class User(Document): name = StringField() @@ -2758,7 +2790,6 @@ class InstanceTest(unittest.TestCase): self.assertEqual(user.thing._data['data'], [1, 2, 3]) def test_spaces_in_keys(self): - class Embedded(DynamicEmbeddedDocument): pass @@ -2847,7 +2878,6 @@ class InstanceTest(unittest.TestCase): log.machine = "127.0.0.1" def test_kwargs_simple(self): - class Embedded(EmbeddedDocument): name = StringField() @@ -2867,7 +2897,6 @@ class InstanceTest(unittest.TestCase): self.assertEqual(classic_doc._data, dict_doc._data) def test_kwargs_complex(self): - class Embedded(EmbeddedDocument): name = StringField() @@ -2890,36 +2919,35 @@ class InstanceTest(unittest.TestCase): self.assertEqual(classic_doc._data, dict_doc._data) def test_positional_creation(self): - """Ensure that document may be created using positional arguments. - """ + """Ensure that document may be created using positional arguments.""" person = self.Person("Test User", 42) self.assertEqual(person.name, "Test User") self.assertEqual(person.age, 42) def test_mixed_creation(self): - """Ensure that document may be created using mixed arguments. - """ + """Ensure that document may be created using mixed arguments.""" person = self.Person("Test User", age=42) self.assertEqual(person.name, "Test User") self.assertEqual(person.age, 42) def test_positional_creation_embedded(self): - """Ensure that embedded document may be created using positional arguments. + """Ensure that embedded document may be created using positional + arguments. 
""" job = self.Job("Test Job", 4) self.assertEqual(job.name, "Test Job") self.assertEqual(job.years, 4) def test_mixed_creation_embedded(self): - """Ensure that embedded document may be created using mixed arguments. + """Ensure that embedded document may be created using mixed + arguments. """ job = self.Job("Test Job", years=4) self.assertEqual(job.name, "Test Job") self.assertEqual(job.years, 4) def test_mixed_creation_dynamic(self): - """Ensure that document may be created using mixed arguments. - """ + """Ensure that document may be created using mixed arguments.""" class Person(DynamicDocument): name = StringField() @@ -2928,14 +2956,14 @@ class InstanceTest(unittest.TestCase): self.assertEqual(person.age, 42) def test_bad_mixed_creation(self): - """Ensure that document gives correct error when duplicating arguments + """Ensure that document gives correct error when duplicating + arguments. """ with self.assertRaises(TypeError): return self.Person("Test User", 42, name="Bad User") def test_data_contains_id_field(self): - """Ensure that asking for _data returns 'id' - """ + """Ensure that asking for _data returns 'id'.""" class Person(Document): name = StringField() @@ -2943,11 +2971,10 @@ class InstanceTest(unittest.TestCase): Person(name="Harry Potter").save() person = Person.objects.first() - self.assertTrue('id' in person._data.keys()) + self.assertIn('id', person._data.keys()) self.assertEqual(person._data.get('id'), person.id) def test_complex_nesting_document_and_embedded_document(self): - class Macro(EmbeddedDocument): value = DynamicField(default="UNDEFINED") @@ -2990,7 +3017,6 @@ class InstanceTest(unittest.TestCase): system.nodes["node"].parameters["param"].macros["test"].value) def test_embedded_document_equality(self): - class Test(Document): field = StringField(required=True) @@ -3037,36 +3063,36 @@ class InstanceTest(unittest.TestCase): dbref2 = f._data['test2'] obj2 = f.test2 - self.assertTrue(isinstance(dbref2, DBRef)) - 
self.assertTrue(isinstance(obj2, Test2)) - self.assertTrue(obj2.id == dbref2.id) - self.assertTrue(obj2 == dbref2) - self.assertTrue(dbref2 == obj2) + self.assertIsInstance(dbref2, DBRef) + self.assertIsInstance(obj2, Test2) + self.assertEqual(obj2.id, dbref2.id) + self.assertEqual(obj2, dbref2) + self.assertEqual(dbref2, obj2) dbref3 = f._data['test3'] obj3 = f.test3 - self.assertTrue(isinstance(dbref3, DBRef)) - self.assertTrue(isinstance(obj3, Test3)) - self.assertTrue(obj3.id == dbref3.id) - self.assertTrue(obj3 == dbref3) - self.assertTrue(dbref3 == obj3) + self.assertIsInstance(dbref3, DBRef) + self.assertIsInstance(obj3, Test3) + self.assertEqual(obj3.id, dbref3.id) + self.assertEqual(obj3, dbref3) + self.assertEqual(dbref3, obj3) - self.assertTrue(obj2.id == obj3.id) - self.assertTrue(dbref2.id == dbref3.id) - self.assertFalse(dbref2 == dbref3) - self.assertFalse(dbref3 == dbref2) - self.assertTrue(dbref2 != dbref3) - self.assertTrue(dbref3 != dbref2) + self.assertEqual(obj2.id, obj3.id) + self.assertEqual(dbref2.id, dbref3.id) + self.assertNotEqual(dbref2, dbref3) + self.assertNotEqual(dbref3, dbref2) + self.assertNotEqual(dbref2, dbref3) + self.assertNotEqual(dbref3, dbref2) - self.assertFalse(obj2 == dbref3) - self.assertFalse(dbref3 == obj2) - self.assertTrue(obj2 != dbref3) - self.assertTrue(dbref3 != obj2) + self.assertNotEqual(obj2, dbref3) + self.assertNotEqual(dbref3, obj2) + self.assertNotEqual(obj2, dbref3) + self.assertNotEqual(dbref3, obj2) - self.assertFalse(obj3 == dbref2) - self.assertFalse(dbref2 == obj3) - self.assertTrue(obj3 != dbref2) - self.assertTrue(dbref2 != obj3) + self.assertNotEqual(obj3, dbref2) + self.assertNotEqual(dbref2, obj3) + self.assertNotEqual(obj3, dbref2) + self.assertNotEqual(dbref2, obj3) def test_default_values(self): class Person(Document): @@ -3115,6 +3141,64 @@ class InstanceTest(unittest.TestCase): self.assertEquals(p.id, None) p.id = "12345" # in case it is not working: "OperationError: Shard Keys are 
immutable..." will be raised here + def test_from_son_created_False_without_id(self): + class MyPerson(Document): + name = StringField() + + MyPerson.objects.delete() + + p = MyPerson.from_json('{"name": "a_fancy_name"}', created=False) + self.assertFalse(p._created) + self.assertIsNone(p.id) + p.save() + self.assertIsNotNone(p.id) + saved_p = MyPerson.objects.get(id=p.id) + self.assertEqual(saved_p.name, 'a_fancy_name') + + def test_from_son_created_False_with_id(self): + # 1854 + class MyPerson(Document): + name = StringField() + + MyPerson.objects.delete() + + p = MyPerson.from_json('{"_id": "5b85a8b04ec5dc2da388296e", "name": "a_fancy_name"}', created=False) + self.assertFalse(p._created) + self.assertEqual(p._changed_fields, []) + self.assertEqual(p.name, 'a_fancy_name') + self.assertEqual(p.id, ObjectId('5b85a8b04ec5dc2da388296e')) + p.save() + + with self.assertRaises(DoesNotExist): + # Since created=False and we gave an id in the json and _changed_fields is empty + # mongoengine assumes that the document exits with that structure already + # and calling .save() didn't save anything + MyPerson.objects.get(id=p.id) + + self.assertFalse(p._created) + p.name = 'a new fancy name' + self.assertEqual(p._changed_fields, ['name']) + p.save() + saved_p = MyPerson.objects.get(id=p.id) + self.assertEqual(saved_p.name, p.name) + + def test_from_son_created_True_with_an_id(self): + class MyPerson(Document): + name = StringField() + + MyPerson.objects.delete() + + p = MyPerson.from_json('{"_id": "5b85a8b04ec5dc2da388296e", "name": "a_fancy_name"}', created=True) + self.assertTrue(p._created) + self.assertEqual(p._changed_fields, []) + self.assertEqual(p.name, 'a_fancy_name') + self.assertEqual(p.id, ObjectId('5b85a8b04ec5dc2da388296e')) + p.save() + + saved_p = MyPerson.objects.get(id=p.id) + self.assertEqual(saved_p, p) + self.assertEqual(p.name, 'a_fancy_name') + def test_null_field(self): # 734 class User(Document): @@ -3176,8 +3260,7 @@ class 
InstanceTest(unittest.TestCase): self.assertEqual(idx, 2) def test_falsey_pk(self): - """Ensure that we can create and update a document with Falsey PK. - """ + """Ensure that we can create and update a document with Falsey PK.""" class Person(Document): age = IntField(primary_key=True) height = FloatField() @@ -3189,6 +3272,50 @@ class InstanceTest(unittest.TestCase): person.update(set__height=2.0) + @needs_mongodb_v26 + def test_push_with_position(self): + """Ensure that push with position works properly for an instance.""" + class BlogPost(Document): + slug = StringField() + tags = ListField(StringField()) + + blog = BlogPost() + blog.slug = "ABC" + blog.tags = ["python"] + blog.save() + + blog.update(push__tags__0=["mongodb", "code"]) + blog.reload() + self.assertEqual(blog.tags, ['mongodb', 'code', 'python']) + + def test_push_nested_list(self): + """Ensure that push update works in nested list""" + class BlogPost(Document): + slug = StringField() + tags = ListField() + + blog = BlogPost(slug="test").save() + blog.update(push__tags=["value1", 123]) + blog.reload() + self.assertEqual(blog.tags, [["value1", 123]]) + + def test_accessing_objects_with_indexes_error(self): + insert_result = self.db.company.insert_many([{'name': 'Foo'}, + {'name': 'Foo'}]) # Force 2 doc with same name + REF_OID = insert_result.inserted_ids[0] + self.db.user.insert_one({'company': REF_OID}) # Force 2 doc with same name + + class Company(Document): + name = StringField(unique=True) + + class User(Document): + company = ReferenceField(Company) + + + # Ensure index creation exception aren't swallowed (#1688) + with self.assertRaises(DuplicateKeyError): + User.objects().select_related() + if __name__ == '__main__': unittest.main() diff --git a/tests/document/validation.py b/tests/document/validation.py index 105bc8b0..30a285b2 100644 --- a/tests/document/validation.py +++ b/tests/document/validation.py @@ -20,16 +20,16 @@ class ValidatorErrorTest(unittest.TestCase): # 1st level error 
schema error.errors = {'1st': ValidationError('bad 1st'), } - self.assertTrue('1st' in error.to_dict()) + self.assertIn('1st', error.to_dict()) self.assertEqual(error.to_dict()['1st'], 'bad 1st') # 2nd level error schema error.errors = {'1st': ValidationError('bad 1st', errors={ '2nd': ValidationError('bad 2nd'), })} - self.assertTrue('1st' in error.to_dict()) - self.assertTrue(isinstance(error.to_dict()['1st'], dict)) - self.assertTrue('2nd' in error.to_dict()['1st']) + self.assertIn('1st', error.to_dict()) + self.assertIsInstance(error.to_dict()['1st'], dict) + self.assertIn('2nd', error.to_dict()['1st']) self.assertEqual(error.to_dict()['1st']['2nd'], 'bad 2nd') # moar levels @@ -40,10 +40,10 @@ class ValidatorErrorTest(unittest.TestCase): }), }), })} - self.assertTrue('1st' in error.to_dict()) - self.assertTrue('2nd' in error.to_dict()['1st']) - self.assertTrue('3rd' in error.to_dict()['1st']['2nd']) - self.assertTrue('4th' in error.to_dict()['1st']['2nd']['3rd']) + self.assertIn('1st', error.to_dict()) + self.assertIn('2nd', error.to_dict()['1st']) + self.assertIn('3rd', error.to_dict()['1st']['2nd']) + self.assertIn('4th', error.to_dict()['1st']['2nd']['3rd']) self.assertEqual(error.to_dict()['1st']['2nd']['3rd']['4th'], 'Inception') @@ -58,7 +58,7 @@ class ValidatorErrorTest(unittest.TestCase): try: User().validate() except ValidationError as e: - self.assertTrue("User:None" in e.message) + self.assertIn("User:None", e.message) self.assertEqual(e.to_dict(), { 'username': 'Field is required', 'name': 'Field is required'}) @@ -68,7 +68,7 @@ class ValidatorErrorTest(unittest.TestCase): try: user.save() except ValidationError as e: - self.assertTrue("User:RossC0" in e.message) + self.assertIn("User:RossC0", e.message) self.assertEqual(e.to_dict(), { 'name': 'Field is required'}) @@ -116,7 +116,7 @@ class ValidatorErrorTest(unittest.TestCase): try: Doc(id="bad").validate() except ValidationError as e: - self.assertTrue("SubDoc:None" in e.message) + 
self.assertIn("SubDoc:None", e.message) self.assertEqual(e.to_dict(), { "e": {'val': 'OK could not be converted to int'}}) @@ -127,14 +127,14 @@ class ValidatorErrorTest(unittest.TestCase): doc = Doc.objects.first() keys = doc._data.keys() self.assertEqual(2, len(keys)) - self.assertTrue('e' in keys) - self.assertTrue('id' in keys) + self.assertIn('e', keys) + self.assertIn('id', keys) doc.e.val = "OK" try: doc.save() except ValidationError as e: - self.assertTrue("Doc:test" in e.message) + self.assertIn("Doc:test", e.message) self.assertEqual(e.to_dict(), { "e": {'val': 'OK could not be converted to int'}}) diff --git a/tests/fields/__init__.py b/tests/fields/__init__.py index 8e0640db..4994d0c6 100644 --- a/tests/fields/__init__.py +++ b/tests/fields/__init__.py @@ -1,3 +1,3 @@ -from fields import * -from file_tests import * -from geo import * +from .fields import * +from .file_tests import * +from .geo import * diff --git a/tests/fields/fields.py b/tests/fields/fields.py index 482f21d5..13f3aa84 100644 --- a/tests/fields/fields.py +++ b/tests/fields/fields.py @@ -1,13 +1,13 @@ # -*- coding: utf-8 -*- -import six -from nose.plugins.skip import SkipTest - import datetime import unittest import uuid import math import itertools import re +import sys + +from nose.plugins.skip import SkipTest import six try: @@ -17,7 +17,7 @@ except ImportError: from decimal import Decimal -from bson import Binary, DBRef, ObjectId +from bson import Binary, DBRef, ObjectId, SON try: from bson.int64 import Int64 except ImportError: @@ -26,38 +26,77 @@ except ImportError: from mongoengine import * from mongoengine.connection import get_db from mongoengine.base import (BaseDict, BaseField, EmbeddedDocumentList, - _document_registry) -from mongoengine.errors import NotRegistered, DoesNotExist + _document_registry, LazyReference) + +from tests.utils import MongoDBTestCase __all__ = ("FieldTest", "EmbeddedDocumentListFieldTestCase") -class FieldTest(unittest.TestCase): +class 
FieldTest(MongoDBTestCase): - def setUp(self): - connect(db='mongoenginetest') - self.db = get_db() + def test_datetime_from_empty_string(self): + """ + Ensure an exception is raised when trying to + cast an empty string to datetime. + """ + class MyDoc(Document): + dt = DateTimeField() - def tearDown(self): - self.db.drop_collection('fs.files') - self.db.drop_collection('fs.chunks') - self.db.drop_collection('mongoengine.counters') + md = MyDoc(dt='') + self.assertRaises(ValidationError, md.save) + + def test_date_from_empty_string(self): + """ + Ensure an exception is raised when trying to + cast an empty string to datetime. + """ + class MyDoc(Document): + dt = DateField() + + md = MyDoc(dt='') + self.assertRaises(ValidationError, md.save) + + def test_datetime_from_whitespace_string(self): + """ + Ensure an exception is raised when trying to + cast a whitespace-only string to datetime. + """ + class MyDoc(Document): + dt = DateTimeField() + + md = MyDoc(dt=' ') + self.assertRaises(ValidationError, md.save) + + def test_date_from_whitespace_string(self): + """ + Ensure an exception is raised when trying to + cast a whitespace-only string to datetime. + """ + class MyDoc(Document): + dt = DateField() + + md = MyDoc(dt=' ') + self.assertRaises(ValidationError, md.save) def test_default_values_nothing_set(self): - """Ensure that default field values are used when creating a document. + """Ensure that default field values are used when creating + a document. 
""" class Person(Document): name = StringField() age = IntField(default=30, required=False) userid = StringField(default=lambda: 'test', required=True) created = DateTimeField(default=datetime.datetime.utcnow) + day = DateField(default=datetime.date.today) person = Person(name="Ross") # Confirm saving now would store values data_to_be_saved = sorted(person.to_mongo().keys()) - self.assertEqual( - data_to_be_saved, ['age', 'created', 'name', 'userid']) + self.assertEqual(data_to_be_saved, + ['age', 'created', 'day', 'name', 'userid'] + ) self.assertTrue(person.validate() is None) @@ -65,19 +104,22 @@ class FieldTest(unittest.TestCase): self.assertEqual(person.age, person.age) self.assertEqual(person.userid, person.userid) self.assertEqual(person.created, person.created) + self.assertEqual(person.day, person.day) self.assertEqual(person._data['name'], person.name) self.assertEqual(person._data['age'], person.age) self.assertEqual(person._data['userid'], person.userid) self.assertEqual(person._data['created'], person.created) + self.assertEqual(person._data['day'], person.day) # Confirm introspection changes nothing data_to_be_saved = sorted(person.to_mongo().keys()) self.assertEqual( - data_to_be_saved, ['age', 'created', 'name', 'userid']) + data_to_be_saved, ['age', 'created', 'day', 'name', 'userid']) def test_default_values_set_to_None(self): - """Ensure that default field values are used when creating a document. + """Ensure that default field values are used even when + we explcitly initialize the doc with None values. """ class Person(Document): name = StringField() @@ -109,7 +151,8 @@ class FieldTest(unittest.TestCase): self.assertEqual(data_to_be_saved, ['age', 'created', 'userid']) def test_default_values_when_setting_to_None(self): - """Ensure that default field values are used when creating a document. + """Ensure that default field values are used when creating + a document. 
""" class Person(Document): name = StringField() @@ -129,10 +172,10 @@ class FieldTest(unittest.TestCase): self.assertTrue(person.validate() is None) - self.assertEqual(person.name, person.name) - self.assertEqual(person.age, person.age) - self.assertEqual(person.userid, person.userid) - self.assertEqual(person.created, person.created) + self.assertEqual(person.name, None) + self.assertEqual(person.age, 30) + self.assertEqual(person.userid, 'test') + self.assertIsInstance(person.created, datetime.datetime) self.assertEqual(person._data['name'], person.name) self.assertEqual(person._data['age'], person.age) @@ -144,7 +187,8 @@ class FieldTest(unittest.TestCase): self.assertEqual(data_to_be_saved, ['age', 'created', 'userid']) def test_default_values_when_deleting_value(self): - """Ensure that default field values are used when creating a document. + """Ensure that default field values are used after non-default + values are explicitly deleted. """ class Person(Document): name = StringField() @@ -152,7 +196,8 @@ class FieldTest(unittest.TestCase): userid = StringField(default=lambda: 'test', required=True) created = DateTimeField(default=datetime.datetime.utcnow) - person = Person(name="Ross") + person = Person(name="Ross", age=50, userid='different', + created=datetime.datetime(2014, 6, 12)) del person.name del person.age del person.userid @@ -163,10 +208,11 @@ class FieldTest(unittest.TestCase): self.assertTrue(person.validate() is None) - self.assertEqual(person.name, person.name) - self.assertEqual(person.age, person.age) - self.assertEqual(person.userid, person.userid) - self.assertEqual(person.created, person.created) + self.assertEqual(person.name, None) + self.assertEqual(person.age, 30) + self.assertEqual(person.userid, 'test') + self.assertIsInstance(person.created, datetime.datetime) + self.assertNotEqual(person.created, datetime.datetime(2014, 6, 12)) self.assertEqual(person._data['name'], person.name) self.assertEqual(person._data['age'], person.age) @@ 
-178,8 +224,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(data_to_be_saved, ['age', 'created', 'userid']) def test_required_values(self): - """Ensure that required field constraints are enforced. - """ + """Ensure that required field constraints are enforced.""" class Person(Document): name = StringField(required=True) age = IntField(required=True) @@ -191,9 +236,9 @@ class FieldTest(unittest.TestCase): self.assertRaises(ValidationError, person.validate) def test_not_required_handles_none_in_update(self): - """Ensure that every fields should accept None if required is False. + """Ensure that every fields should accept None if required is + False. """ - class HandleNoneFields(Document): str_fld = StringField() int_fld = IntField() @@ -219,17 +264,16 @@ class FieldTest(unittest.TestCase): # Retrive data from db and verify it. ret = HandleNoneFields.objects.all()[0] - self.assertEqual(ret.str_fld, None) - self.assertEqual(ret.int_fld, None) - self.assertEqual(ret.flt_fld, None) + self.assertIsNone(ret.str_fld) + self.assertIsNone(ret.int_fld) + self.assertIsNone(ret.flt_fld) - # Return current time if retrived value is None. - self.assertTrue(isinstance(ret.comp_dt_fld, datetime.datetime)) + self.assertIsNone(ret.comp_dt_fld) def test_not_required_handles_none_from_database(self): - """Ensure that every fields can handle null values from the database. + """Ensure that every field can handle null values from the + database. 
""" - class HandleNoneFields(Document): str_fld = StringField(required=True) int_fld = IntField(required=True) @@ -242,26 +286,28 @@ class FieldTest(unittest.TestCase): doc.str_fld = u'spam ham egg' doc.int_fld = 42 doc.flt_fld = 4.2 - doc.com_dt_fld = datetime.datetime.utcnow() + doc.comp_dt_fld = datetime.datetime.utcnow() doc.save() - collection = self.db[HandleNoneFields._get_collection_name()] - obj = collection.update({"_id": doc.id}, {"$unset": { - "str_fld": 1, - "int_fld": 1, - "flt_fld": 1, - "comp_dt_fld": 1} + # Unset all the fields + obj = HandleNoneFields._get_collection().update({"_id": doc.id}, { + "$unset": { + "str_fld": 1, + "int_fld": 1, + "flt_fld": 1, + "comp_dt_fld": 1 + } }) # Retrive data from db and verify it. - ret = HandleNoneFields.objects.all()[0] - - self.assertEqual(ret.str_fld, None) - self.assertEqual(ret.int_fld, None) - self.assertEqual(ret.flt_fld, None) - # Return current time if retrived value is None. - self.assertTrue(isinstance(ret.comp_dt_fld, datetime.datetime)) + ret = HandleNoneFields.objects.first() + self.assertIsNone(ret.str_fld) + self.assertIsNone(ret.int_fld) + self.assertIsNone(ret.flt_fld) + self.assertIsNone(ret.comp_dt_fld) + # Retrieved object shouldn't pass validation when a re-save is + # attempted. self.assertRaises(ValidationError, ret.validate) def test_int_and_float_ne_operator(self): @@ -289,7 +335,8 @@ class FieldTest(unittest.TestCase): self.assertEqual(1, TestDocument.objects(long_fld__ne=None).count()) def test_object_id_validation(self): - """Ensure that invalid values cannot be assigned to string fields. + """Ensure that invalid values cannot be assigned to an + ObjectIdField. """ class Person(Document): name = StringField() @@ -307,8 +354,7 @@ class FieldTest(unittest.TestCase): person.validate() def test_string_validation(self): - """Ensure that invalid values cannot be assigned to string fields. 
- """ + """Ensure that invalid values cannot be assigned to string fields.""" class Person(Document): name = StringField(max_length=20) userid = StringField(r'[0-9a-z_]+$') @@ -332,8 +378,7 @@ class FieldTest(unittest.TestCase): person.validate() def test_url_validation(self): - """Ensure that URLFields validate urls properly. - """ + """Ensure that URLFields validate urls properly.""" class Link(Document): url = URLField() @@ -344,6 +389,25 @@ class FieldTest(unittest.TestCase): link.url = 'http://www.google.com:8080' link.validate() + def test_unicode_url_validation(self): + """Ensure unicode URLs are validated properly.""" + class Link(Document): + url = URLField() + + link = Link() + link.url = u'http://привет.com' + + # TODO fix URL validation - this *IS* a valid URL + # For now we just want to make sure that the error message is correct + try: + link.validate() + self.assertTrue(False) + except ValidationError as e: + self.assertEqual( + unicode(e), + u"ValidationError (Link:None) (Invalid URL: http://\u043f\u0440\u0438\u0432\u0435\u0442.com: ['url'])" + ) + def test_url_scheme_validation(self): """Ensure that URLFields validate urls with specific schemes properly. 
""" @@ -469,10 +533,25 @@ class FieldTest(unittest.TestCase): person_2 = Person(height='something invalid') self.assertRaises(ValidationError, person_2.validate) - Person.drop_collection() + def test_db_field_validation(self): + """Ensure that db_field doesn't accept invalid values.""" + + # dot in the name + with self.assertRaises(ValueError): + class User(Document): + name = StringField(db_field='user.name') + + # name starting with $ + with self.assertRaises(ValueError): + class User(Document): + name = StringField(db_field='$name') + + # name containing a null character + with self.assertRaises(ValueError): + class User(Document): + name = StringField(db_field='name\0') def test_decimal_comparison(self): - class Person(Document): money = DecimalField() @@ -525,7 +604,8 @@ class FieldTest(unittest.TestCase): self.assertEqual(expected, actual) def test_boolean_validation(self): - """Ensure that invalid values cannot be assigned to boolean fields. + """Ensure that invalid values cannot be assigned to boolean + fields. """ class Person(Document): admin = BooleanField() @@ -565,8 +645,7 @@ class FieldTest(unittest.TestCase): self.assertRaises(ValidationError, person.validate) def test_uuid_field_binary(self): - """Test UUID fields storing as Binary object - """ + """Test UUID fields storing as Binary object.""" class Person(Document): api_key = UUIDField(binary=True) @@ -590,7 +669,8 @@ class FieldTest(unittest.TestCase): self.assertRaises(ValidationError, person.validate) def test_datetime_validation(self): - """Ensure that invalid values cannot be assigned to datetime fields. + """Ensure that invalid values cannot be assigned to datetime + fields. """ class LogEntry(Document): time = DateTimeField() @@ -614,6 +694,32 @@ class FieldTest(unittest.TestCase): log.time = 'ABC' self.assertRaises(ValidationError, log.validate) + def test_date_validation(self): + """Ensure that invalid values cannot be assigned to datetime + fields. 
+ """ + class LogEntry(Document): + time = DateField() + + log = LogEntry() + log.time = datetime.datetime.now() + log.validate() + + log.time = datetime.date.today() + log.validate() + + log.time = datetime.datetime.now().isoformat(' ') + log.validate() + + if dateutil: + log.time = datetime.datetime.now().isoformat('T') + log.validate() + + log.time = -1 + self.assertRaises(ValidationError, log.validate) + log.time = 'ABC' + self.assertRaises(ValidationError, log.validate) + def test_datetime_tz_aware_mark_as_changed(self): from mongoengine import connection @@ -654,8 +760,6 @@ class FieldTest(unittest.TestCase): log.reload() self.assertEqual(log.date.date(), datetime.date.today()) - LogEntry.drop_collection() - # Post UTC - microseconds are rounded (down) nearest millisecond and # dropped d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999) @@ -687,8 +791,51 @@ class FieldTest(unittest.TestCase): self.assertNotEqual(log.date, d1) self.assertEqual(log.date, d2) + def test_date(self): + """Tests showing pymongo date fields + + See: http://api.mongodb.org/python/current/api/bson/son.html#dt + """ + class LogEntry(Document): + date = DateField() + LogEntry.drop_collection() + # Test can save dates + log = LogEntry() + log.date = datetime.date.today() + log.save() + log.reload() + self.assertEqual(log.date, datetime.date.today()) + + d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999) + d2 = datetime.datetime(1970, 1, 1, 0, 0, 1) + log = LogEntry() + log.date = d1 + log.save() + log.reload() + self.assertEqual(log.date, d1.date()) + self.assertEqual(log.date, d2.date()) + + d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9999) + d2 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9000) + log.date = d1 + log.save() + log.reload() + self.assertEqual(log.date, d1.date()) + self.assertEqual(log.date, d2.date()) + + if not six.PY3: + # Pre UTC dates microseconds below 1000 are dropped + # This does not seem to be true in PY3 + d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999) + d2 = 
datetime.datetime(1969, 12, 31, 23, 59, 59) + log.date = d1 + log.save() + log.reload() + self.assertEqual(log.date, d1.date()) + self.assertEqual(log.date, d2.date()) + def test_datetime_usage(self): """Tests for regular datetime fields""" class LogEntry(Document): @@ -710,184 +857,92 @@ class FieldTest(unittest.TestCase): log1 = LogEntry.objects.get(date=d1.isoformat('T')) self.assertEqual(log, log1) - LogEntry.drop_collection() - - # create 60 log entries - for i in range(1950, 2010): + # create additional 19 log entries for a total of 20 + for i in range(1971, 1990): d = datetime.datetime(i, 1, 1, 0, 0, 1) LogEntry(date=d).save() - self.assertEqual(LogEntry.objects.count(), 60) + self.assertEqual(LogEntry.objects.count(), 20) # Test ordering logs = LogEntry.objects.order_by("date") - count = logs.count() i = 0 - while i == count - 1: + while i < 19: self.assertTrue(logs[i].date <= logs[i + 1].date) i += 1 logs = LogEntry.objects.order_by("-date") - count = logs.count() i = 0 - while i == count - 1: + while i < 19: self.assertTrue(logs[i].date >= logs[i + 1].date) i += 1 # Test searching logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1)) - self.assertEqual(logs.count(), 30) - - logs = LogEntry.objects.filter(date__lte=datetime.datetime(1980, 1, 1)) - self.assertEqual(logs.count(), 30) - - logs = LogEntry.objects.filter( - date__lte=datetime.datetime(2011, 1, 1), - date__gte=datetime.datetime(2000, 1, 1), - ) self.assertEqual(logs.count(), 10) - LogEntry.drop_collection() + logs = LogEntry.objects.filter(date__lte=datetime.datetime(1980, 1, 1)) + self.assertEqual(logs.count(), 10) - def test_complexdatetime_storage(self): - """Tests for complex datetime fields - which can handle microseconds - without rounding. 
- """ + logs = LogEntry.objects.filter( + date__lte=datetime.datetime(1980, 1, 1), + date__gte=datetime.datetime(1975, 1, 1), + ) + self.assertEqual(logs.count(), 5) + + def test_date_usage(self): + """Tests for regular datetime fields""" class LogEntry(Document): - date = ComplexDateTimeField() - date_with_dots = ComplexDateTimeField(separator='.') + date = DateField() LogEntry.drop_collection() - # Post UTC - microseconds are rounded (down) nearest millisecond and - # dropped - with default datetimefields - d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999) + d1 = datetime.datetime(1970, 1, 1, 0, 0, 1) log = LogEntry() log.date = d1 + log.validate() log.save() - log.reload() - self.assertEqual(log.date, d1) - # Post UTC - microseconds are rounded (down) nearest millisecond - with - # default datetimefields - d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9999) - log.date = d1 - log.save() - log.reload() - self.assertEqual(log.date, d1) - - # Pre UTC dates microseconds below 1000 are dropped - with default - # datetimefields - d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999) - log.date = d1 - log.save() - log.reload() - self.assertEqual(log.date, d1) - - # Pre UTC microseconds above 1000 is wonky - with default datetimefields - # log.date has an invalid microsecond value so I can't construct - # a date to compare. 
- for i in range(1001, 3113, 33): - d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, i) - log.date = d1 - log.save() - log.reload() - self.assertEqual(log.date, d1) - log1 = LogEntry.objects.get(date=d1) + for query in (d1, d1.isoformat(' ')): + log1 = LogEntry.objects.get(date=query) self.assertEqual(log, log1) - # Test string padding - microsecond = map(int, [math.pow(10, x) for x in range(6)]) - mm = dd = hh = ii = ss = [1, 10] + if dateutil: + log1 = LogEntry.objects.get(date=d1.isoformat('T')) + self.assertEqual(log, log1) - for values in itertools.product([2014], mm, dd, hh, ii, ss, microsecond): - stored = LogEntry(date=datetime.datetime(*values)).to_mongo()['date'] - self.assertTrue(re.match('^\d{4},\d{2},\d{2},\d{2},\d{2},\d{2},\d{6}$', stored) is not None) - - # Test separator - stored = LogEntry(date_with_dots=datetime.datetime(2014, 1, 1)).to_mongo()['date_with_dots'] - self.assertTrue(re.match('^\d{4}.\d{2}.\d{2}.\d{2}.\d{2}.\d{2}.\d{6}$', stored) is not None) - - LogEntry.drop_collection() - - def test_complexdatetime_usage(self): - """Tests for complex datetime fields - which can handle microseconds - without rounding. 
- """ - class LogEntry(Document): - date = ComplexDateTimeField() - - LogEntry.drop_collection() - - d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999) - log = LogEntry() - log.date = d1 - log.save() - - log1 = LogEntry.objects.get(date=d1) - self.assertEqual(log, log1) - - LogEntry.drop_collection() - - # create 60 log entries - for i in range(1950, 2010): - d = datetime.datetime(i, 1, 1, 0, 0, 1, 999) + # create additional 19 log entries for a total of 20 + for i in range(1971, 1990): + d = datetime.datetime(i, 1, 1, 0, 0, 1) LogEntry(date=d).save() - self.assertEqual(LogEntry.objects.count(), 60) + self.assertEqual(LogEntry.objects.count(), 20) # Test ordering logs = LogEntry.objects.order_by("date") - count = logs.count() i = 0 - while i == count - 1: + while i < 19: self.assertTrue(logs[i].date <= logs[i + 1].date) i += 1 logs = LogEntry.objects.order_by("-date") - count = logs.count() i = 0 - while i == count - 1: + while i < 19: self.assertTrue(logs[i].date >= logs[i + 1].date) i += 1 # Test searching logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1)) - self.assertEqual(logs.count(), 30) - - logs = LogEntry.objects.filter(date__lte=datetime.datetime(1980, 1, 1)) - self.assertEqual(logs.count(), 30) - - logs = LogEntry.objects.filter( - date__lte=datetime.datetime(2011, 1, 1), - date__gte=datetime.datetime(2000, 1, 1), - ) self.assertEqual(logs.count(), 10) - LogEntry.drop_collection() - - # Test microsecond-level ordering/filtering - for microsecond in (99, 999, 9999, 10000): - LogEntry(date=datetime.datetime(2015, 1, 1, 0, 0, 0, microsecond)).save() - - logs = list(LogEntry.objects.order_by('date')) - for next_idx, log in enumerate(logs[:-1], start=1): - next_log = logs[next_idx] - self.assertTrue(log.date < next_log.date) - - logs = list(LogEntry.objects.order_by('-date')) - for next_idx, log in enumerate(logs[:-1], start=1): - next_log = logs[next_idx] - self.assertTrue(log.date > next_log.date) - - logs = 
LogEntry.objects.filter(date__lte=datetime.datetime(2015, 1, 1, 0, 0, 0, 10000)) - self.assertEqual(logs.count(), 4) - - LogEntry.drop_collection() - def test_list_validation(self): - """Ensure that a list field only accepts lists with valid elements. - """ + """Ensure that a list field only accepts lists with valid elements.""" + AccessLevelChoices = ( + ('a', u'Administration'), + ('b', u'Manager'), + ('c', u'Staff'), + ) + class User(Document): pass @@ -899,7 +954,13 @@ class FieldTest(unittest.TestCase): comments = ListField(EmbeddedDocumentField(Comment)) tags = ListField(StringField()) authors = ListField(ReferenceField(User)) + authors_as_lazy = ListField(LazyReferenceField(User)) generic = ListField(GenericReferenceField()) + generic_as_lazy = ListField(GenericLazyReferenceField()) + access_list = ListField(choices=AccessLevelChoices, display_sep=', ') + + User.drop_collection() + BlogPost.drop_collection() post = BlogPost(content='Went for a walk today...') post.validate() @@ -914,6 +975,17 @@ class FieldTest(unittest.TestCase): post.tags = ('fun', 'leisure') post.validate() + post.access_list = 'a,b' + self.assertRaises(ValidationError, post.validate) + + post.access_list = ['c', 'd'] + self.assertRaises(ValidationError, post.validate) + + post.access_list = ['a', 'b'] + post.validate() + + self.assertEqual(post.get_access_list_display(), u'Administration, Manager') + post.comments = ['a'] self.assertRaises(ValidationError, post.validate) post.comments = 'yay' @@ -934,6 +1006,15 @@ class FieldTest(unittest.TestCase): post.authors = [user] post.validate() + post.authors_as_lazy = [Comment()] + self.assertRaises(ValidationError, post.validate) + + post.authors_as_lazy = [User()] + self.assertRaises(ValidationError, post.validate) + + post.authors_as_lazy = [user] + post.validate() + post.generic = [1, 2] self.assertRaises(ValidationError, post.validate) @@ -946,8 +1027,17 @@ class FieldTest(unittest.TestCase): post.generic = [user] post.validate() - 
User.drop_collection() - BlogPost.drop_collection() + post.generic_as_lazy = [1, 2] + self.assertRaises(ValidationError, post.validate) + + post.generic_as_lazy = [User(), Comment()] + self.assertRaises(ValidationError, post.validate) + + post.generic_as_lazy = [Comment()] + self.assertRaises(ValidationError, post.validate) + + post.generic_as_lazy = [user] + post.validate() def test_sorted_list_sorting(self): """Ensure that a sorted list field properly sorts values. @@ -962,6 +1052,8 @@ class FieldTest(unittest.TestCase): ordering='order') tags = SortedListField(StringField()) + BlogPost.drop_collection() + post = BlogPost(content='Went for a walk today...') post.save() @@ -986,8 +1078,6 @@ class FieldTest(unittest.TestCase): self.assertEqual(post.comments[0].content, comment1.content) self.assertEqual(post.comments[1].content, comment2.content) - BlogPost.drop_collection() - def test_reverse_list_sorting(self): """Ensure that a reverse sorted list field properly sorts values""" @@ -1000,6 +1090,8 @@ class FieldTest(unittest.TestCase): ordering='count', reverse=True) name = StringField() + CategoryList.drop_collection() + catlist = CategoryList(name="Top categories") cat1 = Category(name='posts', count=10) cat2 = Category(name='food', count=100) @@ -1012,11 +1104,8 @@ class FieldTest(unittest.TestCase): self.assertEqual(catlist.categories[1].name, cat3.name) self.assertEqual(catlist.categories[2].name, cat1.name) - CategoryList.drop_collection() - def test_list_field(self): - """Ensure that list types work as expected. 
- """ + """Ensure that list types work as expected.""" class BlogPost(Document): info = ListField() @@ -1052,6 +1141,7 @@ class FieldTest(unittest.TestCase): self.assertEqual( BlogPost.objects.filter(info__100__test__exact='test').count(), 0) + # test queries by list post = BlogPost() post.info = ['1', '2'] post.save() @@ -1063,11 +1153,275 @@ class FieldTest(unittest.TestCase): post.info *= 2 post.save() self.assertEqual(BlogPost.objects(info=['1', '2', '3', '4', '1', '2', '3', '4']).count(), 1) + + def test_list_field_manipulative_operators(self): + """Ensure that ListField works with standard list operators that manipulate the list. + """ + class BlogPost(Document): + ref = StringField() + info = ListField(StringField()) + BlogPost.drop_collection() - def test_list_assignment(self): - """Ensure that list field element assignment and slicing work + post = BlogPost() + post.ref = "1234" + post.info = ['0', '1', '2', '3', '4', '5'] + post.save() + + def reset_post(): + post.info = ['0', '1', '2', '3', '4', '5'] + post.save() + + # '__add__(listB)' + # listA+listB + # operator.add(listA, listB) + reset_post() + temp = ['a', 'b'] + post.info = post.info + temp + self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'a', 'b']) + post.save() + post.reload() + self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'a', 'b']) + + # '__delitem__(index)' + # aka 'del list[index]' + # aka 'operator.delitem(list, index)' + reset_post() + del post.info[2] # del from middle ('2') + self.assertEqual(post.info, ['0', '1', '3', '4', '5']) + post.save() + post.reload() + self.assertEqual(post.info, ['0', '1', '3', '4', '5']) + + # '__delitem__(slice(i, j))' + # aka 'del list[i:j]' + # aka 'operator.delitem(list, slice(i,j))' + reset_post() + del post.info[1:3] # removes '1', '2' + self.assertEqual(post.info, ['0', '3', '4', '5']) + post.save() + post.reload() + self.assertEqual(post.info, ['0', '3', '4', '5']) + + # '__iadd__' + # aka 'list += list' + reset_post() + temp 
= ['a', 'b'] + post.info += temp + self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'a', 'b']) + post.save() + post.reload() + self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'a', 'b']) + + # '__imul__' + # aka 'list *= number' + reset_post() + post.info *= 2 + self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', '0', '1', '2', '3', '4', '5']) + post.save() + post.reload() + self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', '0', '1', '2', '3', '4', '5']) + + # '__mul__' + # aka 'listA*listB' + reset_post() + post.info = post.info * 2 + self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', '0', '1', '2', '3', '4', '5']) + post.save() + post.reload() + self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', '0', '1', '2', '3', '4', '5']) + + # '__rmul__' + # aka 'listB*listA' + reset_post() + post.info = 2 * post.info + self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', '0', '1', '2', '3', '4', '5']) + post.save() + post.reload() + self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', '0', '1', '2', '3', '4', '5']) + + # '__setitem__(index, value)' + # aka 'list[index]=value' + # aka 'setitem(list, value)' + reset_post() + post.info[4] = 'a' + self.assertEqual(post.info, ['0', '1', '2', '3', 'a', '5']) + post.save() + post.reload() + self.assertEqual(post.info, ['0', '1', '2', '3', 'a', '5']) + + # __setitem__(index, value) with a negative index + reset_post() + post.info[-2] = 'a' + self.assertEqual(post.info, ['0', '1', '2', '3', 'a', '5']) + post.save() + post.reload() + self.assertEqual(post.info, ['0', '1', '2', '3', 'a', '5']) + + # '__setitem__(slice(i, j), listB)' + # aka 'listA[i:j] = listB' + # aka 'setitem(listA, slice(i, j), listB)' + reset_post() + post.info[1:3] = ['h', 'e', 'l', 'l', 'o'] + self.assertEqual(post.info, ['0', 'h', 'e', 'l', 'l', 'o', '3', '4', '5']) + post.save() + post.reload() + self.assertEqual(post.info, ['0', 'h', 'e', 'l', 'l', 'o', '3', '4', '5']) + + # '__setitem__(slice(i, 
j), listB)' with negative i and j + reset_post() + post.info[-5:-3] = ['h', 'e', 'l', 'l', 'o'] + self.assertEqual(post.info, ['0', 'h', 'e', 'l', 'l', 'o', '3', '4', '5']) + post.save() + post.reload() + self.assertEqual(post.info, ['0', 'h', 'e', 'l', 'l', 'o', '3', '4', '5']) + + # negative + + # 'append' + reset_post() + post.info.append('h') + self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'h']) + post.save() + post.reload() + self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'h']) + + # 'extend' + reset_post() + post.info.extend(['h', 'e', 'l', 'l', 'o']) + self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'h', 'e', 'l', 'l', 'o']) + post.save() + post.reload() + self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'h', 'e', 'l', 'l', 'o']) + # 'insert' + + # 'pop' + reset_post() + x = post.info.pop(2) + y = post.info.pop() + self.assertEqual(post.info, ['0', '1', '3', '4']) + self.assertEqual(x, '2') + self.assertEqual(y, '5') + post.save() + post.reload() + self.assertEqual(post.info, ['0', '1', '3', '4']) + + # 'remove' + reset_post() + post.info.remove('2') + self.assertEqual(post.info, ['0', '1', '3', '4', '5']) + post.save() + post.reload() + self.assertEqual(post.info, ['0', '1', '3', '4', '5']) + + # 'reverse' + reset_post() + post.info.reverse() + self.assertEqual(post.info, ['5', '4', '3', '2', '1', '0']) + post.save() + post.reload() + self.assertEqual(post.info, ['5', '4', '3', '2', '1', '0']) + + # 'sort': though this operator method does manipulate the list, it is + # tested in the 'test_list_field_lexicographic_operators' function + + def test_list_field_invalid_operators(self): + class BlogPost(Document): + ref = StringField() + info = ListField(StringField()) + + post = BlogPost() + post.ref = "1234" + post.info = ['0', '1', '2', '3', '4', '5'] + + # '__hash__' + # aka 'hash(list)' + self.assertRaises(TypeError, lambda: hash(post.info)) + + def test_list_field_lexicographic_operators(self): + """Ensure that 
ListField works with standard list operators that + do lexicographic ordering. """ + class BlogPost(Document): + ref = StringField() + text_info = ListField(StringField()) + oid_info = ListField(ObjectIdField()) + bool_info = ListField(BooleanField()) + + BlogPost.drop_collection() + + blogSmall = BlogPost(ref="small") + blogSmall.text_info = ["a", "a", "a"] + blogSmall.bool_info = [False, False] + blogSmall.save() + blogSmall.reload() + + blogLargeA = BlogPost(ref="big") + blogLargeA.text_info = ["a", "z", "j"] + blogLargeA.bool_info = [False, True] + blogLargeA.save() + blogLargeA.reload() + + blogLargeB = BlogPost(ref="big2") + blogLargeB.text_info = ["a", "z", "j"] + blogLargeB.oid_info = [ + "54495ad94c934721ede76f90", + "54495ad94c934721ede76d23", + "54495ad94c934721ede76d00" + ] + blogLargeB.bool_info = [False, True] + blogLargeB.save() + blogLargeB.reload() + + # '__eq__' aka '==' + self.assertEqual(blogLargeA.text_info, blogLargeB.text_info) + self.assertEqual(blogLargeA.bool_info, blogLargeB.bool_info) + + # '__ge__' aka '>=' + self.assertGreaterEqual(blogLargeA.text_info, blogSmall.text_info) + self.assertGreaterEqual(blogLargeA.text_info, blogLargeB.text_info) + self.assertGreaterEqual(blogLargeA.bool_info, blogSmall.bool_info) + self.assertGreaterEqual(blogLargeA.bool_info, blogLargeB.bool_info) + + # '__gt__' aka '>' + self.assertGreaterEqual(blogLargeA.text_info, blogSmall.text_info) + self.assertGreaterEqual(blogLargeA.bool_info, blogSmall.bool_info) + + # '__le__' aka '<=' + self.assertLessEqual(blogSmall.text_info, blogLargeB.text_info) + self.assertLessEqual(blogLargeA.text_info, blogLargeB.text_info) + self.assertLessEqual(blogSmall.bool_info, blogLargeB.bool_info) + self.assertLessEqual(blogLargeA.bool_info, blogLargeB.bool_info) + + # '__lt__' aka '<' + self.assertLess(blogSmall.text_info, blogLargeB.text_info) + self.assertLess(blogSmall.bool_info, blogLargeB.bool_info) + + # '__ne__' aka '!=' + self.assertNotEqual(blogSmall.text_info, 
blogLargeB.text_info) + self.assertNotEqual(blogSmall.bool_info, blogLargeB.bool_info) + + # 'sort' + blogLargeB.bool_info = [True, False, True, False] + blogLargeB.text_info.sort() + blogLargeB.oid_info.sort() + blogLargeB.bool_info.sort() + sorted_target_list = [ + ObjectId("54495ad94c934721ede76d00"), + ObjectId("54495ad94c934721ede76d23"), + ObjectId("54495ad94c934721ede76f90") + ] + self.assertEqual(blogLargeB.text_info, ["a", "j", "z"]) + self.assertEqual(blogLargeB.oid_info, sorted_target_list) + self.assertEqual(blogLargeB.bool_info, [False, False, True, True]) + blogLargeB.save() + blogLargeB.reload() + self.assertEqual(blogLargeB.text_info, ["a", "j", "z"]) + self.assertEqual(blogLargeB.oid_info, sorted_target_list) + self.assertEqual(blogLargeB.bool_info, [False, False, True, True]) + + def test_list_assignment(self): + """Ensure that list field element assignment and slicing work.""" class BlogPost(Document): info = ListField() @@ -1112,7 +1466,6 @@ class FieldTest(unittest.TestCase): post.reload() self.assertEqual(post.info, [1, 2, 3, 4, 'n5']) - def test_list_field_passed_in_value(self): class Foo(Document): bars = ListField(ReferenceField("Bar")) @@ -1128,8 +1481,9 @@ class FieldTest(unittest.TestCase): self.assertEqual(repr(foo.bars), '[]') def test_list_field_strict(self): - """Ensure that list field handles validation if provided a strict field type.""" - + """Ensure that list field handles validation if provided + a strict field type. 
+ """ class Simple(Document): mapping = ListField(field=IntField()) @@ -1144,30 +1498,26 @@ class FieldTest(unittest.TestCase): e.mapping = ["abc"] e.save() - Simple.drop_collection() - def test_list_field_rejects_strings(self): - """Strings aren't valid list field data types""" - + """Strings aren't valid list field data types.""" class Simple(Document): mapping = ListField() Simple.drop_collection() + e = Simple() e.mapping = 'hello world' - self.assertRaises(ValidationError, e.save) def test_complex_field_required(self): - """Ensure required cant be None / Empty""" - + """Ensure required cant be None / Empty.""" class Simple(Document): mapping = ListField(required=True) Simple.drop_collection() + e = Simple() e.mapping = [] - self.assertRaises(ValidationError, e.save) class Simple(Document): @@ -1176,18 +1526,17 @@ class FieldTest(unittest.TestCase): Simple.drop_collection() e = Simple() e.mapping = {} - self.assertRaises(ValidationError, e.save) def test_complex_field_same_value_not_changed(self): - """ - If a complex field is set to the same value, it should not be marked as - changed. + """If a complex field is set to the same value, it should not + be marked as changed. 
""" class Simple(Document): mapping = ListField() Simple.drop_collection() + e = Simple().save() e.mapping = [] self.assertEqual([], e._changed_fields) @@ -1196,12 +1545,12 @@ class FieldTest(unittest.TestCase): mapping = DictField() Simple.drop_collection() + e = Simple().save() e.mapping = {} self.assertEqual([], e._changed_fields) def test_slice_marks_field_as_changed(self): - class Simple(Document): widgets = ListField() @@ -1214,7 +1563,6 @@ class FieldTest(unittest.TestCase): self.assertEqual(simple.widgets, [4]) def test_del_slice_marks_field_as_changed(self): - class Simple(Document): widgets = ListField() @@ -1227,7 +1575,6 @@ class FieldTest(unittest.TestCase): self.assertEqual(simple.widgets, [4]) def test_list_field_with_negative_indices(self): - class Simple(Document): widgets = ListField() @@ -1241,7 +1588,6 @@ class FieldTest(unittest.TestCase): def test_list_field_complex(self): """Ensure that the list fields can handle the complex types.""" - class SettingBase(EmbeddedDocument): meta = {'allow_inheritance': True} @@ -1255,6 +1601,7 @@ class FieldTest(unittest.TestCase): mapping = ListField() Simple.drop_collection() + e = Simple() e.mapping.append(StringSetting(value='foo')) e.mapping.append(IntegerSetting(value=42)) @@ -1265,8 +1612,8 @@ class FieldTest(unittest.TestCase): e.save() e2 = Simple.objects.get(id=e.id) - self.assertTrue(isinstance(e2.mapping[0], StringSetting)) - self.assertTrue(isinstance(e2.mapping[1], IntegerSetting)) + self.assertIsInstance(e2.mapping[0], StringSetting) + self.assertIsInstance(e2.mapping[1], IntegerSetting) # Test querying self.assertEqual( @@ -1292,11 +1639,8 @@ class FieldTest(unittest.TestCase): self.assertEqual( Simple.objects.filter(mapping__2__list__1__value='Boo').count(), 1) - Simple.drop_collection() - def test_dict_field(self): - """Ensure that dict types work as expected. 
- """ + """Ensure that dict types work as expected.""" class BlogPost(Document): info = DictField() @@ -1358,11 +1702,8 @@ class FieldTest(unittest.TestCase): post.reload() self.assertEqual([], post.info['authors']) - BlogPost.drop_collection() - def test_dictfield_dump_document(self): - """Ensure a DictField can handle another document's dump - """ + """Ensure a DictField can handle another document's dump.""" class Doc(Document): field = DictField() @@ -1400,7 +1741,6 @@ class FieldTest(unittest.TestCase): def test_dictfield_strict(self): """Ensure that dict field handles validation if provided a strict field type.""" - class Simple(Document): mapping = DictField(field=IntField()) @@ -1415,11 +1755,8 @@ class FieldTest(unittest.TestCase): e.mapping['somestring'] = "abc" e.save() - Simple.drop_collection() - def test_dictfield_complex(self): """Ensure that the dict field can handle the complex types.""" - class SettingBase(EmbeddedDocument): meta = {'allow_inheritance': True} @@ -1433,6 +1770,7 @@ class FieldTest(unittest.TestCase): mapping = DictField() Simple.drop_collection() + e = Simple() e.mapping['somestring'] = StringSetting(value='foo') e.mapping['someint'] = IntegerSetting(value=42) @@ -1444,8 +1782,8 @@ class FieldTest(unittest.TestCase): e.save() e2 = Simple.objects.get(id=e.id) - self.assertTrue(isinstance(e2.mapping['somestring'], StringSetting)) - self.assertTrue(isinstance(e2.mapping['someint'], IntegerSetting)) + self.assertIsInstance(e2.mapping['somestring'], StringSetting) + self.assertIsInstance(e2.mapping['someint'], IntegerSetting) # Test querying self.assertEqual( @@ -1469,11 +1807,8 @@ class FieldTest(unittest.TestCase): self.assertEqual( Simple.objects.filter(mapping__nested_dict__list__1__value='Boo').count(), 1) - Simple.drop_collection() - def test_atomic_update_dict_field(self): """Ensure that the entire DictField can be atomically updated.""" - class Simple(Document): mapping = DictField(field=ListField(IntField(required=True))) @@ 
-1491,11 +1826,8 @@ class FieldTest(unittest.TestCase): with self.assertRaises(ValueError): e.update(set__mapping={"somestrings": ["foo", "bar", ]}) - Simple.drop_collection() - def test_mapfield(self): """Ensure that the MapField handles the declared type.""" - class Simple(Document): mapping = MapField(IntField()) @@ -1513,11 +1845,8 @@ class FieldTest(unittest.TestCase): class NoDeclaredType(Document): mapping = MapField() - Simple.drop_collection() - def test_complex_mapfield(self): """Ensure that the MapField can handle complex declared types.""" - class SettingBase(EmbeddedDocument): meta = {"allow_inheritance": True} @@ -1538,15 +1867,14 @@ class FieldTest(unittest.TestCase): e.save() e2 = Extensible.objects.get(id=e.id) - self.assertTrue(isinstance(e2.mapping['somestring'], StringSetting)) - self.assertTrue(isinstance(e2.mapping['someint'], IntegerSetting)) + self.assertIsInstance(e2.mapping['somestring'], StringSetting) + self.assertIsInstance(e2.mapping['someint'], IntegerSetting) with self.assertRaises(ValidationError): e.mapping['someint'] = 123 e.save() def test_embedded_mapfield_db_field(self): - class Embedded(EmbeddedDocument): number = IntField(default=0, db_field='i') @@ -1583,11 +1911,10 @@ class FieldTest(unittest.TestCase): test.my_map['1'].name = 'test updated' test.save() - Test.drop_collection() - def test_map_field_lookup(self): - """Ensure MapField lookups succeed on Fields without a lookup method""" - + """Ensure MapField lookups succeed on Fields without a lookup + method. 
+ """ class Action(EmbeddedDocument): operation = StringField() object = StringField() @@ -1609,7 +1936,6 @@ class FieldTest(unittest.TestCase): actions__friends__object='beer').count()) def test_map_field_unicode(self): - class Info(EmbeddedDocument): description = StringField() value_list = ListField(field=StringField()) @@ -1627,12 +1953,12 @@ class FieldTest(unittest.TestCase): tree.save() - self.assertEqual(BlogPost.objects.get(id=tree.id).info_dict[u"éééé"].description, u"VALUE: éééé") - - BlogPost.drop_collection() + self.assertEqual( + BlogPost.objects.get(id=tree.id).info_dict[u"éééé"].description, + u"VALUE: éééé" + ) def test_embedded_db_field(self): - class Embedded(EmbeddedDocument): number = IntField(default=0, db_field='i') @@ -1652,6 +1978,60 @@ class FieldTest(unittest.TestCase): doc = self.db.test.find_one() self.assertEqual(doc['x']['i'], 2) + def test_double_embedded_db_field(self): + """Make sure multiple layers of embedded docs resolve db fields + properly and can be initialized using dicts. + """ + class C(EmbeddedDocument): + txt = StringField() + + class B(EmbeddedDocument): + c = EmbeddedDocumentField(C, db_field='fc') + + class A(Document): + b = EmbeddedDocumentField(B, db_field='fb') + + a = A( + b=B( + c=C(txt='hi') + ) + ) + a.validate() + + a = A(b={'c': {'txt': 'hi'}}) + a.validate() + + def test_double_embedded_db_field_from_son(self): + """Make sure multiple layers of embedded docs resolve db fields + from SON properly. 
+ """ + class C(EmbeddedDocument): + txt = StringField() + + class B(EmbeddedDocument): + c = EmbeddedDocumentField(C, db_field='fc') + + class A(Document): + b = EmbeddedDocumentField(B, db_field='fb') + + a = A._from_son(SON([ + ('fb', SON([ + ('fc', SON([ + ('txt', 'hi') + ])) + ])) + ])) + self.assertEqual(a.b.c.txt, 'hi') + + def test_embedded_document_field_cant_reference_using_a_str_if_it_does_not_exist_yet(self): + raise SkipTest("Using a string reference in an EmbeddedDocumentField does not work if the class isnt registerd yet") + + class MyDoc2(Document): + emb = EmbeddedDocumentField('MyDoc') + + class MyDoc(EmbeddedDocument): + name = StringField() + def test_embedded_document_validation(self): """Ensure that invalid embedded documents cannot be assigned to embedded document fields. @@ -1686,8 +2066,8 @@ class FieldTest(unittest.TestCase): person.validate() def test_embedded_document_inheritance(self): - """Ensure that subclasses of embedded documents may be provided to - EmbeddedDocumentFields of the superclass' type. + """Ensure that subclasses of embedded documents may be provided + to EmbeddedDocumentFields of the superclass' type. """ class User(EmbeddedDocument): name = StringField() @@ -1713,7 +2093,6 @@ class FieldTest(unittest.TestCase): """Ensure that nested list of subclassed embedded documents is handled correctly. """ - class Group(EmbeddedDocument): name = StringField() content = ListField(StringField()) @@ -1735,9 +2114,9 @@ class FieldTest(unittest.TestCase): self.assertEqual(content, User.objects.first().groups[0].content) def test_reference_miss(self): - """Ensure an exception is raised when dereferencing unknow document + """Ensure an exception is raised when dereferencing an unknown + document. 
""" - class Foo(Document): pass @@ -1766,8 +2145,8 @@ class FieldTest(unittest.TestCase): self.assertEqual(bar.generic_ref, {'_ref': expected, '_cls': 'Foo'}) def test_reference_validation(self): - """Ensure that invalid docment objects cannot be assigned to reference - fields. + """Ensure that invalid document objects cannot be assigned to + reference fields. """ class User(Document): name = StringField() @@ -1779,6 +2158,8 @@ class FieldTest(unittest.TestCase): User.drop_collection() BlogPost.drop_collection() + # Make sure ReferenceField only accepts a document class or a string + # with a document class name. self.assertRaises(ValidationError, ReferenceField, EmbeddedDocument) user = User(name='Test User') @@ -1793,19 +2174,38 @@ class FieldTest(unittest.TestCase): post1.author = post2 self.assertRaises(ValidationError, post1.validate) + # Ensure ObjectID's are accepted as references + user_object_id = user.pk + post3 = BlogPost(content="Chips and curry sauce taste good.") + post3.author = user_object_id + post3.save() + + # Make sure referencing a saved document of the right type works user.save() post1.author = user post1.save() + # Make sure referencing a saved document of the *wrong* type fails post2.save() post1.author = post2 self.assertRaises(ValidationError, post1.validate) - User.drop_collection() - BlogPost.drop_collection() + def test_objectid_reference_fields(self): + """Make sure storing Object ID references works.""" + class Person(Document): + name = StringField() + parent = ReferenceField('self') + + Person.drop_collection() + + p1 = Person(name="John").save() + Person(name="Ross", parent=p1.pk).save() + + p = Person.objects.get(name="Ross") + self.assertEqual(p.parent, p1) def test_dbref_reference_fields(self): - + """Make sure storing references as bson.dbref.DBRef works.""" class Person(Document): name = StringField() parent = ReferenceField('self', dbref=True) @@ -1815,410 +2215,31 @@ class FieldTest(unittest.TestCase): p1 = 
Person(name="John").save() Person(name="Ross", parent=p1).save() - col = Person._get_collection() - data = col.find_one({'name': 'Ross'}) - self.assertEqual(data['parent'], DBRef('person', p1.pk)) + self.assertEqual( + Person._get_collection().find_one({'name': 'Ross'})['parent'], + DBRef('person', p1.pk) + ) p = Person.objects.get(name="Ross") self.assertEqual(p.parent, p1) def test_dbref_to_mongo(self): + """Make sure that calling to_mongo on a ReferenceField which + has dbref=False, but actually actually contains a DBRef returns + an ID of that DBRef. + """ class Person(Document): name = StringField() parent = ReferenceField('self', dbref=False) - p1 = Person._from_son({'name': "Yakxxx", - 'parent': "50a234ea469ac1eda42d347d"}) - mongoed = p1.to_mongo() - self.assertTrue(isinstance(mongoed['parent'], ObjectId)) - - def test_cached_reference_field_get_and_save(self): - """ - Tests #1047: CachedReferenceField creates DBRefs on to_python, but can't save them on to_mongo - """ - class Animal(Document): - name = StringField() - tag = StringField() - - class Ocorrence(Document): - person = StringField() - animal = CachedReferenceField(Animal) - - Animal.drop_collection() - Ocorrence.drop_collection() - - Ocorrence(person="testte", - animal=Animal(name="Leopard", tag="heavy").save()).save() - p = Ocorrence.objects.get() - p.person = 'new_testte' - p.save() - - def test_cached_reference_fields(self): - class Animal(Document): - name = StringField() - tag = StringField() - - class Ocorrence(Document): - person = StringField() - animal = CachedReferenceField( - Animal, fields=['tag']) - - Animal.drop_collection() - Ocorrence.drop_collection() - - a = Animal(name="Leopard", tag="heavy") - a.save() - - self.assertEqual(Animal._cached_reference_fields, [Ocorrence.animal]) - o = Ocorrence(person="teste", animal=a) - o.save() - - p = Ocorrence(person="Wilson") - p.save() - - self.assertEqual(Ocorrence.objects(animal=None).count(), 1) - - self.assertEqual( - 
a.to_mongo(fields=['tag']), {'tag': 'heavy', "_id": a.pk}) - - self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy') - - # counts - Ocorrence(person="teste 2").save() - Ocorrence(person="teste 3").save() - - count = Ocorrence.objects(animal__tag='heavy').count() - self.assertEqual(count, 1) - - ocorrence = Ocorrence.objects(animal__tag='heavy').first() - self.assertEqual(ocorrence.person, "teste") - self.assertTrue(isinstance(ocorrence.animal, Animal)) - - def test_cached_reference_field_decimal(self): - class PersonAuto(Document): - name = StringField() - salary = DecimalField() - - class SocialTest(Document): - group = StringField() - person = CachedReferenceField( - PersonAuto, - fields=('salary',)) - - PersonAuto.drop_collection() - SocialTest.drop_collection() - - p = PersonAuto(name="Alberto", salary=Decimal('7000.00')) - p.save() - - s = SocialTest(group="dev", person=p) - s.save() - - self.assertEqual( - SocialTest.objects._collection.find_one({'person.salary': 7000.00}), { - '_id': s.pk, - 'group': s.group, - 'person': { - '_id': p.pk, - 'salary': 7000.00 - } - }) - - def test_cached_reference_field_reference(self): - class Group(Document): - name = StringField() - - class Person(Document): - name = StringField() - group = ReferenceField(Group) - - class SocialData(Document): - obs = StringField() - tags = ListField( - StringField()) - person = CachedReferenceField( - Person, - fields=('group',)) - - Group.drop_collection() - Person.drop_collection() - SocialData.drop_collection() - - g1 = Group(name='dev') - g1.save() - - g2 = Group(name="designers") - g2.save() - - p1 = Person(name="Alberto", group=g1) - p1.save() - - p2 = Person(name="Andre", group=g1) - p2.save() - - p3 = Person(name="Afro design", group=g2) - p3.save() - - s1 = SocialData(obs="testing 123", person=p1, tags=['tag1', 'tag2']) - s1.save() - - s2 = SocialData(obs="testing 321", person=p3, tags=['tag3', 'tag4']) - s2.save() - - self.assertEqual(SocialData.objects._collection.find_one( - 
{'tags': 'tag2'}), { - '_id': s1.pk, - 'obs': 'testing 123', - 'tags': ['tag1', 'tag2'], - 'person': { - '_id': p1.pk, - 'group': g1.pk - } - }) - - self.assertEqual(SocialData.objects(person__group=g2).count(), 1) - self.assertEqual(SocialData.objects(person__group=g2).first(), s2) - - def test_cached_reference_field_update_all(self): - class Person(Document): - TYPES = ( - ('pf', "PF"), - ('pj', "PJ") - ) - name = StringField() - tp = StringField( - choices=TYPES - ) - - father = CachedReferenceField('self', fields=('tp',)) - - Person.drop_collection() - - a1 = Person(name="Wilson Father", tp="pj") - a1.save() - - a2 = Person(name='Wilson Junior', tp='pf', father=a1) - a2.save() - - self.assertEqual(dict(a2.to_mongo()), { - "_id": a2.pk, - "name": u"Wilson Junior", - "tp": u"pf", - "father": { - "_id": a1.pk, - "tp": u"pj" - } - }) - - self.assertEqual(Person.objects(father=a1)._query, { - 'father._id': a1.pk - }) - self.assertEqual(Person.objects(father=a1).count(), 1) - - Person.objects.update(set__tp="pf") - Person.father.sync_all() - - a2.reload() - self.assertEqual(dict(a2.to_mongo()), { - "_id": a2.pk, - "name": u"Wilson Junior", - "tp": u"pf", - "father": { - "_id": a1.pk, - "tp": u"pf" - } - }) - - def test_cached_reference_fields_on_embedded_documents(self): - with self.assertRaises(InvalidDocumentError): - class Test(Document): - name = StringField() - - type('WrongEmbeddedDocument', ( - EmbeddedDocument,), { - 'test': CachedReferenceField(Test) - }) - - def test_cached_reference_auto_sync(self): - class Person(Document): - TYPES = ( - ('pf', "PF"), - ('pj', "PJ") - ) - name = StringField() - tp = StringField( - choices=TYPES - ) - - father = CachedReferenceField('self', fields=('tp',)) - - Person.drop_collection() - - a1 = Person(name="Wilson Father", tp="pj") - a1.save() - - a2 = Person(name='Wilson Junior', tp='pf', father=a1) - a2.save() - - a1.tp = 'pf' - a1.save() - - a2.reload() - self.assertEqual(dict(a2.to_mongo()), { - '_id': a2.pk, - 'name': 
'Wilson Junior', - 'tp': 'pf', - 'father': { - '_id': a1.pk, - 'tp': 'pf' - } - }) - - def test_cached_reference_auto_sync_disabled(self): - class Persone(Document): - TYPES = ( - ('pf', "PF"), - ('pj', "PJ") - ) - name = StringField() - tp = StringField( - choices=TYPES - ) - - father = CachedReferenceField( - 'self', fields=('tp',), auto_sync=False) - - Persone.drop_collection() - - a1 = Persone(name="Wilson Father", tp="pj") - a1.save() - - a2 = Persone(name='Wilson Junior', tp='pf', father=a1) - a2.save() - - a1.tp = 'pf' - a1.save() - - self.assertEqual(Persone.objects._collection.find_one({'_id': a2.pk}), { - '_id': a2.pk, - 'name': 'Wilson Junior', - 'tp': 'pf', - 'father': { - '_id': a1.pk, - 'tp': 'pj' - } - }) - - def test_cached_reference_embedded_fields(self): - class Owner(EmbeddedDocument): - TPS = ( - ('n', "Normal"), - ('u', "Urgent") - ) - name = StringField() - tp = StringField( - verbose_name="Type", - db_field="t", - choices=TPS) - - class Animal(Document): - name = StringField() - tag = StringField() - - owner = EmbeddedDocumentField(Owner) - - class Ocorrence(Document): - person = StringField() - animal = CachedReferenceField( - Animal, fields=['tag', 'owner.tp']) - - Animal.drop_collection() - Ocorrence.drop_collection() - - a = Animal(name="Leopard", tag="heavy", - owner=Owner(tp='u', name="Wilson Júnior") - ) - a.save() - - o = Ocorrence(person="teste", animal=a) - o.save() - self.assertEqual(dict(a.to_mongo(fields=['tag', 'owner.tp'])), { - '_id': a.pk, - 'tag': 'heavy', - 'owner': { - 't': 'u' - } - }) - self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy') - self.assertEqual(o.to_mongo()['animal']['owner']['t'], 'u') - - # counts - Ocorrence(person="teste 2").save() - Ocorrence(person="teste 3").save() - - count = Ocorrence.objects( - animal__tag='heavy', animal__owner__tp='u').count() - self.assertEqual(count, 1) - - ocorrence = Ocorrence.objects( - animal__tag='heavy', - animal__owner__tp='u').first() - 
self.assertEqual(ocorrence.person, "teste") - self.assertTrue(isinstance(ocorrence.animal, Animal)) - - def test_cached_reference_embedded_list_fields(self): - class Owner(EmbeddedDocument): - name = StringField() - tags = ListField(StringField()) - - class Animal(Document): - name = StringField() - tag = StringField() - - owner = EmbeddedDocumentField(Owner) - - class Ocorrence(Document): - person = StringField() - animal = CachedReferenceField( - Animal, fields=['tag', 'owner.tags']) - - Animal.drop_collection() - Ocorrence.drop_collection() - - a = Animal(name="Leopard", tag="heavy", - owner=Owner(tags=['cool', 'funny'], - name="Wilson Júnior") - ) - a.save() - - o = Ocorrence(person="teste 2", animal=a) - o.save() - self.assertEqual(dict(a.to_mongo(fields=['tag', 'owner.tags'])), { - '_id': a.pk, - 'tag': 'heavy', - 'owner': { - 'tags': ['cool', 'funny'] - } - }) - - self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy') - self.assertEqual(o.to_mongo()['animal']['owner']['tags'], - ['cool', 'funny']) - - # counts - Ocorrence(person="teste 2").save() - Ocorrence(person="teste 3").save() - - query = Ocorrence.objects( - animal__tag='heavy', animal__owner__tags='cool')._query - self.assertEqual( - query, {'animal.owner.tags': 'cool', 'animal.tag': 'heavy'}) - - ocorrence = Ocorrence.objects( - animal__tag='heavy', - animal__owner__tags='cool').first() - self.assertEqual(ocorrence.person, "teste 2") - self.assertTrue(isinstance(ocorrence.animal, Animal)) + p = Person( + name='Steve', + parent=DBRef('person', 'abcdefghijklmnop') + ) + self.assertEqual(p.to_mongo(), SON([ + ('name', u'Steve'), + ('parent', 'abcdefghijklmnop') + ])) def test_objectid_reference_fields(self): @@ -2263,9 +2284,6 @@ class FieldTest(unittest.TestCase): self.assertEqual(group_obj.members[0].name, user1.name) self.assertEqual(group_obj.members[1].name, user2.name) - User.drop_collection() - Group.drop_collection() - def test_recursive_reference(self): """Ensure that ReferenceFields can 
reference their own documents. """ @@ -2275,6 +2293,7 @@ class FieldTest(unittest.TestCase): friends = ListField(ReferenceField('self')) Employee.drop_collection() + bill = Employee(name='Bill Lumbergh') bill.save() @@ -2304,8 +2323,8 @@ class FieldTest(unittest.TestCase): children = ListField(EmbeddedDocumentField('TreeNode')) Tree.drop_collection() - tree = Tree(name="Tree") + tree = Tree(name="Tree") first_child = TreeNode(name="Child 1") tree.children.append(first_child) @@ -2418,9 +2437,6 @@ class FieldTest(unittest.TestCase): post = BlogPost.objects(author=m2).first() self.assertEqual(post.id, post2.id) - Member.drop_collection() - BlogPost.drop_collection() - def test_reference_query_conversion_dbref(self): """Ensure that ReferenceFields can be queried using objects and values of the type of the primary key of the referenced object. @@ -2452,9 +2468,6 @@ class FieldTest(unittest.TestCase): post = BlogPost.objects(author=m2).first() self.assertEqual(post.id, post2.id) - Member.drop_collection() - BlogPost.drop_collection() - def test_drop_abstract_document(self): """Ensure that an abstract document cannot be dropped given it has no underlying collection. @@ -2488,9 +2501,6 @@ class FieldTest(unittest.TestCase): self.assertEquals(Brother.objects[0].sibling.name, sister.name) - Sister.drop_collection() - Brother.drop_collection() - def test_reference_abstract_class(self): """Ensure that an abstract class instance cannot be used in the reference of that abstract class. @@ -2512,9 +2522,6 @@ class FieldTest(unittest.TestCase): brother = Brother(name="Bob", sibling=sister) self.assertRaises(ValidationError, brother.save) - Sister.drop_collection() - Brother.drop_collection() - def test_abstract_reference_base_type(self): """Ensure that an an abstract reference fails validation when given a Document that does not inherit from the abstract type. 
@@ -2537,9 +2544,6 @@ class FieldTest(unittest.TestCase): brother = Brother(name="Bob", sibling=mother) self.assertRaises(ValidationError, brother.save) - Brother.drop_collection() - Mother.drop_collection() - def test_generic_reference(self): """Ensure that a GenericReferenceField properly dereferences items. """ @@ -2569,7 +2573,7 @@ class FieldTest(unittest.TestCase): bm = Bookmark.objects(bookmark_object=post_1).first() self.assertEqual(bm.bookmark_object, post_1) - self.assertTrue(isinstance(bm.bookmark_object, Post)) + self.assertIsInstance(bm.bookmark_object, Post) bm.bookmark_object = link_1 bm.save() @@ -2577,11 +2581,7 @@ class FieldTest(unittest.TestCase): bm = Bookmark.objects(bookmark_object=link_1).first() self.assertEqual(bm.bookmark_object, link_1) - self.assertTrue(isinstance(bm.bookmark_object, Link)) - - Link.drop_collection() - Post.drop_collection() - Bookmark.drop_collection() + self.assertIsInstance(bm.bookmark_object, Link) def test_generic_reference_list(self): """Ensure that a ListField properly dereferences generic references. @@ -2613,10 +2613,6 @@ class FieldTest(unittest.TestCase): self.assertEqual(user.bookmarks[0], post_1) self.assertEqual(user.bookmarks[1], link_1) - Link.drop_collection() - Post.drop_collection() - User.drop_collection() - def test_generic_reference_document_not_registered(self): """Ensure dereferencing out of the document registry throws a `NotRegistered` error. 
@@ -2647,9 +2643,6 @@ class FieldTest(unittest.TestCase): except NotRegistered: pass - Link.drop_collection() - User.drop_collection() - def test_generic_reference_is_none(self): class Person(Document): @@ -2657,14 +2650,13 @@ class FieldTest(unittest.TestCase): city = GenericReferenceField() Person.drop_collection() - Person(name="Wilson Jr").save() + Person(name="Wilson Jr").save() self.assertEqual(repr(Person.objects(city=None)), "[]") def test_generic_reference_choices(self): - """Ensure that a GenericReferenceField can handle choices - """ + """Ensure that a GenericReferenceField can handle choices.""" class Link(Document): title = StringField() @@ -2781,10 +2773,6 @@ class FieldTest(unittest.TestCase): user = User.objects.first() self.assertEqual(user.bookmarks, [post_1]) - Link.drop_collection() - Post.drop_collection() - User.drop_collection() - def test_generic_reference_list_item_modification(self): """Ensure that modifications of related documents (through generic reference) don't influence on querying """ @@ -2813,9 +2801,6 @@ class FieldTest(unittest.TestCase): self.assertNotEqual(user, None) self.assertEqual(user.bookmarks[0], post_1) - Post.drop_collection() - User.drop_collection() - def test_generic_reference_filter_by_dbref(self): """Ensure we can search for a specific generic reference by providing its ObjectId. @@ -2843,7 +2828,7 @@ class FieldTest(unittest.TestCase): doc1 = Doc.objects.create() doc2 = Doc.objects.create(ref=doc1) - self.assertTrue(isinstance(doc1.pk, ObjectId)) + self.assertIsInstance(doc1.pk, ObjectId) doc = Doc.objects.get(ref=doc1.pk) self.assertEqual(doc, doc2) @@ -2867,8 +2852,6 @@ class FieldTest(unittest.TestCase): self.assertEqual(MIME_TYPE, attachment_1.content_type) self.assertEqual(BLOB, six.binary_type(attachment_1.blob)) - Attachment.drop_collection() - def test_binary_validation(self): """Ensure that invalid values cannot be assigned to binary fields. 
""" @@ -2901,12 +2884,7 @@ class FieldTest(unittest.TestCase): attachment_size_limit.blob = six.b('\xe6\x00\xc4\xff') attachment_size_limit.validate() - Attachment.drop_collection() - AttachmentRequired.drop_collection() - AttachmentSizeLimit.drop_collection() - def test_binary_field_primary(self): - class Attachment(Document): id = BinaryField(primary_key=True) @@ -2915,13 +2893,10 @@ class FieldTest(unittest.TestCase): att = Attachment(id=binary_id).save() self.assertEqual(1, Attachment.objects.count()) self.assertEqual(1, Attachment.objects.filter(id=att.id).count()) - # TODO use assertIsNotNone once Python 2.6 support is dropped - self.assertTrue(Attachment.objects.filter(id=att.id).first() is not None) att.delete() self.assertEqual(0, Attachment.objects.count()) def test_binary_field_primary_filter_by_binary_pk_as_str(self): - raise SkipTest("Querying by id as string is not currently supported") class Attachment(Document): @@ -2931,31 +2906,45 @@ class FieldTest(unittest.TestCase): binary_id = uuid.uuid4().bytes att = Attachment(id=binary_id).save() self.assertEqual(1, Attachment.objects.filter(id=binary_id).count()) - # TODO use assertIsNotNone once Python 2.6 support is dropped - self.assertTrue(Attachment.objects.filter(id=binary_id).first() is not None) att.delete() self.assertEqual(0, Attachment.objects.count()) - def test_choices_validation(self): - """Ensure that value is in a container of allowed values. 
+ def test_choices_allow_using_sets_as_choices(self): + """Ensure that sets can be used when setting choices """ class Shirt(Document): - size = StringField(max_length=3, choices=( - ('S', 'Small'), ('M', 'Medium'), ('L', 'Large'), - ('XL', 'Extra Large'), ('XXL', 'Extra Extra Large'))) + size = StringField(choices={'M', 'L'}) - Shirt.drop_collection() + Shirt(size='M').validate() + + def test_choices_validation_allow_no_value(self): + """Ensure that .validate passes and no value was provided + for a field setup with choices + """ + class Shirt(Document): + size = StringField(choices=('S', 'M')) shirt = Shirt() shirt.validate() - shirt.size = "S" + def test_choices_validation_accept_possible_value(self): + """Ensure that value is in a container of allowed values. + """ + class Shirt(Document): + size = StringField(choices=('S', 'M')) + + shirt = Shirt(size='S') shirt.validate() - shirt.size = "XS" - self.assertRaises(ValidationError, shirt.validate) + def test_choices_validation_reject_unknown_value(self): + """Ensure that unallowed value are rejected upon validation + """ + class Shirt(Document): + size = StringField(choices=('S', 'M')) - Shirt.drop_collection() + shirt = Shirt(size="XS") + with self.assertRaises(ValidationError): + shirt.validate() def test_choices_validation_documents(self): """ @@ -3084,8 +3073,6 @@ class FieldTest(unittest.TestCase): shirt.size = "XS" self.assertRaises(ValidationError, shirt.validate) - Shirt.drop_collection() - def test_simple_choices_get_field_display(self): """Test dynamic helper for returning the display value of a choices field. @@ -3116,8 +3103,6 @@ class FieldTest(unittest.TestCase): self.assertEqual(shirt.get_style_display(), 'Z') self.assertRaises(ValidationError, shirt.validate) - Shirt.drop_collection() - def test_simple_choices_validation_invalid_value(self): """Ensure that error messages are correct. 
""" @@ -3150,8 +3135,6 @@ class FieldTest(unittest.TestCase): self.assertEqual(error_dict['size'], SIZE_MESSAGE) self.assertEqual(error_dict['color'], COLOR_MESSAGE) - Shirt.drop_collection() - def test_ensure_unique_default_instances(self): """Ensure that every field has it's own unique default instance.""" class D(Document): @@ -3302,7 +3285,6 @@ class FieldTest(unittest.TestCase): self.assertEqual(a.counter, 2) def test_multiple_sequence_fields_on_docs(self): - class Animal(Document): id = SequenceField(primary_key=True) name = StringField() @@ -3449,17 +3431,16 @@ class FieldTest(unittest.TestCase): person.save() person = Person.objects.first() - self.assertTrue(isinstance(person.like, Car)) + self.assertIsInstance(person.like, Car) person.like = Dish(food="arroz", number=15) person.save() person = Person.objects.first() - self.assertTrue(isinstance(person.like, Dish)) + self.assertIsInstance(person.like, Dish) def test_generic_embedded_document_choices(self): - """Ensure you can limit GenericEmbeddedDocument choices - """ + """Ensure you can limit GenericEmbeddedDocument choices.""" class Car(EmbeddedDocument): name = StringField() @@ -3481,11 +3462,11 @@ class FieldTest(unittest.TestCase): person.save() person = Person.objects.first() - self.assertTrue(isinstance(person.like, Dish)) + self.assertIsInstance(person.like, Dish) def test_generic_list_embedded_document_choices(self): - """Ensure you can limit GenericEmbeddedDocument choices inside a list - field + """Ensure you can limit GenericEmbeddedDocument choices inside + a list field. """ class Car(EmbeddedDocument): name = StringField() @@ -3508,11 +3489,10 @@ class FieldTest(unittest.TestCase): person.save() person = Person.objects.first() - self.assertTrue(isinstance(person.likes[0], Dish)) + self.assertIsInstance(person.likes[0], Dish) def test_recursive_validation(self): - """Ensure that a validation result to_dict is available. 
- """ + """Ensure that a validation result to_dict is available.""" class Author(EmbeddedDocument): name = StringField(required=True) @@ -3535,18 +3515,17 @@ class FieldTest(unittest.TestCase): except ValidationError as error: # ValidationError.errors property self.assertTrue(hasattr(error, 'errors')) - self.assertTrue(isinstance(error.errors, dict)) - self.assertTrue('comments' in error.errors) - self.assertTrue(1 in error.errors['comments']) - self.assertTrue(isinstance(error.errors['comments'][1]['content'], - ValidationError)) + self.assertIsInstance(error.errors, dict) + self.assertIn('comments', error.errors) + self.assertIn(1, error.errors['comments']) + self.assertIsInstance(error.errors['comments'][1]['content'], ValidationError) # ValidationError.schema property error_dict = error.to_dict() - self.assertTrue(isinstance(error_dict, dict)) - self.assertTrue('comments' in error_dict) - self.assertTrue(1 in error_dict['comments']) - self.assertTrue('content' in error_dict['comments'][1]) + self.assertIsInstance(error_dict, dict) + self.assertIn('comments', error_dict) + self.assertIn(1, error_dict['comments']) + self.assertIn('content', error_dict['comments'][1]) self.assertEqual(error_dict['comments'][1]['content'], u'Field is required') @@ -3557,23 +3536,99 @@ class FieldTest(unittest.TestCase): class User(Document): email = EmailField() - user = User(email="ross@example.com") - self.assertTrue(user.validate() is None) + user = User(email='ross@example.com') + user.validate() - user = User(email="ross@example.co.uk") - self.assertTrue(user.validate() is None) + user = User(email='ross@example.co.uk') + user.validate() - user = User(email=("Kofq@rhom0e4klgauOhpbpNdogawnyIKvQS0wk2mjqrgGQ5S" - "aJIazqqWkm7.net")) - self.assertTrue(user.validate() is None) + user = User(email=('Kofq@rhom0e4klgauOhpbpNdogawnyIKvQS0wk2mjqrgGQ5S' + 'aJIazqqWkm7.net')) + user.validate() - user = User(email="new-tld@example.technology") - self.assertTrue(user.validate() is None) + 
user = User(email='new-tld@example.technology') + user.validate() + user = User(email='ross@example.com.') + self.assertRaises(ValidationError, user.validate) + + # unicode domain + user = User(email=u'user@пример.рф') + user.validate() + + # invalid unicode domain + user = User(email=u'user@пример') + self.assertRaises(ValidationError, user.validate) + + # invalid data type + user = User(email=123) + self.assertRaises(ValidationError, user.validate) + + def test_email_field_unicode_user(self): + # Don't run this test on pypy3, which doesn't support unicode regex: + # https://bitbucket.org/pypy/pypy/issues/1821/regular-expression-doesnt-find-unicode + if sys.version_info[:2] == (3, 2): + raise SkipTest('unicode email addresses are not supported on PyPy 3') + + class User(Document): + email = EmailField() + + # unicode user shouldn't validate by default... + user = User(email=u'Dörte@Sörensen.example.com') + self.assertRaises(ValidationError, user.validate) + + # ...but it should be fine with allow_utf8_user set to True + class User(Document): + email = EmailField(allow_utf8_user=True) + + user = User(email=u'Dörte@Sörensen.example.com') + user.validate() + + def test_email_field_domain_whitelist(self): + class User(Document): + email = EmailField() + + # localhost domain shouldn't validate by default... user = User(email='me@localhost') self.assertRaises(ValidationError, user.validate) - user = User(email="ross@example.com.") + # ...but it should be fine if it's whitelisted + class User(Document): + email = EmailField(domain_whitelist=['localhost']) + + user = User(email='me@localhost') + user.validate() + + def test_email_field_ip_domain(self): + class User(Document): + email = EmailField() + + valid_ipv4 = 'email@[127.0.0.1]' + valid_ipv6 = 'email@[2001:dB8::1]' + invalid_ip = 'email@[324.0.0.1]' + + # IP address as a domain shouldn't validate by default... 
+ user = User(email=valid_ipv4) + self.assertRaises(ValidationError, user.validate) + + user = User(email=valid_ipv6) + self.assertRaises(ValidationError, user.validate) + + user = User(email=invalid_ip) + self.assertRaises(ValidationError, user.validate) + + # ...but it should be fine with allow_ip_domain set to True + class User(Document): + email = EmailField(allow_ip_domain=True) + + user = User(email=valid_ipv4) + user.validate() + + user = User(email=valid_ipv6) + user.validate() + + # invalid IP should still fail validation + user = User(email=invalid_ip) self.assertRaises(ValidationError, user.validate) def test_email_field_honors_regex(self): @@ -3586,12 +3641,11 @@ class FieldTest(unittest.TestCase): # Passes regex validation user = User(email='me@example.com') - self.assertTrue(user.validate() is None) + self.assertIsNone(user.validate()) def test_tuples_as_tuples(self): - """ - Ensure that tuples remain tuples when they are - inside a ComplexBaseField + """Ensure that tuples remain tuples when they are inside + a ComplexBaseField. 
""" class EnumField(BaseField): @@ -3608,18 +3662,18 @@ class FieldTest(unittest.TestCase): items = ListField(EnumField()) TestDoc.drop_collection() + tuples = [(100, 'Testing')] doc = TestDoc() doc.items = tuples doc.save() x = TestDoc.objects().get() - self.assertTrue(x is not None) - self.assertTrue(len(x.items) == 1) - self.assertTrue(tuple(x.items[0]) in tuples) - self.assertTrue(x.items[0] in tuples) + self.assertIsNotNone(x) + self.assertEqual(len(x.items), 1) + self.assertIn(tuple(x.items[0]), tuples) + self.assertIn(x.items[0], tuples) def test_dynamic_fields_class(self): - class Doc2(Document): field_1 = StringField(db_field='f') @@ -3642,7 +3696,6 @@ class FieldTest(unittest.TestCase): self.assertEqual(doc.embed_me.field_1, "hello") def test_dynamic_fields_embedded_class(self): - class Embed(EmbeddedDocument): field_1 = StringField(db_field='f') @@ -3659,8 +3712,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(doc.embed_me.field_1, "hello") def test_dynamicfield_dump_document(self): - """Ensure a DynamicField can handle another document's dump - """ + """Ensure a DynamicField can handle another document's dump.""" class Doc(Document): field = DynamicField() @@ -3731,60 +3783,55 @@ class FieldTest(unittest.TestCase): def test_sparse_field(self): class Doc(Document): name = StringField(required=False, unique=True, sparse=True) - try: - Doc().save() - Doc().save() - except Exception: - self.fail() + + # This would raise an exception in a non-sparse unique index + Doc().save() + Doc().save() def test_undefined_field_exception(self): - """Tests if a `FieldDoesNotExist` exception is raised when trying to - instanciate a document with a field that's not defined. + """Tests if a `FieldDoesNotExist` exception is raised when + trying to instantiate a document with a field that's not + defined. 
""" - class Doc(Document): - foo = StringField(db_field='f') + foo = StringField() - def test(): + with self.assertRaises(FieldDoesNotExist): Doc(bar='test') - self.assertRaises(FieldDoesNotExist, test) - def test_undefined_field_exception_with_strict(self): - """Tests if a `FieldDoesNotExist` exception is raised when trying to - instanciate a document with a field that's not defined, - even when strict is set to False. + """Tests if a `FieldDoesNotExist` exception is raised when + trying to instantiate a document with a field that's not + defined, even when strict is set to False. """ - class Doc(Document): - foo = StringField(db_field='f') + foo = StringField() meta = {'strict': False} - def test(): + with self.assertRaises(FieldDoesNotExist): Doc(bar='test') - self.assertRaises(FieldDoesNotExist, test) - def test_long_field_is_considered_as_int64(self): """ - Tests that long fields are stored as long in mongo, even if long value - is small enough to be an int. + Tests that long fields are stored as long in mongo, even if long + value is small enough to be an int. """ class TestLongFieldConsideredAsInt64(Document): some_long = LongField() doc = TestLongFieldConsideredAsInt64(some_long=42).save() db = get_db() - self.assertTrue(isinstance(db.test_long_field_considered_as_int64.find()[0]['some_long'], Int64)) - self.assertTrue(isinstance(doc.some_long, six.integer_types)) + self.assertIsInstance(db.test_long_field_considered_as_int64.find()[0]['some_long'], Int64) + self.assertIsInstance(doc.some_long, six.integer_types) -class EmbeddedDocumentListFieldTestCase(unittest.TestCase): - - @classmethod - def setUpClass(cls): - cls.db = connect(db='EmbeddedDocumentListFieldTestCase') +class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): + def setUp(self): + """ + Create two BlogPost entries in the database, each with + several EmbeddedDocuments. 
+ """ class Comments(EmbeddedDocument): author = StringField() message = StringField() @@ -3792,14 +3839,11 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): class BlogPost(Document): comments = EmbeddedDocumentListField(Comments) - cls.Comments = Comments - cls.BlogPost = BlogPost + BlogPost.drop_collection() + + self.Comments = Comments + self.BlogPost = BlogPost - def setUp(self): - """ - Create two BlogPost entries in the database, each with - several EmbeddedDocuments. - """ self.post1 = self.BlogPost(comments=[ self.Comments(author='user1', message='message1'), self.Comments(author='user2', message='message1') @@ -3811,13 +3855,6 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): self.Comments(author='user3', message='message1') ]).save() - def tearDown(self): - self.BlogPost.drop_collection() - - @classmethod - def tearDownClass(cls): - cls.db.drop_database('EmbeddedDocumentListFieldTestCase') - def test_no_keyword_filter(self): """ Tests the filter method of a List of Embedded Documents @@ -4175,7 +4212,8 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): my_list = ListField(EmbeddedDocumentField(EmbeddedWithUnique)) A(my_list=[]).save() - self.assertRaises(NotUniqueError, lambda: A(my_list=[]).save()) + with self.assertRaises(NotUniqueError): + A(my_list=[]).save() class EmbeddedWithSparseUnique(EmbeddedDocument): number = IntField(unique=True, sparse=True) @@ -4183,6 +4221,9 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): class B(Document): my_list = ListField(EmbeddedDocumentField(EmbeddedWithSparseUnique)) + A.drop_collection() + B.drop_collection() + B(my_list=[]).save() B(my_list=[]).save() @@ -4222,6 +4263,8 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): a_field = IntField() c_field = IntField(custom_data=custom_data) + CustomData.drop_collection() + a1 = CustomData(a_field=1, c_field=2).save() self.assertEqual(2, a1.c_field) self.assertFalse(hasattr(a1.c_field, 'custom_data')) @@ 
-4229,5 +4272,1172 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): self.assertEqual(custom_data['a'], CustomData.c_field.custom_data['a']) +class TestEmbeddedDocumentField(MongoDBTestCase): + def test___init___(self): + class MyDoc(EmbeddedDocument): + name = StringField() + + field = EmbeddedDocumentField(MyDoc) + self.assertEqual(field.document_type_obj, MyDoc) + + field2 = EmbeddedDocumentField('MyDoc') + self.assertEqual(field2.document_type_obj, 'MyDoc') + + def test___init___throw_error_if_document_type_is_not_EmbeddedDocument(self): + with self.assertRaises(ValidationError): + EmbeddedDocumentField(dict) + + def test_document_type_throw_error_if_not_EmbeddedDocument_subclass(self): + + class MyDoc(Document): + name = StringField() + + emb = EmbeddedDocumentField('MyDoc') + with self.assertRaises(ValidationError) as ctx: + emb.document_type + self.assertIn('Invalid embedded document class provided to an EmbeddedDocumentField', str(ctx.exception)) + + def test_embedded_document_field_only_allow_subclasses_of_embedded_document(self): + # Relates to #1661 + class MyDoc(Document): + name = StringField() + + with self.assertRaises(ValidationError): + class MyFailingDoc(Document): + emb = EmbeddedDocumentField(MyDoc) + + with self.assertRaises(ValidationError): + class MyFailingdoc2(Document): + emb = EmbeddedDocumentField('MyDoc') + +class CachedReferenceFieldTest(MongoDBTestCase): + + def test_cached_reference_field_get_and_save(self): + """ + Tests #1047: CachedReferenceField creates DBRefs on to_python, + but can't save them on to_mongo. 
+ """ + class Animal(Document): + name = StringField() + tag = StringField() + + class Ocorrence(Document): + person = StringField() + animal = CachedReferenceField(Animal) + + Animal.drop_collection() + Ocorrence.drop_collection() + + Ocorrence(person="testte", + animal=Animal(name="Leopard", tag="heavy").save()).save() + p = Ocorrence.objects.get() + p.person = 'new_testte' + p.save() + + def test_cached_reference_fields(self): + class Animal(Document): + name = StringField() + tag = StringField() + + class Ocorrence(Document): + person = StringField() + animal = CachedReferenceField( + Animal, fields=['tag']) + + Animal.drop_collection() + Ocorrence.drop_collection() + + a = Animal(name="Leopard", tag="heavy") + a.save() + + self.assertEqual(Animal._cached_reference_fields, [Ocorrence.animal]) + o = Ocorrence(person="teste", animal=a) + o.save() + + p = Ocorrence(person="Wilson") + p.save() + + self.assertEqual(Ocorrence.objects(animal=None).count(), 1) + + self.assertEqual( + a.to_mongo(fields=['tag']), {'tag': 'heavy', "_id": a.pk}) + + self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy') + + # counts + Ocorrence(person="teste 2").save() + Ocorrence(person="teste 3").save() + + count = Ocorrence.objects(animal__tag='heavy').count() + self.assertEqual(count, 1) + + ocorrence = Ocorrence.objects(animal__tag='heavy').first() + self.assertEqual(ocorrence.person, "teste") + self.assertIsInstance(ocorrence.animal, Animal) + + def test_cached_reference_field_decimal(self): + class PersonAuto(Document): + name = StringField() + salary = DecimalField() + + class SocialTest(Document): + group = StringField() + person = CachedReferenceField( + PersonAuto, + fields=('salary',)) + + PersonAuto.drop_collection() + SocialTest.drop_collection() + + p = PersonAuto(name="Alberto", salary=Decimal('7000.00')) + p.save() + + s = SocialTest(group="dev", person=p) + s.save() + + self.assertEqual( + SocialTest.objects._collection.find_one({'person.salary': 7000.00}), { + '_id': 
s.pk, + 'group': s.group, + 'person': { + '_id': p.pk, + 'salary': 7000.00 + } + }) + + def test_cached_reference_field_reference(self): + class Group(Document): + name = StringField() + + class Person(Document): + name = StringField() + group = ReferenceField(Group) + + class SocialData(Document): + obs = StringField() + tags = ListField( + StringField()) + person = CachedReferenceField( + Person, + fields=('group',)) + + Group.drop_collection() + Person.drop_collection() + SocialData.drop_collection() + + g1 = Group(name='dev') + g1.save() + + g2 = Group(name="designers") + g2.save() + + p1 = Person(name="Alberto", group=g1) + p1.save() + + p2 = Person(name="Andre", group=g1) + p2.save() + + p3 = Person(name="Afro design", group=g2) + p3.save() + + s1 = SocialData(obs="testing 123", person=p1, tags=['tag1', 'tag2']) + s1.save() + + s2 = SocialData(obs="testing 321", person=p3, tags=['tag3', 'tag4']) + s2.save() + + self.assertEqual(SocialData.objects._collection.find_one( + {'tags': 'tag2'}), { + '_id': s1.pk, + 'obs': 'testing 123', + 'tags': ['tag1', 'tag2'], + 'person': { + '_id': p1.pk, + 'group': g1.pk + } + }) + + self.assertEqual(SocialData.objects(person__group=g2).count(), 1) + self.assertEqual(SocialData.objects(person__group=g2).first(), s2) + + def test_cached_reference_field_push_with_fields(self): + class Product(Document): + name = StringField() + + Product.drop_collection() + + class Basket(Document): + products = ListField(CachedReferenceField(Product, fields=['name'])) + + Basket.drop_collection() + product1 = Product(name='abc').save() + product2 = Product(name='def').save() + basket = Basket(products=[product1]).save() + self.assertEqual( + Basket.objects._collection.find_one(), + { + '_id': basket.pk, + 'products': [ + { + '_id': product1.pk, + 'name': product1.name + } + ] + } + ) + # push to list + basket.update(push__products=product2) + basket.reload() + self.assertEqual( + Basket.objects._collection.find_one(), + { + '_id': basket.pk, + 
'products': [ + { + '_id': product1.pk, + 'name': product1.name + }, + { + '_id': product2.pk, + 'name': product2.name + } + ] + } + ) + + def test_cached_reference_field_update_all(self): + class Person(Document): + TYPES = ( + ('pf', "PF"), + ('pj', "PJ") + ) + name = StringField() + tp = StringField( + choices=TYPES + ) + + father = CachedReferenceField('self', fields=('tp',)) + + Person.drop_collection() + + a1 = Person(name="Wilson Father", tp="pj") + a1.save() + + a2 = Person(name='Wilson Junior', tp='pf', father=a1) + a2.save() + + self.assertEqual(dict(a2.to_mongo()), { + "_id": a2.pk, + "name": u"Wilson Junior", + "tp": u"pf", + "father": { + "_id": a1.pk, + "tp": u"pj" + } + }) + + self.assertEqual(Person.objects(father=a1)._query, { + 'father._id': a1.pk + }) + self.assertEqual(Person.objects(father=a1).count(), 1) + + Person.objects.update(set__tp="pf") + Person.father.sync_all() + + a2.reload() + self.assertEqual(dict(a2.to_mongo()), { + "_id": a2.pk, + "name": u"Wilson Junior", + "tp": u"pf", + "father": { + "_id": a1.pk, + "tp": u"pf" + } + }) + + def test_cached_reference_fields_on_embedded_documents(self): + with self.assertRaises(InvalidDocumentError): + class Test(Document): + name = StringField() + + type('WrongEmbeddedDocument', ( + EmbeddedDocument,), { + 'test': CachedReferenceField(Test) + }) + + def test_cached_reference_auto_sync(self): + class Person(Document): + TYPES = ( + ('pf', "PF"), + ('pj', "PJ") + ) + name = StringField() + tp = StringField( + choices=TYPES + ) + + father = CachedReferenceField('self', fields=('tp',)) + + Person.drop_collection() + + a1 = Person(name="Wilson Father", tp="pj") + a1.save() + + a2 = Person(name='Wilson Junior', tp='pf', father=a1) + a2.save() + + a1.tp = 'pf' + a1.save() + + a2.reload() + self.assertEqual(dict(a2.to_mongo()), { + '_id': a2.pk, + 'name': 'Wilson Junior', + 'tp': 'pf', + 'father': { + '_id': a1.pk, + 'tp': 'pf' + } + }) + + def test_cached_reference_auto_sync_disabled(self): + class 
Persone(Document): + TYPES = ( + ('pf', "PF"), + ('pj', "PJ") + ) + name = StringField() + tp = StringField( + choices=TYPES + ) + + father = CachedReferenceField( + 'self', fields=('tp',), auto_sync=False) + + Persone.drop_collection() + + a1 = Persone(name="Wilson Father", tp="pj") + a1.save() + + a2 = Persone(name='Wilson Junior', tp='pf', father=a1) + a2.save() + + a1.tp = 'pf' + a1.save() + + self.assertEqual(Persone.objects._collection.find_one({'_id': a2.pk}), { + '_id': a2.pk, + 'name': 'Wilson Junior', + 'tp': 'pf', + 'father': { + '_id': a1.pk, + 'tp': 'pj' + } + }) + + def test_cached_reference_embedded_fields(self): + class Owner(EmbeddedDocument): + TPS = ( + ('n', "Normal"), + ('u', "Urgent") + ) + name = StringField() + tp = StringField( + verbose_name="Type", + db_field="t", + choices=TPS) + + class Animal(Document): + name = StringField() + tag = StringField() + + owner = EmbeddedDocumentField(Owner) + + class Ocorrence(Document): + person = StringField() + animal = CachedReferenceField( + Animal, fields=['tag', 'owner.tp']) + + Animal.drop_collection() + Ocorrence.drop_collection() + + a = Animal(name="Leopard", tag="heavy", + owner=Owner(tp='u', name="Wilson Júnior") + ) + a.save() + + o = Ocorrence(person="teste", animal=a) + o.save() + self.assertEqual(dict(a.to_mongo(fields=['tag', 'owner.tp'])), { + '_id': a.pk, + 'tag': 'heavy', + 'owner': { + 't': 'u' + } + }) + self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy') + self.assertEqual(o.to_mongo()['animal']['owner']['t'], 'u') + + # counts + Ocorrence(person="teste 2").save() + Ocorrence(person="teste 3").save() + + count = Ocorrence.objects( + animal__tag='heavy', animal__owner__tp='u').count() + self.assertEqual(count, 1) + + ocorrence = Ocorrence.objects( + animal__tag='heavy', + animal__owner__tp='u').first() + self.assertEqual(ocorrence.person, "teste") + self.assertIsInstance(ocorrence.animal, Animal) + + def test_cached_reference_embedded_list_fields(self): + class 
Owner(EmbeddedDocument): + name = StringField() + tags = ListField(StringField()) + + class Animal(Document): + name = StringField() + tag = StringField() + + owner = EmbeddedDocumentField(Owner) + + class Ocorrence(Document): + person = StringField() + animal = CachedReferenceField( + Animal, fields=['tag', 'owner.tags']) + + Animal.drop_collection() + Ocorrence.drop_collection() + + a = Animal(name="Leopard", tag="heavy", + owner=Owner(tags=['cool', 'funny'], + name="Wilson Júnior") + ) + a.save() + + o = Ocorrence(person="teste 2", animal=a) + o.save() + self.assertEqual(dict(a.to_mongo(fields=['tag', 'owner.tags'])), { + '_id': a.pk, + 'tag': 'heavy', + 'owner': { + 'tags': ['cool', 'funny'] + } + }) + + self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy') + self.assertEqual(o.to_mongo()['animal']['owner']['tags'], + ['cool', 'funny']) + + # counts + Ocorrence(person="teste 2").save() + Ocorrence(person="teste 3").save() + + query = Ocorrence.objects( + animal__tag='heavy', animal__owner__tags='cool')._query + self.assertEqual( + query, {'animal.owner.tags': 'cool', 'animal.tag': 'heavy'}) + + ocorrence = Ocorrence.objects( + animal__tag='heavy', + animal__owner__tags='cool').first() + self.assertEqual(ocorrence.person, "teste 2") + self.assertIsInstance(ocorrence.animal, Animal) + + +class LazyReferenceFieldTest(MongoDBTestCase): + def test_lazy_reference_config(self): + # Make sure ReferenceField only accepts a document class or a string + # with a document class name. 
+ self.assertRaises(ValidationError, LazyReferenceField, EmbeddedDocument) + + def test_lazy_reference_simple(self): + class Animal(Document): + name = StringField() + tag = StringField() + + class Ocurrence(Document): + person = StringField() + animal = LazyReferenceField(Animal) + + Animal.drop_collection() + Ocurrence.drop_collection() + + animal = Animal(name="Leopard", tag="heavy").save() + Ocurrence(person="test", animal=animal).save() + p = Ocurrence.objects.get() + self.assertIsInstance(p.animal, LazyReference) + fetched_animal = p.animal.fetch() + self.assertEqual(fetched_animal, animal) + # `fetch` keep cache on referenced document by default... + animal.tag = "not so heavy" + animal.save() + double_fetch = p.animal.fetch() + self.assertIs(fetched_animal, double_fetch) + self.assertEqual(double_fetch.tag, "heavy") + # ...unless specified otherwise + fetch_force = p.animal.fetch(force=True) + self.assertIsNot(fetch_force, fetched_animal) + self.assertEqual(fetch_force.tag, "not so heavy") + + def test_lazy_reference_fetch_invalid_ref(self): + class Animal(Document): + name = StringField() + tag = StringField() + + class Ocurrence(Document): + person = StringField() + animal = LazyReferenceField(Animal) + + Animal.drop_collection() + Ocurrence.drop_collection() + + animal = Animal(name="Leopard", tag="heavy").save() + Ocurrence(person="test", animal=animal).save() + animal.delete() + p = Ocurrence.objects.get() + self.assertIsInstance(p.animal, LazyReference) + with self.assertRaises(DoesNotExist): + p.animal.fetch() + + def test_lazy_reference_set(self): + class Animal(Document): + meta = {'allow_inheritance': True} + + name = StringField() + tag = StringField() + + class Ocurrence(Document): + person = StringField() + animal = LazyReferenceField(Animal) + + Animal.drop_collection() + Ocurrence.drop_collection() + + class SubAnimal(Animal): + nick = StringField() + + animal = Animal(name="Leopard", tag="heavy").save() + sub_animal = SubAnimal(nick='doggo', 
name='dog').save() + for ref in ( + animal, + animal.pk, + DBRef(animal._get_collection_name(), animal.pk), + LazyReference(Animal, animal.pk), + + sub_animal, + sub_animal.pk, + DBRef(sub_animal._get_collection_name(), sub_animal.pk), + LazyReference(SubAnimal, sub_animal.pk), + ): + p = Ocurrence(person="test", animal=ref).save() + p.reload() + self.assertIsInstance(p.animal, LazyReference) + p.animal.fetch() + + def test_lazy_reference_bad_set(self): + class Animal(Document): + name = StringField() + tag = StringField() + + class Ocurrence(Document): + person = StringField() + animal = LazyReferenceField(Animal) + + Animal.drop_collection() + Ocurrence.drop_collection() + + class BadDoc(Document): + pass + + animal = Animal(name="Leopard", tag="heavy").save() + baddoc = BadDoc().save() + for bad in ( + 42, + 'foo', + baddoc, + DBRef(baddoc._get_collection_name(), animal.pk), + LazyReference(BadDoc, animal.pk) + ): + with self.assertRaises(ValidationError): + p = Ocurrence(person="test", animal=bad).save() + + def test_lazy_reference_query_conversion(self): + """Ensure that LazyReferenceFields can be queried using objects and values + of the type of the primary key of the referenced object. 
+ """ + class Member(Document): + user_num = IntField(primary_key=True) + + class BlogPost(Document): + title = StringField() + author = LazyReferenceField(Member, dbref=False) + + Member.drop_collection() + BlogPost.drop_collection() + + m1 = Member(user_num=1) + m1.save() + m2 = Member(user_num=2) + m2.save() + + post1 = BlogPost(title='post 1', author=m1) + post1.save() + + post2 = BlogPost(title='post 2', author=m2) + post2.save() + + post = BlogPost.objects(author=m1).first() + self.assertEqual(post.id, post1.id) + + post = BlogPost.objects(author=m2).first() + self.assertEqual(post.id, post2.id) + + # Same thing by passing a LazyReference instance + post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first() + self.assertEqual(post.id, post2.id) + + def test_lazy_reference_query_conversion_dbref(self): + """Ensure that LazyReferenceFields can be queried using objects and values + of the type of the primary key of the referenced object. + """ + class Member(Document): + user_num = IntField(primary_key=True) + + class BlogPost(Document): + title = StringField() + author = LazyReferenceField(Member, dbref=True) + + Member.drop_collection() + BlogPost.drop_collection() + + m1 = Member(user_num=1) + m1.save() + m2 = Member(user_num=2) + m2.save() + + post1 = BlogPost(title='post 1', author=m1) + post1.save() + + post2 = BlogPost(title='post 2', author=m2) + post2.save() + + post = BlogPost.objects(author=m1).first() + self.assertEqual(post.id, post1.id) + + post = BlogPost.objects(author=m2).first() + self.assertEqual(post.id, post2.id) + + # Same thing by passing a LazyReference instance + post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first() + self.assertEqual(post.id, post2.id) + + def test_lazy_reference_passthrough(self): + class Animal(Document): + name = StringField() + tag = StringField() + + class Ocurrence(Document): + animal = LazyReferenceField(Animal, passthrough=False) + animal_passthrough = LazyReferenceField(Animal, 
passthrough=True) + + Animal.drop_collection() + Ocurrence.drop_collection() + + animal = Animal(name="Leopard", tag="heavy").save() + Ocurrence(animal=animal, animal_passthrough=animal).save() + p = Ocurrence.objects.get() + self.assertIsInstance(p.animal, LazyReference) + with self.assertRaises(KeyError): + p.animal['name'] + with self.assertRaises(AttributeError): + p.animal.name + self.assertEqual(p.animal.pk, animal.pk) + + self.assertEqual(p.animal_passthrough.name, "Leopard") + self.assertEqual(p.animal_passthrough['name'], "Leopard") + + # Should not be able to access referenced document's methods + with self.assertRaises(AttributeError): + p.animal.save + with self.assertRaises(KeyError): + p.animal['save'] + + def test_lazy_reference_not_set(self): + class Animal(Document): + name = StringField() + tag = StringField() + + class Ocurrence(Document): + person = StringField() + animal = LazyReferenceField(Animal) + + Animal.drop_collection() + Ocurrence.drop_collection() + + Ocurrence(person='foo').save() + p = Ocurrence.objects.get() + self.assertIs(p.animal, None) + + def test_lazy_reference_equality(self): + class Animal(Document): + name = StringField() + tag = StringField() + + Animal.drop_collection() + + animal = Animal(name="Leopard", tag="heavy").save() + animalref = LazyReference(Animal, animal.pk) + self.assertEqual(animal, animalref) + self.assertEqual(animalref, animal) + + other_animalref = LazyReference(Animal, ObjectId("54495ad94c934721ede76f90")) + self.assertNotEqual(animal, other_animalref) + self.assertNotEqual(other_animalref, animal) + + def test_lazy_reference_embedded(self): + class Animal(Document): + name = StringField() + tag = StringField() + + class EmbeddedOcurrence(EmbeddedDocument): + in_list = ListField(LazyReferenceField(Animal)) + direct = LazyReferenceField(Animal) + + class Ocurrence(Document): + in_list = ListField(LazyReferenceField(Animal)) + in_embedded = EmbeddedDocumentField(EmbeddedOcurrence) + direct = 
LazyReferenceField(Animal) + + Animal.drop_collection() + Ocurrence.drop_collection() + + animal1 = Animal('doggo').save() + animal2 = Animal('cheeta').save() + + def check_fields_type(occ): + self.assertIsInstance(occ.direct, LazyReference) + for elem in occ.in_list: + self.assertIsInstance(elem, LazyReference) + self.assertIsInstance(occ.in_embedded.direct, LazyReference) + for elem in occ.in_embedded.in_list: + self.assertIsInstance(elem, LazyReference) + + occ = Ocurrence( + in_list=[animal1, animal2], + in_embedded={'in_list': [animal1, animal2], 'direct': animal1}, + direct=animal1 + ).save() + check_fields_type(occ) + occ.reload() + check_fields_type(occ) + occ.direct = animal1.id + occ.in_list = [animal1.id, animal2.id] + occ.in_embedded.direct = animal1.id + occ.in_embedded.in_list = [animal1.id, animal2.id] + check_fields_type(occ) + + +class GenericLazyReferenceFieldTest(MongoDBTestCase): + def test_generic_lazy_reference_simple(self): + class Animal(Document): + name = StringField() + tag = StringField() + + class Ocurrence(Document): + person = StringField() + animal = GenericLazyReferenceField() + + Animal.drop_collection() + Ocurrence.drop_collection() + + animal = Animal(name="Leopard", tag="heavy").save() + Ocurrence(person="test", animal=animal).save() + p = Ocurrence.objects.get() + self.assertIsInstance(p.animal, LazyReference) + fetched_animal = p.animal.fetch() + self.assertEqual(fetched_animal, animal) + # `fetch` keep cache on referenced document by default... 
+ animal.tag = "not so heavy" + animal.save() + double_fetch = p.animal.fetch() + self.assertIs(fetched_animal, double_fetch) + self.assertEqual(double_fetch.tag, "heavy") + # ...unless specified otherwise + fetch_force = p.animal.fetch(force=True) + self.assertIsNot(fetch_force, fetched_animal) + self.assertEqual(fetch_force.tag, "not so heavy") + + def test_generic_lazy_reference_choices(self): + class Animal(Document): + name = StringField() + + class Vegetal(Document): + name = StringField() + + class Mineral(Document): + name = StringField() + + class Ocurrence(Document): + living_thing = GenericLazyReferenceField(choices=[Animal, Vegetal]) + thing = GenericLazyReferenceField() + + Animal.drop_collection() + Vegetal.drop_collection() + Mineral.drop_collection() + Ocurrence.drop_collection() + + animal = Animal(name="Leopard").save() + vegetal = Vegetal(name="Oak").save() + mineral = Mineral(name="Granite").save() + + occ_animal = Ocurrence(living_thing=animal, thing=animal).save() + occ_vegetal = Ocurrence(living_thing=vegetal, thing=vegetal).save() + with self.assertRaises(ValidationError): + Ocurrence(living_thing=mineral).save() + + occ = Ocurrence.objects.get(living_thing=animal) + self.assertEqual(occ, occ_animal) + self.assertIsInstance(occ.thing, LazyReference) + self.assertIsInstance(occ.living_thing, LazyReference) + + occ.thing = vegetal + occ.living_thing = vegetal + occ.save() + + occ.thing = mineral + occ.living_thing = mineral + with self.assertRaises(ValidationError): + occ.save() + + def test_generic_lazy_reference_set(self): + class Animal(Document): + meta = {'allow_inheritance': True} + + name = StringField() + tag = StringField() + + class Ocurrence(Document): + person = StringField() + animal = GenericLazyReferenceField() + + Animal.drop_collection() + Ocurrence.drop_collection() + + class SubAnimal(Animal): + nick = StringField() + + animal = Animal(name="Leopard", tag="heavy").save() + sub_animal = SubAnimal(nick='doggo', 
name='dog').save() + for ref in ( + animal, + LazyReference(Animal, animal.pk), + {'_cls': 'Animal', '_ref': DBRef(animal._get_collection_name(), animal.pk)}, + + sub_animal, + LazyReference(SubAnimal, sub_animal.pk), + {'_cls': 'SubAnimal', '_ref': DBRef(sub_animal._get_collection_name(), sub_animal.pk)}, + ): + p = Ocurrence(person="test", animal=ref).save() + p.reload() + self.assertIsInstance(p.animal, (LazyReference, Document)) + p.animal.fetch() + + def test_generic_lazy_reference_bad_set(self): + class Animal(Document): + name = StringField() + tag = StringField() + + class Ocurrence(Document): + person = StringField() + animal = GenericLazyReferenceField(choices=['Animal']) + + Animal.drop_collection() + Ocurrence.drop_collection() + + class BadDoc(Document): + pass + + animal = Animal(name="Leopard", tag="heavy").save() + baddoc = BadDoc().save() + for bad in ( + 42, + 'foo', + baddoc, + LazyReference(BadDoc, animal.pk) + ): + with self.assertRaises(ValidationError): + p = Ocurrence(person="test", animal=bad).save() + + def test_generic_lazy_reference_query_conversion(self): + class Member(Document): + user_num = IntField(primary_key=True) + + class BlogPost(Document): + title = StringField() + author = GenericLazyReferenceField() + + Member.drop_collection() + BlogPost.drop_collection() + + m1 = Member(user_num=1) + m1.save() + m2 = Member(user_num=2) + m2.save() + + post1 = BlogPost(title='post 1', author=m1) + post1.save() + + post2 = BlogPost(title='post 2', author=m2) + post2.save() + + post = BlogPost.objects(author=m1).first() + self.assertEqual(post.id, post1.id) + + post = BlogPost.objects(author=m2).first() + self.assertEqual(post.id, post2.id) + + # Same thing by passing a LazyReference instance + post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first() + self.assertEqual(post.id, post2.id) + + def test_generic_lazy_reference_not_set(self): + class Animal(Document): + name = StringField() + tag = StringField() + + class 
Ocurrence(Document): + person = StringField() + animal = GenericLazyReferenceField() + + Animal.drop_collection() + Ocurrence.drop_collection() + + Ocurrence(person='foo').save() + p = Ocurrence.objects.get() + self.assertIs(p.animal, None) + + def test_generic_lazy_reference_embedded(self): + class Animal(Document): + name = StringField() + tag = StringField() + + class EmbeddedOcurrence(EmbeddedDocument): + in_list = ListField(GenericLazyReferenceField()) + direct = GenericLazyReferenceField() + + class Ocurrence(Document): + in_list = ListField(GenericLazyReferenceField()) + in_embedded = EmbeddedDocumentField(EmbeddedOcurrence) + direct = GenericLazyReferenceField() + + Animal.drop_collection() + Ocurrence.drop_collection() + + animal1 = Animal('doggo').save() + animal2 = Animal('cheeta').save() + + def check_fields_type(occ): + self.assertIsInstance(occ.direct, LazyReference) + for elem in occ.in_list: + self.assertIsInstance(elem, LazyReference) + self.assertIsInstance(occ.in_embedded.direct, LazyReference) + for elem in occ.in_embedded.in_list: + self.assertIsInstance(elem, LazyReference) + + occ = Ocurrence( + in_list=[animal1, animal2], + in_embedded={'in_list': [animal1, animal2], 'direct': animal1}, + direct=animal1 + ).save() + check_fields_type(occ) + occ.reload() + check_fields_type(occ) + animal1_ref = {'_cls': 'Animal', '_ref': DBRef(animal1._get_collection_name(), animal1.pk)} + animal2_ref = {'_cls': 'Animal', '_ref': DBRef(animal2._get_collection_name(), animal2.pk)} + occ.direct = animal1_ref + occ.in_list = [animal1_ref, animal2_ref] + occ.in_embedded.direct = animal1_ref + occ.in_embedded.in_list = [animal1_ref, animal2_ref] + check_fields_type(occ) + + +class ComplexDateTimeFieldTest(MongoDBTestCase): + def test_complexdatetime_storage(self): + """Tests for complex datetime fields - which can handle + microseconds without rounding. 
+ """ + class LogEntry(Document): + date = ComplexDateTimeField() + date_with_dots = ComplexDateTimeField(separator='.') + + LogEntry.drop_collection() + + # Post UTC - microseconds are rounded (down) nearest millisecond and + # dropped - with default datetimefields + d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999) + log = LogEntry() + log.date = d1 + log.save() + log.reload() + self.assertEqual(log.date, d1) + + # Post UTC - microseconds are rounded (down) nearest millisecond - with + # default datetimefields + d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9999) + log.date = d1 + log.save() + log.reload() + self.assertEqual(log.date, d1) + + # Pre UTC dates microseconds below 1000 are dropped - with default + # datetimefields + d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999) + log.date = d1 + log.save() + log.reload() + self.assertEqual(log.date, d1) + + # Pre UTC microseconds above 1000 is wonky - with default datetimefields + # log.date has an invalid microsecond value so I can't construct + # a date to compare. + for i in range(1001, 3113, 33): + d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, i) + log.date = d1 + log.save() + log.reload() + self.assertEqual(log.date, d1) + log1 = LogEntry.objects.get(date=d1) + self.assertEqual(log, log1) + + # Test string padding + microsecond = map(int, [math.pow(10, x) for x in range(6)]) + mm = dd = hh = ii = ss = [1, 10] + + for values in itertools.product([2014], mm, dd, hh, ii, ss, microsecond): + stored = LogEntry(date=datetime.datetime(*values)).to_mongo()['date'] + self.assertTrue(re.match('^\d{4},\d{2},\d{2},\d{2},\d{2},\d{2},\d{6}$', stored) is not None) + + # Test separator + stored = LogEntry(date_with_dots=datetime.datetime(2014, 1, 1)).to_mongo()['date_with_dots'] + self.assertTrue(re.match('^\d{4}.\d{2}.\d{2}.\d{2}.\d{2}.\d{2}.\d{6}$', stored) is not None) + + def test_complexdatetime_usage(self): + """Tests for complex datetime fields - which can handle + microseconds without rounding. 
+ """ + class LogEntry(Document): + date = ComplexDateTimeField() + + LogEntry.drop_collection() + + d1 = datetime.datetime(1950, 1, 1, 0, 0, 1, 999) + log = LogEntry() + log.date = d1 + log.save() + + log1 = LogEntry.objects.get(date=d1) + self.assertEqual(log, log1) + + # create extra 59 log entries for a total of 60 + for i in range(1951, 2010): + d = datetime.datetime(i, 1, 1, 0, 0, 1, 999) + LogEntry(date=d).save() + + self.assertEqual(LogEntry.objects.count(), 60) + + # Test ordering + logs = LogEntry.objects.order_by("date") + i = 0 + while i < 59: + self.assertTrue(logs[i].date <= logs[i + 1].date) + i += 1 + + logs = LogEntry.objects.order_by("-date") + i = 0 + while i < 59: + self.assertTrue(logs[i].date >= logs[i + 1].date) + i += 1 + + # Test searching + logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1)) + self.assertEqual(logs.count(), 30) + + logs = LogEntry.objects.filter(date__lte=datetime.datetime(1980, 1, 1)) + self.assertEqual(logs.count(), 30) + + logs = LogEntry.objects.filter( + date__lte=datetime.datetime(2011, 1, 1), + date__gte=datetime.datetime(2000, 1, 1), + ) + self.assertEqual(logs.count(), 10) + + LogEntry.drop_collection() + + # Test microsecond-level ordering/filtering + for microsecond in (99, 999, 9999, 10000): + LogEntry( + date=datetime.datetime(2015, 1, 1, 0, 0, 0, microsecond) + ).save() + + logs = list(LogEntry.objects.order_by('date')) + for next_idx, log in enumerate(logs[:-1], start=1): + next_log = logs[next_idx] + self.assertTrue(log.date < next_log.date) + + logs = list(LogEntry.objects.order_by('-date')) + for next_idx, log in enumerate(logs[:-1], start=1): + next_log = logs[next_idx] + self.assertTrue(log.date > next_log.date) + + logs = LogEntry.objects.filter( + date__lte=datetime.datetime(2015, 1, 1, 0, 0, 0, 10000)) + self.assertEqual(logs.count(), 4) + + def test_no_default_value(self): + class Log(Document): + timestamp = ComplexDateTimeField() + + Log.drop_collection() + + log = Log() + 
self.assertIsNone(log.timestamp) + log.save() + + fetched_log = Log.objects.with_id(log.id) + self.assertIsNone(fetched_log.timestamp) + + def test_default_static_value(self): + NOW = datetime.datetime.utcnow() + class Log(Document): + timestamp = ComplexDateTimeField(default=NOW) + + Log.drop_collection() + + log = Log() + self.assertEqual(log.timestamp, NOW) + log.save() + + fetched_log = Log.objects.with_id(log.id) + self.assertEqual(fetched_log.timestamp, NOW) + + def test_default_callable(self): + NOW = datetime.datetime.utcnow() + + class Log(Document): + timestamp = ComplexDateTimeField(default=datetime.datetime.utcnow) + + Log.drop_collection() + + log = Log() + self.assertGreaterEqual(log.timestamp, NOW) + log.save() + + fetched_log = Log.objects.with_id(log.id) + self.assertGreaterEqual(fetched_log.timestamp, NOW) + + if __name__ == '__main__': unittest.main() diff --git a/tests/fields/file_tests.py b/tests/fields/file_tests.py index b266a5e5..213e889c 100644 --- a/tests/fields/file_tests.py +++ b/tests/fields/file_tests.py @@ -18,15 +18,13 @@ try: except ImportError: HAS_PIL = False +from tests.utils import MongoDBTestCase + TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png') TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), 'mongodb_leaf.png') -class FileTest(unittest.TestCase): - - def setUp(self): - connect(db='mongoenginetest') - self.db = get_db() +class FileTest(MongoDBTestCase): def tearDown(self): self.db.drop_collection('fs.files') @@ -55,8 +53,8 @@ class FileTest(unittest.TestCase): putfile.save() result = PutFile.objects.first() - self.assertTrue(putfile == result) - self.assertEqual("%s" % result.the_file, "") + self.assertEqual(putfile, result) + self.assertEqual("%s" % result.the_file, "" % result.the_file.grid_id) self.assertEqual(result.the_file.read(), text) self.assertEqual(result.the_file.content_type, content_type) result.the_file.delete() # Remove file from GridFS @@ -73,7 +71,7 @@ class 
FileTest(unittest.TestCase): putfile.save() result = PutFile.objects.first() - self.assertTrue(putfile == result) + self.assertEqual(putfile, result) self.assertEqual(result.the_file.read(), text) self.assertEqual(result.the_file.content_type, content_type) result.the_file.delete() @@ -98,7 +96,7 @@ class FileTest(unittest.TestCase): streamfile.save() result = StreamFile.objects.first() - self.assertTrue(streamfile == result) + self.assertEqual(streamfile, result) self.assertEqual(result.the_file.read(), text + more_text) self.assertEqual(result.the_file.content_type, content_type) result.the_file.seek(0) @@ -134,7 +132,7 @@ class FileTest(unittest.TestCase): streamfile.save() result = StreamFile.objects.first() - self.assertTrue(streamfile == result) + self.assertEqual(streamfile, result) self.assertEqual(result.the_file.read(), text + more_text) # self.assertEqual(result.the_file.content_type, content_type) result.the_file.seek(0) @@ -163,7 +161,7 @@ class FileTest(unittest.TestCase): setfile.save() result = SetFile.objects.first() - self.assertTrue(setfile == result) + self.assertEqual(setfile, result) self.assertEqual(result.the_file.read(), text) # Try replacing file with new one @@ -171,7 +169,7 @@ class FileTest(unittest.TestCase): result.save() result = SetFile.objects.first() - self.assertTrue(setfile == result) + self.assertEqual(setfile, result) self.assertEqual(result.the_file.read(), more_text) result.the_file.delete() @@ -233,8 +231,8 @@ class FileTest(unittest.TestCase): test_file_dupe = TestFile() data = test_file_dupe.the_file.read() # Should be None - self.assertTrue(test_file.name != test_file_dupe.name) - self.assertTrue(test_file.the_file.read() != data) + self.assertNotEqual(test_file.name, test_file_dupe.name) + self.assertNotEqual(test_file.the_file.read(), data) TestFile.drop_collection() @@ -293,7 +291,7 @@ class FileTest(unittest.TestCase): the_file = FileField() test_file = TestFile() - self.assertFalse(test_file.the_file in [{"test": 
1}]) + self.assertNotIn(test_file.the_file, [{"test": 1}]) def test_file_disk_space(self): """ Test disk space usage when we delete/replace a file """ diff --git a/tests/fields/geo.py b/tests/fields/geo.py index 1c5bccc0..754f4203 100644 --- a/tests/fields/geo.py +++ b/tests/fields/geo.py @@ -298,9 +298,9 @@ class GeoFieldTest(unittest.TestCase): polygon = PolygonField() geo_indicies = Event._geo_indices() - self.assertTrue({'fields': [('line', '2dsphere')]} in geo_indicies) - self.assertTrue({'fields': [('polygon', '2dsphere')]} in geo_indicies) - self.assertTrue({'fields': [('point', '2dsphere')]} in geo_indicies) + self.assertIn({'fields': [('line', '2dsphere')]}, geo_indicies) + self.assertIn({'fields': [('polygon', '2dsphere')]}, geo_indicies) + self.assertIn({'fields': [('point', '2dsphere')]}, geo_indicies) def test_indexes_2dsphere_embedded(self): """Ensure that indexes are created automatically for GeoPointFields. @@ -316,9 +316,9 @@ class GeoFieldTest(unittest.TestCase): venue = EmbeddedDocumentField(Venue) geo_indicies = Event._geo_indices() - self.assertTrue({'fields': [('venue.line', '2dsphere')]} in geo_indicies) - self.assertTrue({'fields': [('venue.polygon', '2dsphere')]} in geo_indicies) - self.assertTrue({'fields': [('venue.point', '2dsphere')]} in geo_indicies) + self.assertIn({'fields': [('venue.line', '2dsphere')]}, geo_indicies) + self.assertIn({'fields': [('venue.polygon', '2dsphere')]}, geo_indicies) + self.assertIn({'fields': [('venue.point', '2dsphere')]}, geo_indicies) def test_geo_indexes_recursion(self): @@ -335,9 +335,9 @@ class GeoFieldTest(unittest.TestCase): Parent(name='Berlin').save() info = Parent._get_collection().index_information() - self.assertFalse('location_2d' in info) + self.assertNotIn('location_2d', info) info = Location._get_collection().index_information() - self.assertTrue('location_2d' in info) + self.assertIn('location_2d', info) self.assertEqual(len(Parent._geo_indices()), 0) 
self.assertEqual(len(Location._geo_indices()), 1) diff --git a/tests/queryset/__init__.py b/tests/queryset/__init__.py index c36b2684..31016966 100644 --- a/tests/queryset/__init__.py +++ b/tests/queryset/__init__.py @@ -1,6 +1,6 @@ -from transform import * -from field_list import * -from queryset import * -from visitor import * -from geo import * -from modify import * \ No newline at end of file +from .transform import * +from .field_list import * +from .queryset import * +from .visitor import * +from .geo import * +from .modify import * diff --git a/tests/queryset/field_list.py b/tests/queryset/field_list.py index 76d5f779..b111238a 100644 --- a/tests/queryset/field_list.py +++ b/tests/queryset/field_list.py @@ -141,6 +141,16 @@ class OnlyExcludeAllTest(unittest.TestCase): self.assertEqual(qs._loaded_fields.as_dict(), {'b': {'$slice': 5}}) + def test_mix_slice_with_other_fields(self): + class MyDoc(Document): + a = ListField() + b = ListField() + c = ListField() + + qs = MyDoc.objects.fields(a=1, b=0, slice__c=2) + self.assertEqual(qs._loaded_fields.as_dict(), + {'c': {'$slice': 2}, 'a': 1}) + def test_only(self): """Ensure that QuerySet.only only returns the requested fields. 
""" @@ -171,7 +181,7 @@ class OnlyExcludeAllTest(unittest.TestCase): employee.save() obj = self.Person.objects(id=employee.id).only('age').get() - self.assertTrue(isinstance(obj, Employee)) + self.assertIsInstance(obj, Employee) # Check field names are looked up properly obj = Employee.objects(id=employee.id).only('salary').get() @@ -187,14 +197,18 @@ class OnlyExcludeAllTest(unittest.TestCase): title = StringField() text = StringField() + class VariousData(EmbeddedDocument): + some = BooleanField() + class BlogPost(Document): content = StringField() author = EmbeddedDocumentField(User) comments = ListField(EmbeddedDocumentField(Comment)) + various = MapField(field=EmbeddedDocumentField(VariousData)) BlogPost.drop_collection() - post = BlogPost(content='Had a good coffee today...') + post = BlogPost(content='Had a good coffee today...', various={'test_dynamic':{'some': True}}) post.author = User(name='Test User') post.comments = [Comment(title='I aggree', text='Great post!'), Comment(title='Coffee', text='I hate coffee')] post.save() @@ -205,6 +219,9 @@ class OnlyExcludeAllTest(unittest.TestCase): self.assertEqual(obj.author.name, 'Test User') self.assertEqual(obj.comments, []) + obj = BlogPost.objects.only('various.test_dynamic.some').get() + self.assertEqual(obj.various["test_dynamic"].some, True) + obj = BlogPost.objects.only('content', 'comments.title',).get() self.assertEqual(obj.content, 'Had a good coffee today...') self.assertEqual(obj.author, None) diff --git a/tests/queryset/geo.py b/tests/queryset/geo.py index d10c51cd..fea225b2 100644 --- a/tests/queryset/geo.py +++ b/tests/queryset/geo.py @@ -1,105 +1,139 @@ -from datetime import datetime, timedelta +import datetime import unittest -from pymongo.errors import OperationFailure from mongoengine import * -from mongoengine.connection import get_connection -from nose.plugins.skip import SkipTest + +from tests.utils import MongoDBTestCase, needs_mongodb_v3 __all__ = ("GeoQueriesTest",) -class 
GeoQueriesTest(unittest.TestCase): +class GeoQueriesTest(MongoDBTestCase): - def setUp(self): - connect(db='mongoenginetest') - - def test_geospatial_operators(self): - """Ensure that geospatial queries are working. - """ + def _create_event_data(self, point_field_class=GeoPointField): + """Create some sample data re-used in many of the tests below.""" class Event(Document): title = StringField() date = DateTimeField() - location = GeoPointField() + location = point_field_class() def __unicode__(self): return self.title + self.Event = Event + Event.drop_collection() - event1 = Event(title="Coltrane Motion @ Double Door", - date=datetime.now() - timedelta(days=1), - location=[-87.677137, 41.909889]).save() - event2 = Event(title="Coltrane Motion @ Bottom of the Hill", - date=datetime.now() - timedelta(days=10), - location=[-122.4194155, 37.7749295]).save() - event3 = Event(title="Coltrane Motion @ Empty Bottle", - date=datetime.now(), - location=[-87.686638, 41.900474]).save() + event1 = Event.objects.create( + title="Coltrane Motion @ Double Door", + date=datetime.datetime.now() - datetime.timedelta(days=1), + location=[-87.677137, 41.909889]) + event2 = Event.objects.create( + title="Coltrane Motion @ Bottom of the Hill", + date=datetime.datetime.now() - datetime.timedelta(days=10), + location=[-122.4194155, 37.7749295]) + event3 = Event.objects.create( + title="Coltrane Motion @ Empty Bottle", + date=datetime.datetime.now(), + location=[-87.686638, 41.900474]) + + return event1, event2, event3 + + def test_near(self): + """Make sure the "near" operator works.""" + event1, event2, event3 = self._create_event_data() # find all events "near" pitchfork office, chicago. # note that "near" will show the san francisco event, too, # although it sorts to last. 
- events = Event.objects(location__near=[-87.67892, 41.9120459]) + events = self.Event.objects(location__near=[-87.67892, 41.9120459]) self.assertEqual(events.count(), 3) self.assertEqual(list(events), [event1, event3, event2]) - # find events within 5 degrees of pitchfork office, chicago - point_and_distance = [[-87.67892, 41.9120459], 5] - events = Event.objects(location__within_distance=point_and_distance) - self.assertEqual(events.count(), 2) - events = list(events) - self.assertTrue(event2 not in events) - self.assertTrue(event1 in events) - self.assertTrue(event3 in events) - # ensure ordering is respected by "near" - events = Event.objects(location__near=[-87.67892, 41.9120459]) + events = self.Event.objects(location__near=[-87.67892, 41.9120459]) events = events.order_by("-date") self.assertEqual(events.count(), 3) self.assertEqual(list(events), [event3, event1, event2]) + def test_near_and_max_distance(self): + """Ensure the "max_distance" operator works alongside the "near" + operator. + """ + event1, event2, event3 = self._create_event_data() + # find events within 10 degrees of san francisco point = [-122.415579, 37.7566023] - events = Event.objects(location__near=point, location__max_distance=10) + events = self.Event.objects(location__near=point, + location__max_distance=10) self.assertEqual(events.count(), 1) self.assertEqual(events[0], event2) + # $minDistance was added in MongoDB v2.6, but continued being buggy + # until v3.0; skip for older versions + @needs_mongodb_v3 + def test_near_and_min_distance(self): + """Ensure the "min_distance" operator works alongside the "near" + operator. 
+ """ + event1, event2, event3 = self._create_event_data() + # find events at least 10 degrees away of san francisco point = [-122.415579, 37.7566023] - events = Event.objects(location__near=point, location__min_distance=10) - # The following real test passes on MongoDB 3 but minDistance seems - # buggy on older MongoDB versions - if get_connection().server_info()['versionArray'][0] > 2: - self.assertEqual(events.count(), 2) - else: - self.assertTrue(events.count() >= 2) + events = self.Event.objects(location__near=point, + location__min_distance=10) + self.assertEqual(events.count(), 2) + + def test_within_distance(self): + """Make sure the "within_distance" operator works.""" + event1, event2, event3 = self._create_event_data() + + # find events within 5 degrees of pitchfork office, chicago + point_and_distance = [[-87.67892, 41.9120459], 5] + events = self.Event.objects( + location__within_distance=point_and_distance) + self.assertEqual(events.count(), 2) + events = list(events) + self.assertNotIn(event2, events) + self.assertIn(event1, events) + self.assertIn(event3, events) # find events within 10 degrees of san francisco point_and_distance = [[-122.415579, 37.7566023], 10] - events = Event.objects(location__within_distance=point_and_distance) + events = self.Event.objects( + location__within_distance=point_and_distance) self.assertEqual(events.count(), 1) self.assertEqual(events[0], event2) # find events within 1 degree of greenpoint, broolyn, nyc, ny point_and_distance = [[-73.9509714, 40.7237134], 1] - events = Event.objects(location__within_distance=point_and_distance) + events = self.Event.objects( + location__within_distance=point_and_distance) self.assertEqual(events.count(), 0) # ensure ordering is respected by "within_distance" point_and_distance = [[-87.67892, 41.9120459], 10] - events = Event.objects(location__within_distance=point_and_distance) + events = self.Event.objects( + location__within_distance=point_and_distance) events = 
events.order_by("-date") self.assertEqual(events.count(), 2) self.assertEqual(events[0], event3) + def test_within_box(self): + """Ensure the "within_box" operator works.""" + event1, event2, event3 = self._create_event_data() + # check that within_box works box = [(-125.0, 35.0), (-100.0, 40.0)] - events = Event.objects(location__within_box=box) + events = self.Event.objects(location__within_box=box) self.assertEqual(events.count(), 1) self.assertEqual(events[0].id, event2.id) + def test_within_polygon(self): + """Ensure the "within_polygon" operator works.""" + event1, event2, event3 = self._create_event_data() + polygon = [ (-87.694445, 41.912114), (-87.69084, 41.919395), @@ -107,7 +141,7 @@ class GeoQueriesTest(unittest.TestCase): (-87.654276, 41.911731), (-87.656164, 41.898061), ] - events = Event.objects(location__within_polygon=polygon) + events = self.Event.objects(location__within_polygon=polygon) self.assertEqual(events.count(), 1) self.assertEqual(events[0].id, event1.id) @@ -116,13 +150,151 @@ class GeoQueriesTest(unittest.TestCase): (-1.225891, 52.792797), (-4.40094, 53.389881) ] - events = Event.objects(location__within_polygon=polygon2) + events = self.Event.objects(location__within_polygon=polygon2) self.assertEqual(events.count(), 0) - def test_geo_spatial_embedded(self): + def test_2dsphere_near(self): + """Make sure the "near" operator works with a PointField, which + corresponds to a 2dsphere index. + """ + event1, event2, event3 = self._create_event_data( + point_field_class=PointField + ) + # find all events "near" pitchfork office, chicago. + # note that "near" will show the san francisco event, too, + # although it sorts to last. 
+ events = self.Event.objects(location__near=[-87.67892, 41.9120459]) + self.assertEqual(events.count(), 3) + self.assertEqual(list(events), [event1, event3, event2]) + + # ensure ordering is respected by "near" + events = self.Event.objects(location__near=[-87.67892, 41.9120459]) + events = events.order_by("-date") + self.assertEqual(events.count(), 3) + self.assertEqual(list(events), [event3, event1, event2]) + + def test_2dsphere_near_and_max_distance(self): + """Ensure the "max_distance" operator works alongside the "near" + operator with a 2dsphere index. + """ + event1, event2, event3 = self._create_event_data( + point_field_class=PointField + ) + + # find events within 10km of san francisco + point = [-122.415579, 37.7566023] + events = self.Event.objects(location__near=point, + location__max_distance=10000) + self.assertEqual(events.count(), 1) + self.assertEqual(events[0], event2) + + # find events within 1km of greenpoint, broolyn, nyc, ny + events = self.Event.objects(location__near=[-73.9509714, 40.7237134], + location__max_distance=1000) + self.assertEqual(events.count(), 0) + + # ensure ordering is respected by "near" + events = self.Event.objects( + location__near=[-87.67892, 41.9120459], + location__max_distance=10000 + ).order_by("-date") + self.assertEqual(events.count(), 2) + self.assertEqual(events[0], event3) + + def test_2dsphere_geo_within_box(self): + """Ensure the "geo_within_box" operator works with a 2dsphere + index. + """ + event1, event2, event3 = self._create_event_data( + point_field_class=PointField + ) + + # check that within_box works + box = [(-125.0, 35.0), (-100.0, 40.0)] + events = self.Event.objects(location__geo_within_box=box) + self.assertEqual(events.count(), 1) + self.assertEqual(events[0].id, event2.id) + + def test_2dsphere_geo_within_polygon(self): + """Ensure the "geo_within_polygon" operator works with a + 2dsphere index. 
+ """ + event1, event2, event3 = self._create_event_data( + point_field_class=PointField + ) + + polygon = [ + (-87.694445, 41.912114), + (-87.69084, 41.919395), + (-87.681742, 41.927186), + (-87.654276, 41.911731), + (-87.656164, 41.898061), + ] + events = self.Event.objects(location__geo_within_polygon=polygon) + self.assertEqual(events.count(), 1) + self.assertEqual(events[0].id, event1.id) + + polygon2 = [ + (-1.742249, 54.033586), + (-1.225891, 52.792797), + (-4.40094, 53.389881) + ] + events = self.Event.objects(location__geo_within_polygon=polygon2) + self.assertEqual(events.count(), 0) + + # $minDistance was added in MongoDB v2.6, but continued being buggy + # until v3.0; skip for older versions + @needs_mongodb_v3 + def test_2dsphere_near_and_min_max_distance(self): + """Ensure "min_distace" and "max_distance" operators work well + together with the "near" operator in a 2dsphere index. + """ + event1, event2, event3 = self._create_event_data( + point_field_class=PointField + ) + + # ensure min_distance and max_distance combine well + events = self.Event.objects( + location__near=[-87.67892, 41.9120459], + location__min_distance=1000, + location__max_distance=10000 + ).order_by("-date") + self.assertEqual(events.count(), 1) + self.assertEqual(events[0], event3) + + # ensure ordering is respected by "near" with "min_distance" + events = self.Event.objects( + location__near=[-87.67892, 41.9120459], + location__min_distance=10000 + ).order_by("-date") + self.assertEqual(events.count(), 1) + self.assertEqual(events[0], event2) + + def test_2dsphere_geo_within_center(self): + """Make sure the "geo_within_center" operator works with a + 2dsphere index. 
+ """ + event1, event2, event3 = self._create_event_data( + point_field_class=PointField + ) + + # find events within 5 degrees of pitchfork office, chicago + point_and_distance = [[-87.67892, 41.9120459], 2] + events = self.Event.objects( + location__geo_within_center=point_and_distance) + self.assertEqual(events.count(), 2) + events = list(events) + self.assertNotIn(event2, events) + self.assertIn(event1, events) + self.assertIn(event3, events) + + def _test_embedded(self, point_field_class): + """Helper test method ensuring given point field class works + well in an embedded document. + """ class Venue(EmbeddedDocument): - location = GeoPointField() + location = point_field_class() name = StringField() class Event(Document): @@ -148,16 +320,18 @@ class GeoQueriesTest(unittest.TestCase): self.assertEqual(events.count(), 3) self.assertEqual(list(events), [event1, event3, event2]) - def test_spherical_geospatial_operators(self): - """Ensure that spherical geospatial queries are working - """ - # Needs MongoDB > 2.6.4 https://jira.mongodb.org/browse/SERVER-14039 - connection = get_connection() - info = connection.test.command('buildInfo') - mongodb_version = tuple([int(i) for i in info['version'].split('.')]) - if mongodb_version < (2, 6, 4): - raise SkipTest("Need MongoDB version 2.6.4+") + def test_geo_spatial_embedded(self): + """Make sure GeoPointField works properly in an embedded document.""" + self._test_embedded(point_field_class=GeoPointField) + def test_2dsphere_point_embedded(self): + """Make sure PointField works properly in an embedded document.""" + self._test_embedded(point_field_class=PointField) + + # Needs MongoDB > 2.6.4 https://jira.mongodb.org/browse/SERVER-14039 + @needs_mongodb_v3 + def test_spherical_geospatial_operators(self): + """Ensure that spherical geospatial queries are working.""" class Point(Document): location = GeoPointField() @@ -177,7 +351,10 @@ class GeoQueriesTest(unittest.TestCase): # Same behavior for 
_within_spherical_distance points = Point.objects( - location__within_spherical_distance=[[-122, 37.5], 60 / earth_radius] + location__within_spherical_distance=[ + [-122, 37.5], + 60 / earth_radius + ] ) self.assertEqual(points.count(), 2) @@ -194,14 +371,9 @@ class GeoQueriesTest(unittest.TestCase): # Test query works with min_distance, being farer from one point points = Point.objects(location__near_sphere=[-122, 37.8], location__min_distance=60 / earth_radius) - # The following real test passes on MongoDB 3 but minDistance seems - # buggy on older MongoDB versions - if get_connection().server_info()['versionArray'][0] > 2: - self.assertEqual(points.count(), 1) - far_point = points.first() - self.assertNotEqual(close_point, far_point) - else: - self.assertTrue(points.count() >= 1) + self.assertEqual(points.count(), 1) + far_point = points.first() + self.assertNotEqual(close_point, far_point) # Finds both points, but orders the north point first because it's # closer to the reference point to the north. @@ -220,141 +392,15 @@ class GeoQueriesTest(unittest.TestCase): # Finds only one point because only the first point is within 60km of # the reference point to the south. 
points = Point.objects( - location__within_spherical_distance=[[-122, 36.5], 60/earth_radius]) + location__within_spherical_distance=[ + [-122, 36.5], + 60 / earth_radius + ] + ) self.assertEqual(points.count(), 1) self.assertEqual(points[0].id, south_point.id) - def test_2dsphere_point(self): - - class Event(Document): - title = StringField() - date = DateTimeField() - location = PointField() - - def __unicode__(self): - return self.title - - Event.drop_collection() - - event1 = Event(title="Coltrane Motion @ Double Door", - date=datetime.now() - timedelta(days=1), - location=[-87.677137, 41.909889]) - event1.save() - event2 = Event(title="Coltrane Motion @ Bottom of the Hill", - date=datetime.now() - timedelta(days=10), - location=[-122.4194155, 37.7749295]).save() - event3 = Event(title="Coltrane Motion @ Empty Bottle", - date=datetime.now(), - location=[-87.686638, 41.900474]).save() - - # find all events "near" pitchfork office, chicago. - # note that "near" will show the san francisco event, too, - # although it sorts to last. 
- events = Event.objects(location__near=[-87.67892, 41.9120459]) - self.assertEqual(events.count(), 3) - self.assertEqual(list(events), [event1, event3, event2]) - - # find events within 5 degrees of pitchfork office, chicago - point_and_distance = [[-87.67892, 41.9120459], 2] - events = Event.objects(location__geo_within_center=point_and_distance) - self.assertEqual(events.count(), 2) - events = list(events) - self.assertTrue(event2 not in events) - self.assertTrue(event1 in events) - self.assertTrue(event3 in events) - - # ensure ordering is respected by "near" - events = Event.objects(location__near=[-87.67892, 41.9120459]) - events = events.order_by("-date") - self.assertEqual(events.count(), 3) - self.assertEqual(list(events), [event3, event1, event2]) - - # find events within 10km of san francisco - point = [-122.415579, 37.7566023] - events = Event.objects(location__near=point, location__max_distance=10000) - self.assertEqual(events.count(), 1) - self.assertEqual(events[0], event2) - - # find events within 1km of greenpoint, broolyn, nyc, ny - events = Event.objects(location__near=[-73.9509714, 40.7237134], location__max_distance=1000) - self.assertEqual(events.count(), 0) - - # ensure ordering is respected by "near" - events = Event.objects(location__near=[-87.67892, 41.9120459], - location__max_distance=10000).order_by("-date") - self.assertEqual(events.count(), 2) - self.assertEqual(events[0], event3) - - # ensure min_distance and max_distance combine well - events = Event.objects(location__near=[-87.67892, 41.9120459], - location__min_distance=1000, - location__max_distance=10000).order_by("-date") - self.assertEqual(events.count(), 1) - self.assertEqual(events[0], event3) - - # ensure ordering is respected by "near" - events = Event.objects(location__near=[-87.67892, 41.9120459], - # location__min_distance=10000 - location__min_distance=10000).order_by("-date") - self.assertEqual(events.count(), 1) - self.assertEqual(events[0], event2) - - # check that 
within_box works - box = [(-125.0, 35.0), (-100.0, 40.0)] - events = Event.objects(location__geo_within_box=box) - self.assertEqual(events.count(), 1) - self.assertEqual(events[0].id, event2.id) - - polygon = [ - (-87.694445, 41.912114), - (-87.69084, 41.919395), - (-87.681742, 41.927186), - (-87.654276, 41.911731), - (-87.656164, 41.898061), - ] - events = Event.objects(location__geo_within_polygon=polygon) - self.assertEqual(events.count(), 1) - self.assertEqual(events[0].id, event1.id) - - polygon2 = [ - (-1.742249, 54.033586), - (-1.225891, 52.792797), - (-4.40094, 53.389881) - ] - events = Event.objects(location__geo_within_polygon=polygon2) - self.assertEqual(events.count(), 0) - - def test_2dsphere_point_embedded(self): - - class Venue(EmbeddedDocument): - location = GeoPointField() - name = StringField() - - class Event(Document): - title = StringField() - venue = EmbeddedDocumentField(Venue) - - Event.drop_collection() - - venue1 = Venue(name="The Rock", location=[-87.677137, 41.909889]) - venue2 = Venue(name="The Bridge", location=[-122.4194155, 37.7749295]) - - event1 = Event(title="Coltrane Motion @ Double Door", - venue=venue1).save() - event2 = Event(title="Coltrane Motion @ Bottom of the Hill", - venue=venue2).save() - event3 = Event(title="Coltrane Motion @ Empty Bottle", - venue=venue1).save() - - # find all events "near" pitchfork office, chicago. - # note that "near" will show the san francisco event, too, - # although it sorts to last. 
- events = Event.objects(venue__location__near=[-87.67892, 41.9120459]) - self.assertEqual(events.count(), 3) - self.assertEqual(list(events), [event1, event3, event2]) - def test_linestring(self): - class Road(Document): name = StringField() line = LineStringField() @@ -410,7 +456,6 @@ class GeoQueriesTest(unittest.TestCase): self.assertEqual(1, roads) def test_polygon(self): - class Road(Document): name = StringField() poly = PolygonField() @@ -465,6 +510,24 @@ class GeoQueriesTest(unittest.TestCase): roads = Road.objects.filter(poly__geo_intersects={"$geometry": polygon}).count() self.assertEqual(1, roads) + def test_aspymongo_with_only(self): + """Ensure as_pymongo works with only""" + class Place(Document): + location = PointField() + + Place.drop_collection() + p = Place(location=[24.946861267089844, 60.16311983618494]) + p.save() + qs = Place.objects().only('location') + self.assertDictEqual( + qs.as_pymongo()[0]['location'], + {u'type': u'Point', + u'coordinates': [ + 24.946861267089844, + 60.16311983618494] + } + ) + def test_2dsphere_point_sets_correctly(self): class Location(Document): loc = PointField() @@ -507,5 +570,6 @@ class GeoQueriesTest(unittest.TestCase): loc = Location.objects.as_pymongo()[0] self.assertEqual(loc["poly"], {"type": "Polygon", "coordinates": [[[40, 4], [40, 6], [41, 6], [40, 4]]]}) + if __name__ == '__main__': unittest.main() diff --git a/tests/queryset/modify.py b/tests/queryset/modify.py index 607937f6..b37f9b73 100644 --- a/tests/queryset/modify.py +++ b/tests/queryset/modify.py @@ -1,6 +1,8 @@ import unittest -from mongoengine import connect, Document, IntField +from mongoengine import connect, Document, IntField, StringField, ListField + +from tests.utils import needs_mongodb_v26 __all__ = ("FindAndModifyTest",) @@ -94,6 +96,37 @@ class FindAndModifyTest(unittest.TestCase): self.assertEqual(old_doc.to_mongo(), {"_id": 1}) self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}]) + @needs_mongodb_v26 + def 
test_modify_with_push(self): + class BlogPost(Document): + tags = ListField(StringField()) + + BlogPost.drop_collection() + + blog = BlogPost.objects.create() + + # Push a new tag via modify with new=False (default). + BlogPost(id=blog.id).modify(push__tags='code') + self.assertEqual(blog.tags, []) + blog.reload() + self.assertEqual(blog.tags, ['code']) + + # Push a new tag via modify with new=True. + blog = BlogPost.objects(id=blog.id).modify(push__tags='java', new=True) + self.assertEqual(blog.tags, ['code', 'java']) + + # Push a new tag with a positional argument. + blog = BlogPost.objects(id=blog.id).modify( + push__tags__0='python', + new=True) + self.assertEqual(blog.tags, ['python', 'code', 'java']) + + # Push multiple new tags with a positional argument. + blog = BlogPost.objects(id=blog.id).modify( + push__tags__1=['go', 'rust'], + new=True) + self.assertEqual(blog.tags, ['python', 'go', 'rust', 'code', 'java']) + if __name__ == '__main__': unittest.main() diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index e4c71de7..0f2364f7 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -3,12 +3,14 @@ import datetime import unittest import uuid +from decimal import Decimal from bson import DBRef, ObjectId from nose.plugins.skip import SkipTest import pymongo from pymongo.errors import ConfigurationError from pymongo.read_preferences import ReadPreference +from pymongo.results import UpdateResult import six from mongoengine import * @@ -19,6 +21,9 @@ from mongoengine.python_support import IS_PYMONGO_3 from mongoengine.queryset import (DoesNotExist, MultipleObjectsReturned, QuerySet, QuerySetManager, queryset_manager) +from tests.utils import needs_mongodb_v26, skip_pymongo3 + + __all__ = ("QuerySetTest",) @@ -32,37 +37,6 @@ class db_ops_tracker(query_counter): return list(self.db.system.profile.find(ignore_query)) -def skip_older_mongodb(f): - def _inner(*args, **kwargs): - connection = get_connection() - info = 
connection.test.command('buildInfo') - mongodb_version = tuple([int(i) for i in info['version'].split('.')]) - - if mongodb_version < (2, 6): - raise SkipTest("Need MongoDB version 2.6+") - - return f(*args, **kwargs) - - _inner.__name__ = f.__name__ - _inner.__doc__ = f.__doc__ - - return _inner - - -def skip_pymongo3(f): - def _inner(*args, **kwargs): - - if IS_PYMONGO_3: - raise SkipTest("Useless with PyMongo 3+") - - return f(*args, **kwargs) - - _inner.__name__ = f.__name__ - _inner.__doc__ = f.__doc__ - - return _inner - - class QuerySetTest(unittest.TestCase): def setUp(self): @@ -85,11 +59,10 @@ class QuerySetTest(unittest.TestCase): def test_initialisation(self): """Ensure that a QuerySet is correctly initialised by QuerySetManager. """ - self.assertTrue(isinstance(self.Person.objects, QuerySet)) + self.assertIsInstance(self.Person.objects, QuerySet) self.assertEqual(self.Person.objects._collection.name, self.Person._get_collection_name()) - self.assertTrue(isinstance(self.Person.objects._collection, - pymongo.collection.Collection)) + self.assertIsInstance(self.Person.objects._collection, pymongo.collection.Collection) def test_cannot_perform_joins_references(self): @@ -106,58 +79,116 @@ class QuerySetTest(unittest.TestCase): list(BlogPost.objects(author2__name="test")) def test_find(self): - """Ensure that a query returns a valid set of results. 
- """ - self.Person(name="User A", age=20).save() - self.Person(name="User B", age=30).save() + """Ensure that a query returns a valid set of results.""" + user_a = self.Person.objects.create(name='User A', age=20) + user_b = self.Person.objects.create(name='User B', age=30) # Find all people in the collection people = self.Person.objects self.assertEqual(people.count(), 2) results = list(people) - self.assertTrue(isinstance(results[0], self.Person)) - self.assertTrue(isinstance(results[0].id, (ObjectId, str, unicode))) - self.assertEqual(results[0].name, "User A") + + self.assertIsInstance(results[0], self.Person) + self.assertIsInstance(results[0].id, (ObjectId, str, unicode)) + + self.assertEqual(results[0], user_a) + self.assertEqual(results[0].name, 'User A') self.assertEqual(results[0].age, 20) - self.assertEqual(results[1].name, "User B") + + self.assertEqual(results[1], user_b) + self.assertEqual(results[1].name, 'User B') self.assertEqual(results[1].age, 30) - # Use a query to filter the people found to just person1 + # Filter people by age people = self.Person.objects(age=20) self.assertEqual(people.count(), 1) person = people.next() + self.assertEqual(person, user_a) self.assertEqual(person.name, "User A") self.assertEqual(person.age, 20) - # Test limit + def test_limit(self): + """Ensure that QuerySet.limit works as expected.""" + user_a = self.Person.objects.create(name='User A', age=20) + user_b = self.Person.objects.create(name='User B', age=30) + + # Test limit on a new queryset people = list(self.Person.objects.limit(1)) self.assertEqual(len(people), 1) - self.assertEqual(people[0].name, 'User A') + self.assertEqual(people[0], user_a) - # Test skip + # Test limit on an existing queryset + people = self.Person.objects + self.assertEqual(len(people), 2) + people2 = people.limit(1) + self.assertEqual(len(people), 2) + self.assertEqual(len(people2), 1) + self.assertEqual(people2[0], user_a) + + # Test limit with 0 as parameter + people = 
self.Person.objects.limit(0) + self.assertEqual(people.count(with_limit_and_skip=True), 2) + self.assertEqual(len(people), 2) + + # Test chaining of only after limit + person = self.Person.objects().limit(1).only('name').first() + self.assertEqual(person, user_a) + self.assertEqual(person.name, 'User A') + self.assertEqual(person.age, None) + + def test_skip(self): + """Ensure that QuerySet.skip works as expected.""" + user_a = self.Person.objects.create(name='User A', age=20) + user_b = self.Person.objects.create(name='User B', age=30) + + # Test skip on a new queryset people = list(self.Person.objects.skip(1)) self.assertEqual(len(people), 1) - self.assertEqual(people[0].name, 'User B') + self.assertEqual(people[0], user_b) - person3 = self.Person(name="User C", age=40) - person3.save() + # Test skip on an existing queryset + people = self.Person.objects + self.assertEqual(len(people), 2) + people2 = people.skip(1) + self.assertEqual(len(people), 2) + self.assertEqual(len(people2), 1) + self.assertEqual(people2[0], user_b) + + # Test chaining of only after skip + person = self.Person.objects().skip(1).only('name').first() + self.assertEqual(person, user_b) + self.assertEqual(person.name, 'User B') + self.assertEqual(person.age, None) + + def test_slice(self): + """Ensure slicing a queryset works as expected.""" + user_a = self.Person.objects.create(name='User A', age=20) + user_b = self.Person.objects.create(name='User B', age=30) + user_c = self.Person.objects.create(name="User C", age=40) # Test slice limit people = list(self.Person.objects[:2]) self.assertEqual(len(people), 2) - self.assertEqual(people[0].name, 'User A') - self.assertEqual(people[1].name, 'User B') + self.assertEqual(people[0], user_a) + self.assertEqual(people[1], user_b) # Test slice skip people = list(self.Person.objects[1:]) self.assertEqual(len(people), 2) - self.assertEqual(people[0].name, 'User B') - self.assertEqual(people[1].name, 'User C') + self.assertEqual(people[0], user_b) + 
self.assertEqual(people[1], user_c) # Test slice limit and skip people = list(self.Person.objects[1:2]) self.assertEqual(len(people), 1) - self.assertEqual(people[0].name, 'User B') + self.assertEqual(people[0], user_b) + + # Test slice limit and skip on an existing queryset + people = self.Person.objects + self.assertEqual(len(people), 3) + people2 = people[1:2] + self.assertEqual(len(people2), 1) + self.assertEqual(people2[0], user_b) # Test slice limit and skip cursor reset qs = self.Person.objects[1:2] @@ -168,6 +199,7 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(len(people), 1) self.assertEqual(people[0].name, 'User B') + # Test empty slice people = list(self.Person.objects[1:1]) self.assertEqual(len(people), 0) @@ -187,12 +219,6 @@ class QuerySetTest(unittest.TestCase): self.assertEqual("[, ]", "%s" % self.Person.objects[51:53]) - # Test only after limit - self.assertEqual(self.Person.objects().limit(2).only('name')[0].age, None) - - # Test only after skip - self.assertEqual(self.Person.objects().skip(2).only('name')[0].age, None) - def test_find_one(self): """Ensure that a query using find_one returns a valid result. 
""" @@ -203,7 +229,7 @@ class QuerySetTest(unittest.TestCase): # Retrieve the first person from the database person = self.Person.objects.first() - self.assertTrue(isinstance(person, self.Person)) + self.assertIsInstance(person, self.Person) self.assertEqual(person.name, "User A") self.assertEqual(person.age, 20) @@ -551,16 +577,37 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(post.comments[0].by, 'joe') self.assertEqual(post.comments[0].votes.score, 4) + @needs_mongodb_v26 def test_update_min_max(self): class Scores(Document): high_score = IntField() low_score = IntField() - scores = Scores(high_score=800, low_score=200) - scores.save() + + scores = Scores.objects.create(high_score=800, low_score=200) + Scores.objects(id=scores.id).update(min__low_score=150) - self.assertEqual(Scores.objects(id=scores.id).get().low_score, 150) + self.assertEqual(Scores.objects.get(id=scores.id).low_score, 150) Scores.objects(id=scores.id).update(min__low_score=250) - self.assertEqual(Scores.objects(id=scores.id).get().low_score, 150) + self.assertEqual(Scores.objects.get(id=scores.id).low_score, 150) + + Scores.objects(id=scores.id).update(max__high_score=1000) + self.assertEqual(Scores.objects.get(id=scores.id).high_score, 1000) + Scores.objects(id=scores.id).update(max__high_score=500) + self.assertEqual(Scores.objects.get(id=scores.id).high_score, 1000) + + @needs_mongodb_v26 + def test_update_multiple(self): + class Product(Document): + item = StringField() + price = FloatField() + + product = Product.objects.create(item='ABC', price=10.99) + product = Product.objects.create(item='ABC', price=10.99) + Product.objects(id=product.id).update(mul__price=1.25) + self.assertEqual(Product.objects.get(id=product.id).price, 13.7375) + unknown_product = Product.objects.create(item='Unknown') + Product.objects(id=unknown_product.id).update(mul__price=100) + self.assertEqual(Product.objects.get(id=unknown_product.id).price, 0) def test_updates_can_have_match_operators(self): 
@@ -629,14 +676,14 @@ class QuerySetTest(unittest.TestCase): result = self.Person(name="Bob", age=25).update( upsert=True, full_result=True) - self.assertTrue(isinstance(result, dict)) - self.assertTrue("upserted" in result) - self.assertFalse(result["updatedExisting"]) + self.assertIsInstance(result, UpdateResult) + self.assertIn("upserted", result.raw_result) + self.assertFalse(result.raw_result["updatedExisting"]) bob = self.Person.objects.first() result = bob.update(set__age=30, full_result=True) - self.assertTrue(isinstance(result, dict)) - self.assertTrue(result["updatedExisting"]) + self.assertIsInstance(result, UpdateResult) + self.assertTrue(result.raw_result["updatedExisting"]) self.Person(name="Bob", age=20).save() result = self.Person.objects(name="Bob").update( @@ -803,11 +850,8 @@ class QuerySetTest(unittest.TestCase): blogs.append(Blog(title="post %s" % i, posts=[post1, post2])) Blog.objects.insert(blogs, load_bulk=False) - if mongodb_version < (2, 6): - self.assertEqual(q, 1) - else: - # profiling logs each doc now in the bulk op - self.assertEqual(q, 99) + # profiling logs each doc now in the bulk op + self.assertEqual(q, 99) Blog.drop_collection() Blog.ensure_indexes() @@ -816,11 +860,7 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(q, 0) Blog.objects.insert(blogs) - if mongodb_version < (2, 6): - self.assertEqual(q, 2) # 1 for insert, and 1 for in bulk fetch - else: - # 99 for insert, and 1 for in bulk fetch - self.assertEqual(q, 100) + self.assertEqual(q, 100) # 99 for insert 1 for fetch Blog.drop_collection() @@ -885,12 +925,10 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(Blog.objects.count(), 2) - Blog.objects.insert([blog2, blog3], - write_concern={"w": 0, 'continue_on_error': True}) - self.assertEqual(Blog.objects.count(), 3) - def test_get_changed_fields_query_count(self): - + """Make sure we don't perform unnecessary db operations when + none of document's fields were updated. 
+ """ class Person(Document): name = StringField() owns = ListField(ReferenceField('Organization')) @@ -898,8 +936,8 @@ class QuerySetTest(unittest.TestCase): class Organization(Document): name = StringField() - owner = ReferenceField('Person') - employees = ListField(ReferenceField('Person')) + owner = ReferenceField(Person) + employees = ListField(ReferenceField(Person)) class Project(Document): name = StringField() @@ -918,35 +956,35 @@ class QuerySetTest(unittest.TestCase): with query_counter() as q: self.assertEqual(q, 0) - fresh_o1 = Organization.objects.get(id=o1.id) - self.assertEqual(1, q) - fresh_o1._get_changed_fields() - self.assertEqual(1, q) - - with query_counter() as q: - self.assertEqual(q, 0) - - fresh_o1 = Organization.objects.get(id=o1.id) - fresh_o1.save() # No changes, does nothing - + # Fetching a document should result in a query. + org = Organization.objects.get(id=o1.id) self.assertEqual(q, 1) - with query_counter() as q: - self.assertEqual(q, 0) - - fresh_o1 = Organization.objects.get(id=o1.id) - fresh_o1.save(cascade=False) # No changes, does nothing - + # Checking changed fields of a newly fetched document should not + # result in a query. + org._get_changed_fields() self.assertEqual(q, 1) + # Saving a doc without changing any of its fields should not result + # in a query (with or without cascade=False). + org = Organization.objects.get(id=o1.id) with query_counter() as q: + org.save() self.assertEqual(q, 0) - fresh_o1 = Organization.objects.get(id=o1.id) - fresh_o1.employees.append(p2) # Dereferences - fresh_o1.save(cascade=False) # Saves + org = Organization.objects.get(id=o1.id) + with query_counter() as q: + org.save(cascade=False) + self.assertEqual(q, 0) - self.assertEqual(q, 3) + # Saving a doc after you append a reference to it should result in + # two db operations (a query for the reference and an update). + # TODO dereferencing of p2 shouldn't be necessary. 
+ org = Organization.objects.get(id=o1.id) + with query_counter() as q: + org.employees.append(p2) # dereferences p2 + org.save() # saves the org + self.assertEqual(q, 2) @skip_pymongo3 def test_slave_okay(self): @@ -960,11 +998,11 @@ class QuerySetTest(unittest.TestCase): # Retrieve the first person from the database person = self.Person.objects.slave_okay(True).first() - self.assertTrue(isinstance(person, self.Person)) + self.assertIsInstance(person, self.Person) self.assertEqual(person.name, "User A") self.assertEqual(person.age, 20) - @skip_older_mongodb + @needs_mongodb_v26 @skip_pymongo3 def test_cursor_args(self): """Ensures the cursor args can be set as expected @@ -1027,10 +1065,10 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(docs.count(), 1000) docs_string = "%s" % docs - self.assertTrue("Doc: 0" in docs_string) + self.assertIn("Doc: 0", docs_string) self.assertEqual(docs.count(), 1000) - self.assertTrue('(remaining elements truncated)' in "%s" % docs) + self.assertIn('(remaining elements truncated)', "%s" % docs) # Limit and skip docs = docs[1:4] @@ -1169,6 +1207,14 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() Blog.drop_collection() + def test_filter_chaining_with_regex(self): + person = self.Person(name='Guido van Rossum') + person.save() + + people = self.Person.objects + people = people.filter(name__startswith='Gui').filter(name__not__endswith='tum') + self.assertEqual(people.count(), 1) + def assertSequence(self, qs, expected): qs = list(qs) expected = list(expected) @@ -1226,6 +1272,7 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() + # default ordering should be used by default with db_ops_tracker() as q: BlogPost.objects.filter(title='whatever').first() self.assertEqual(len(q.get_ops()), 1) @@ -1234,10 +1281,27 @@ class QuerySetTest(unittest.TestCase): {'published_date': -1} ) + # calling order_by() should clear the default ordering with db_ops_tracker() as q: 
BlogPost.objects.filter(title='whatever').order_by().first() self.assertEqual(len(q.get_ops()), 1) - self.assertFalse('$orderby' in q.get_ops()[0]['query']) + self.assertNotIn('$orderby', q.get_ops()[0]['query']) + + # calling an explicit order_by should use a specified sort + with db_ops_tracker() as q: + BlogPost.objects.filter(title='whatever').order_by('published_date').first() + self.assertEqual(len(q.get_ops()), 1) + self.assertEqual( + q.get_ops()[0]['query']['$orderby'], + {'published_date': 1} + ) + + # calling order_by() after an explicit sort should clear it + with db_ops_tracker() as q: + qs = BlogPost.objects.filter(title='whatever').order_by('published_date') + qs.order_by().first() + self.assertEqual(len(q.get_ops()), 1) + self.assertNotIn('$orderby', q.get_ops()[0]['query']) def test_no_ordering_for_get(self): """ Ensure that Doc.objects.get doesn't use any ordering. @@ -1256,17 +1320,17 @@ class QuerySetTest(unittest.TestCase): with db_ops_tracker() as q: BlogPost.objects.get(title='whatever') self.assertEqual(len(q.get_ops()), 1) - self.assertFalse('$orderby' in q.get_ops()[0]['query']) + self.assertNotIn('$orderby', q.get_ops()[0]['query']) # Ordering should be ignored for .get even if we set it explicitly with db_ops_tracker() as q: BlogPost.objects.order_by('-title').get(title='whatever') self.assertEqual(len(q.get_ops()), 1) - self.assertFalse('$orderby' in q.get_ops()[0]['query']) + self.assertNotIn('$orderby', q.get_ops()[0]['query']) def test_find_embedded(self): """Ensure that an embedded document is properly returned from - a query. + different manners of querying. """ class User(EmbeddedDocument): name = StringField() @@ -1277,15 +1341,29 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() + user = User(name='Test User') BlogPost.objects.create( - author=User(name='Test User'), + author=user, content='Had a good coffee today...' 
) result = BlogPost.objects.first() - self.assertTrue(isinstance(result.author, User)) + self.assertIsInstance(result.author, User) self.assertEqual(result.author.name, 'Test User') + result = BlogPost.objects.get(author__name=user.name) + self.assertIsInstance(result.author, User) + self.assertEqual(result.author.name, 'Test User') + + result = BlogPost.objects.get(author={'name': user.name}) + self.assertIsInstance(result.author, User) + self.assertEqual(result.author.name, 'Test User') + + # Fails, since the string is not a type that is able to represent the + # author's document structure (should be dict) + with self.assertRaises(InvalidQueryError): + BlogPost.objects.get(author=user.name) + def test_find_empty_embedded(self): """Ensure that you can save and find an empty embedded document.""" class User(EmbeddedDocument): @@ -1396,7 +1474,7 @@ class QuerySetTest(unittest.TestCase): code_chunks = ['doc["cmnts"];', 'doc["doc-name"],', 'doc["cmnts"][i]["body"]'] for chunk in code_chunks: - self.assertTrue(chunk in sub_code) + self.assertIn(chunk, sub_code) results = BlogPost.objects.exec_js(code) expected_results = [ @@ -1786,21 +1864,16 @@ class QuerySetTest(unittest.TestCase): self.assertEqual( 1, BlogPost.objects(author__in=["%s" % me.pk]).count()) - def test_update(self): - """Ensure that atomic updates work properly. 
- """ + def test_update_intfield_operator(self): class BlogPost(Document): - name = StringField() - title = StringField() hits = IntField() - tags = ListField(StringField()) BlogPost.drop_collection() - post = BlogPost(name="Test Post", hits=5, tags=['test']) + post = BlogPost(hits=5) post.save() - BlogPost.objects.update(set__hits=10) + BlogPost.objects.update_one(set__hits=10) post.reload() self.assertEqual(post.hits, 10) @@ -1812,13 +1885,68 @@ class QuerySetTest(unittest.TestCase): post.reload() self.assertEqual(post.hits, 10) + # Negative dec operator is equal to a positive inc operator + BlogPost.objects.update_one(dec__hits=-1) + post.reload() + self.assertEqual(post.hits, 11) + + def test_update_decimalfield_operator(self): + class BlogPost(Document): + review = DecimalField() + + BlogPost.drop_collection() + + post = BlogPost(review=3.5) + post.save() + + BlogPost.objects.update_one(inc__review=0.1) # test with floats + post.reload() + self.assertEqual(float(post.review), 3.6) + + BlogPost.objects.update_one(dec__review=0.1) + post.reload() + self.assertEqual(float(post.review), 3.5) + + BlogPost.objects.update_one(inc__review=Decimal(0.12)) # test with Decimal + post.reload() + self.assertEqual(float(post.review), 3.62) + + BlogPost.objects.update_one(dec__review=Decimal(0.12)) + post.reload() + self.assertEqual(float(post.review), 3.5) + + def test_update_decimalfield_operator_not_working_with_force_string(self): + class BlogPost(Document): + review = DecimalField(force_string=True) + + BlogPost.drop_collection() + + post = BlogPost(review=3.5) + post.save() + + with self.assertRaises(OperationError): + BlogPost.objects.update_one(inc__review=0.1) # test with floats + + def test_update_listfield_operator(self): + """Ensure that atomic updates work properly. 
+ """ + class BlogPost(Document): + tags = ListField(StringField()) + + BlogPost.drop_collection() + + post = BlogPost(tags=['test']) + post.save() + + # ListField operator BlogPost.objects.update(push__tags='mongo') post.reload() - self.assertTrue('mongo' in post.tags) + self.assertIn('mongo', post.tags) BlogPost.objects.update_one(push_all__tags=['db', 'nosql']) post.reload() - self.assertTrue('db' in post.tags and 'nosql' in post.tags) + self.assertIn('db', post.tags) + self.assertIn('nosql', post.tags) tags = post.tags[:-1] BlogPost.objects.update(pop__tags=1) @@ -1830,13 +1958,64 @@ class QuerySetTest(unittest.TestCase): post.reload() self.assertEqual(post.tags.count('unique'), 1) - self.assertNotEqual(post.hits, None) - BlogPost.objects.update_one(unset__hits=1) - post.reload() - self.assertEqual(post.hits, None) + BlogPost.drop_collection() + + def test_update_unset(self): + class BlogPost(Document): + title = StringField() BlogPost.drop_collection() + post = BlogPost(title='garbage').save() + + self.assertNotEqual(post.title, None) + BlogPost.objects.update_one(unset__title=1) + post.reload() + self.assertEqual(post.title, None) + pymongo_doc = BlogPost.objects.as_pymongo().first() + self.assertNotIn('title', pymongo_doc) + + @needs_mongodb_v26 + def test_update_push_with_position(self): + """Ensure that the 'push' update with position works properly. 
+ """ + class BlogPost(Document): + slug = StringField() + tags = ListField(StringField()) + + BlogPost.drop_collection() + + post = BlogPost.objects.create(slug="test") + + BlogPost.objects.filter(id=post.id).update(push__tags="code") + BlogPost.objects.filter(id=post.id).update(push__tags__0=["mongodb", "python"]) + post.reload() + self.assertEqual(post.tags, ['mongodb', 'python', 'code']) + + BlogPost.objects.filter(id=post.id).update(set__tags__2="java") + post.reload() + self.assertEqual(post.tags, ['mongodb', 'python', 'java']) + + #test push with singular value + BlogPost.objects.filter(id=post.id).update(push__tags__0='scala') + post.reload() + self.assertEqual(post.tags, ['scala', 'mongodb', 'python', 'java']) + + def test_update_push_list_of_list(self): + """Ensure that the 'push' update operation works in the list of list + """ + class BlogPost(Document): + slug = StringField() + tags = ListField() + + BlogPost.drop_collection() + + post = BlogPost(slug="test").save() + + BlogPost.objects.filter(slug="test").update(push__tags=["value1", 123]) + post.reload() + self.assertEqual(post.tags, [["value1", 123]]) + def test_update_push_and_pull_add_to_set(self): """Ensure that the 'pull' update operation works correctly. 
""" @@ -1979,6 +2158,23 @@ class QuerySetTest(unittest.TestCase): Site.objects(id=s.id).update_one( pull_all__collaborators__helpful__user=['Ross']) + def test_pull_in_genericembedded_field(self): + + class Foo(EmbeddedDocument): + name = StringField() + + class Bar(Document): + foos = ListField(GenericEmbeddedDocumentField( + choices=[Foo, ])) + + Bar.drop_collection() + + foo = Foo(name="bar") + bar = Bar(foos=[foo]).save() + Bar.objects(id=bar.id).update(pull__foos=foo) + bar.reload() + self.assertEqual(len(bar.foos), 0) + def test_update_one_pop_generic_reference(self): class BlogTag(Document): @@ -2072,6 +2268,24 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(message.authors[1].name, "Ross") self.assertEqual(message.authors[2].name, "Adam") + def test_set_generic_embedded_documents(self): + + class Bar(EmbeddedDocument): + name = StringField() + + class User(Document): + username = StringField() + bar = GenericEmbeddedDocumentField(choices=[Bar,]) + + User.drop_collection() + + User(username='abc').save() + User.objects(username='abc').update( + set__bar=Bar(name='test'), upsert=True) + + user = User.objects(username='abc').first() + self.assertEqual(user.bar.name, "test") + def test_reload_embedded_docs_instance(self): class SubDoc(EmbeddedDocument): @@ -2241,14 +2455,19 @@ class QuerySetTest(unittest.TestCase): age = IntField() with db_ops_tracker() as q: - adult = (User.objects.filter(age__gte=18) + adult1 = (User.objects.filter(age__gte=18) .comment('looking for an adult') .first()) + + adult2 = (User.objects.comment('looking for an adult') + .filter(age__gte=18) + .first()) + ops = q.get_ops() - self.assertEqual(len(ops), 1) - op = ops[0] - self.assertEqual(op['query']['$query'], {'age': {'$gte': 18}}) - self.assertEqual(op['query']['$comment'], 'looking for an adult') + self.assertEqual(len(ops), 2) + for op in ops: + self.assertEqual(op['query']['$query'], {'age': {'$gte': 18}}) + self.assertEqual(op['query']['$comment'], 'looking for an 
adult') def test_map_reduce(self): """Ensure map/reduce is both mapping and reducing. @@ -3044,7 +3263,7 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(Foo.objects.distinct("bar"), [bar]) - @skip_older_mongodb + @needs_mongodb_v26 def test_text_indexes(self): class News(Document): title = StringField() @@ -3060,8 +3279,8 @@ class QuerySetTest(unittest.TestCase): News.drop_collection() info = News.objects._collection.index_information() - self.assertTrue('title_text_content_text' in info) - self.assertTrue('textIndexVersion' in info['title_text_content_text']) + self.assertIn('title_text_content_text', info) + self.assertIn('textIndexVersion', info['title_text_content_text']) News(title="Neymar quebrou a vertebra", content="O Brasil sofre com a perda de Neymar").save() @@ -3095,15 +3314,15 @@ class QuerySetTest(unittest.TestCase): '$search': 'dilma', '$language': 'pt'}, 'is_active': False}) - self.assertEqual(new.is_active, False) - self.assertTrue('dilma' in new.content) - self.assertTrue('planejamento' in new.title) + self.assertFalse(new.is_active) + self.assertIn('dilma', new.content) + self.assertIn('planejamento', new.title) query = News.objects.search_text("candidata") self.assertEqual(query._search_text, "candidata") new = query.first() - self.assertTrue(isinstance(new.get_text_score(), float)) + self.assertIsInstance(new.get_text_score(), float) # count query = News.objects.search_text('brasil').order_by('$text_score') @@ -3131,7 +3350,7 @@ class QuerySetTest(unittest.TestCase): 'brasil').order_by('$text_score').first() self.assertEqual(item.get_text_score(), max_text_score) - @skip_older_mongodb + @needs_mongodb_v26 def test_distinct_handles_references_to_alias(self): register_connection('testdb', 'mongoenginetest2') @@ -3398,39 +3617,12 @@ class QuerySetTest(unittest.TestCase): Group.objects(id=group.id).update(set__members=[user1, user2]) group.reload() - self.assertTrue(len(group.members) == 2) + self.assertEqual(len(group.members), 2) 
self.assertEqual(group.members[0].name, user1.name) self.assertEqual(group.members[1].name, user2.name) Group.drop_collection() - def test_dict_with_custom_baseclass(self): - """Ensure DictField working with custom base clases. - """ - class Test(Document): - testdict = DictField() - - Test.drop_collection() - - t = Test(testdict={'f': 'Value'}) - t.save() - - self.assertEqual( - Test.objects(testdict__f__startswith='Val').count(), 1) - self.assertEqual(Test.objects(testdict__f='Value').count(), 1) - Test.drop_collection() - - class Test(Document): - testdict = DictField(basecls=StringField) - - t = Test(testdict={'f': 'Value'}) - t.save() - - self.assertEqual(Test.objects(testdict__f='Value').count(), 1) - self.assertEqual( - Test.objects(testdict__f__startswith='Val').count(), 1) - Test.drop_collection() - def test_bulk(self): """Ensure bulk querying by object id returns a proper dict. """ @@ -3456,13 +3648,13 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(len(objects), 3) - self.assertTrue(post_1.id in objects) - self.assertTrue(post_2.id in objects) - self.assertTrue(post_5.id in objects) + self.assertIn(post_1.id, objects) + self.assertIn(post_2.id, objects) + self.assertIn(post_5.id, objects) - self.assertTrue(objects[post_1.id].title == post_1.title) - self.assertTrue(objects[post_2.id].title == post_2.title) - self.assertTrue(objects[post_5.id].title == post_5.title) + self.assertEqual(objects[post_1.id].title, post_1.title) + self.assertEqual(objects[post_2.id].title, post_2.title) + self.assertEqual(objects[post_5.id].title, post_5.title) BlogPost.drop_collection() @@ -3482,7 +3674,7 @@ class QuerySetTest(unittest.TestCase): Post.drop_collection() - self.assertTrue(isinstance(Post.objects, CustomQuerySet)) + self.assertIsInstance(Post.objects, CustomQuerySet) self.assertFalse(Post.objects.not_empty()) Post().save() @@ -3507,7 +3699,7 @@ class QuerySetTest(unittest.TestCase): Post.drop_collection() - self.assertTrue(isinstance(Post.objects, 
CustomQuerySet)) + self.assertIsInstance(Post.objects, CustomQuerySet) self.assertFalse(Post.objects.not_empty()) Post().save() @@ -3554,7 +3746,7 @@ class QuerySetTest(unittest.TestCase): pass Post.drop_collection() - self.assertTrue(isinstance(Post.objects, CustomQuerySet)) + self.assertIsInstance(Post.objects, CustomQuerySet) self.assertFalse(Post.objects.not_empty()) Post().save() @@ -3582,7 +3774,7 @@ class QuerySetTest(unittest.TestCase): pass Post.drop_collection() - self.assertTrue(isinstance(Post.objects, CustomQuerySet)) + self.assertIsInstance(Post.objects, CustomQuerySet) self.assertFalse(Post.objects.not_empty()) Post().save() @@ -3673,17 +3865,17 @@ class QuerySetTest(unittest.TestCase): test = Number.objects test2 = test.clone() - self.assertFalse(test == test2) + self.assertNotEqual(test, test2) self.assertEqual(test.count(), test2.count()) test = test.filter(n__gt=11) test2 = test.clone() - self.assertFalse(test == test2) + self.assertNotEqual(test, test2) self.assertEqual(test.count(), test2.count()) test = test.limit(10) test2 = test.clone() - self.assertFalse(test == test2) + self.assertNotEqual(test, test2) self.assertEqual(test.count(), test2.count()) Number.drop_collection() @@ -3773,7 +3965,7 @@ class QuerySetTest(unittest.TestCase): value.get('unique', False), value.get('sparse', False)) for key, value in info.iteritems()] - self.assertTrue(([('_cls', 1), ('message', 1)], False, False) in info) + self.assertIn(([('_cls', 1), ('message', 1)], False, False), info) def test_where(self): """Ensure that where clauses work. 
@@ -3797,13 +3989,13 @@ class QuerySetTest(unittest.TestCase): 'this["fielda"] >= this["fieldb"]', query._where_clause) results = list(query) self.assertEqual(2, len(results)) - self.assertTrue(a in results) - self.assertTrue(c in results) + self.assertIn(a, results) + self.assertIn(c, results) query = IntPair.objects.where('this[~fielda] == this[~fieldb]') results = list(query) self.assertEqual(1, len(results)) - self.assertTrue(a in results) + self.assertIn(a, results) query = IntPair.objects.where( 'function() { return this[~fielda] >= this[~fieldb] }') @@ -3811,8 +4003,8 @@ class QuerySetTest(unittest.TestCase): 'function() { return this["fielda"] >= this["fieldb"] }', query._where_clause) results = list(query) self.assertEqual(2, len(results)) - self.assertTrue(a in results) - self.assertTrue(c in results) + self.assertIn(a, results) + self.assertIn(c, results) with self.assertRaises(TypeError): list(IntPair.objects.where(fielda__gte=3)) @@ -3981,6 +4173,35 @@ class QuerySetTest(unittest.TestCase): plist = list(Person.objects.scalar('name', 'state')) self.assertEqual(plist, [(u'Wilson JR', s1)]) + def test_generic_reference_field_with_only_and_as_pymongo(self): + class TestPerson(Document): + name = StringField() + + class TestActivity(Document): + name = StringField() + owner = GenericReferenceField() + + TestPerson.drop_collection() + TestActivity.drop_collection() + + person = TestPerson(name='owner') + person.save() + + a1 = TestActivity(name='a1', owner=person) + a1.save() + + activity = TestActivity.objects(owner=person).scalar('id', 'owner').no_dereference().first() + self.assertEqual(activity[0], a1.pk) + self.assertEqual(activity[1]['_ref'], DBRef('test_person', person.pk)) + + activity = TestActivity.objects(owner=person).only('id', 'owner')[0] + self.assertEqual(activity.pk, a1.pk) + self.assertEqual(activity.owner, person) + + activity = TestActivity.objects(owner=person).only('id', 'owner').as_pymongo().first() + self.assertEqual(activity['_id'], 
a1.pk) + self.assertTrue(activity['owner']['_ref'], DBRef('test_person', person.pk)) + def test_scalar_db_field(self): class TestDoc(Document): @@ -4165,7 +4386,7 @@ class QuerySetTest(unittest.TestCase): Test.drop_collection() Test.objects(test='foo').update_one(upsert=True, set__test='foo') - self.assertFalse('_cls' in Test._collection.find_one()) + self.assertNotIn('_cls', Test._collection.find_one()) class Test(Document): meta = {'allow_inheritance': True} @@ -4174,7 +4395,7 @@ class QuerySetTest(unittest.TestCase): Test.drop_collection() Test.objects(test='foo').update_one(upsert=True, set__test='foo') - self.assertTrue('_cls' in Test._collection.find_one()) + self.assertIn('_cls', Test._collection.find_one()) def test_update_upsert_looks_like_a_digit(self): class MyDoc(DynamicDocument): @@ -4258,6 +4479,25 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(bars._cursor._Cursor__read_preference, ReadPreference.SECONDARY_PREFERRED) + @needs_mongodb_v26 + def test_read_preference_aggregation_framework(self): + class Bar(Document): + txt = StringField() + + meta = { + 'indexes': ['txt'] + } + # Aggregates with read_preference + bars = Bar.objects \ + .read_preference(ReadPreference.SECONDARY_PREFERRED) \ + .aggregate() + if IS_PYMONGO_3: + self.assertEqual(bars._CommandCursor__collection.read_preference, + ReadPreference.SECONDARY_PREFERRED) + else: + self.assertNotEqual(bars._CommandCursor__collection.read_preference, + ReadPreference.SECONDARY_PREFERRED) + def test_json_simple(self): class Embedded(EmbeddedDocument): @@ -4326,41 +4566,68 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(doc_objects, Doc.objects.from_json(json_data)) def test_as_pymongo(self): - from decimal import Decimal + class LastLogin(EmbeddedDocument): + location = StringField() + ip = StringField() + class User(Document): id = ObjectIdField('_id') name = StringField() age = IntField() price = DecimalField() + last_login = EmbeddedDocumentField(LastLogin) 
User.drop_collection() - User(name="Bob Dole", age=89, price=Decimal('1.11')).save() - User(name="Barack Obama", age=51, price=Decimal('2.22')).save() + + User.objects.create(name="Bob Dole", age=89, price=Decimal('1.11')) + User.objects.create( + name="Barack Obama", + age=51, + price=Decimal('2.22'), + last_login=LastLogin( + location='White House', + ip='104.107.108.116' + ) + ) + + results = User.objects.as_pymongo() + self.assertEqual( + set(results[0].keys()), + set(['_id', 'name', 'age', 'price']) + ) + self.assertEqual( + set(results[1].keys()), + set(['_id', 'name', 'age', 'price', 'last_login']) + ) results = User.objects.only('id', 'name').as_pymongo() - self.assertEqual(sorted(results[0].keys()), sorted(['_id', 'name'])) + self.assertEqual(set(results[0].keys()), set(['_id', 'name'])) users = User.objects.only('name', 'price').as_pymongo() results = list(users) - self.assertTrue(isinstance(results[0], dict)) - self.assertTrue(isinstance(results[1], dict)) + self.assertIsInstance(results[0], dict) + self.assertIsInstance(results[1], dict) self.assertEqual(results[0]['name'], 'Bob Dole') self.assertEqual(results[0]['price'], 1.11) self.assertEqual(results[1]['name'], 'Barack Obama') self.assertEqual(results[1]['price'], 2.22) - # Test coerce_types - users = User.objects.only( - 'name', 'price').as_pymongo(coerce_types=True) + users = User.objects.only('name', 'last_login').as_pymongo() results = list(users) - self.assertTrue(isinstance(results[0], dict)) - self.assertTrue(isinstance(results[1], dict)) - self.assertEqual(results[0]['name'], 'Bob Dole') - self.assertEqual(results[0]['price'], Decimal('1.11')) - self.assertEqual(results[1]['name'], 'Barack Obama') - self.assertEqual(results[1]['price'], Decimal('2.22')) + self.assertIsInstance(results[0], dict) + self.assertIsInstance(results[1], dict) + self.assertEqual(results[0], { + 'name': 'Bob Dole' + }) + self.assertEqual(results[1], { + 'name': 'Barack Obama', + 'last_login': { + 'location': 'White 
House', + 'ip': '104.107.108.116' + } + }) def test_as_pymongo_json_limit_fields(self): @@ -4407,12 +4674,10 @@ class QuerySetTest(unittest.TestCase): User(name="Bob Dole", organization=whitehouse).save() qs = User.objects() - self.assertTrue(isinstance(qs.first().organization, Organization)) - self.assertFalse(isinstance(qs.no_dereference().first().organization, - Organization)) - self.assertFalse(isinstance(qs.no_dereference().get().organization, - Organization)) - self.assertTrue(isinstance(qs.first().organization, Organization)) + self.assertIsInstance(qs.first().organization, Organization) + self.assertNotIsInstance(qs.no_dereference().first().organization, Organization) + self.assertNotIsInstance(qs.no_dereference().get().organization, Organization) + self.assertIsInstance(qs.first().organization, Organization) def test_no_dereference_embedded_doc(self): @@ -4445,9 +4710,9 @@ class QuerySetTest(unittest.TestCase): result = Organization.objects().no_dereference().first() - self.assertTrue(isinstance(result.admin[0], (DBRef, ObjectId))) - self.assertTrue(isinstance(result.member.user, (DBRef, ObjectId))) - self.assertTrue(isinstance(result.members[0].user, (DBRef, ObjectId))) + self.assertIsInstance(result.admin[0], (DBRef, ObjectId)) + self.assertIsInstance(result.member.user, (DBRef, ObjectId)) + self.assertIsInstance(result.members[0].user, (DBRef, ObjectId)) def test_cached_queryset(self): class Person(Document): @@ -4488,18 +4753,27 @@ class QuerySetTest(unittest.TestCase): for i in range(100): Person(name="No: %s" % i).save() - with query_counter() as q: - self.assertEqual(q, 0) - people = Person.objects.no_cache() + with query_counter() as q: + try: + self.assertEqual(q, 0) + people = Person.objects.no_cache() - [x for x in people] - self.assertEqual(q, 1) + [x for x in people] + self.assertEqual(q, 1) - list(people) - self.assertEqual(q, 2) + list(people) + self.assertEqual(q, 2) + + people.count() + self.assertEqual(q, 3) + except AssertionError as 
exc: + db = get_db() + msg = '' + for q in list(db.system.profile.find())[-50:]: + msg += str([q['ts'], q['ns'], q.get('query'), q['op']])+'\n' + msg += str(q) + raise AssertionError(str(exc) + '\n'+msg) - people.count() - self.assertEqual(q, 3) def test_cache_not_cloned(self): @@ -4524,7 +4798,6 @@ class QuerySetTest(unittest.TestCase): def test_no_cache(self): """Ensure you can add meta data to file""" - class Noddy(Document): fields = DictField() @@ -4542,15 +4815,19 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(len(list(docs)), 100) + # Can't directly get a length of a no-cache queryset. with self.assertRaises(TypeError): len(docs) + # Another iteration over the queryset should result in another db op. with query_counter() as q: - self.assertEqual(q, 0) list(docs) self.assertEqual(q, 1) + + # ... and another one to double-check. + with query_counter() as q: list(docs) - self.assertEqual(q, 2) + self.assertEqual(q, 1) def test_nested_queryset_iterator(self): # Try iterating the same queryset twice, nested. 
@@ -4639,6 +4916,30 @@ class QuerySetTest(unittest.TestCase): for obj in C.objects.no_sub_classes(): self.assertEqual(obj.__class__, C) + def test_query_generic_embedded_document(self): + """Ensure that querying sub field on generic_embedded_field works + """ + class A(EmbeddedDocument): + a_name = StringField() + + class B(EmbeddedDocument): + b_name = StringField() + + class Doc(Document): + document = GenericEmbeddedDocumentField(choices=(A, B)) + + Doc.drop_collection() + Doc(document=A(a_name='A doc')).save() + Doc(document=B(b_name='B doc')).save() + + # Using raw in filter working fine + self.assertEqual(Doc.objects( + __raw__={'document.a_name': 'A doc'}).count(), 1) + self.assertEqual(Doc.objects( + __raw__={'document.b_name': 'B doc'}).count(), 1) + self.assertEqual(Doc.objects(document__a_name='A doc').count(), 1) + self.assertEqual(Doc.objects(document__b_name='B doc').count(), 1) + def test_query_reference_to_custom_pk_doc(self): class A(Document): @@ -4763,7 +5064,7 @@ class QuerySetTest(unittest.TestCase): op = q.db.system.profile.find({"ns": {"$ne": "%s.system.indexes" % q.db.name}})[0] - self.assertFalse('$orderby' in op['query'], + self.assertNotIn('$orderby', op['query'], 'BaseQuerySet cannot use orderby in if stmt') with query_counter() as p: @@ -4774,8 +5075,7 @@ class QuerySetTest(unittest.TestCase): op = p.db.system.profile.find({"ns": {"$ne": "%s.system.indexes" % q.db.name}})[0] - self.assertTrue('$orderby' in op['query'], - 'BaseQuerySet cannot remove orderby in for loop') + self.assertIn('$orderby', op['query'], 'BaseQuerySet cannot remove orderby in for loop') def test_bool_with_ordering_from_meta_dict(self): @@ -4799,13 +5099,14 @@ class QuerySetTest(unittest.TestCase): op = q.db.system.profile.find({"ns": {"$ne": "%s.system.indexes" % q.db.name}})[0] - self.assertFalse('$orderby' in op['query'], + self.assertNotIn('$orderby', op['query'], 'BaseQuerySet must remove orderby from meta in boolen test') 
self.assertEqual(Person.objects.first().name, 'A') self.assertTrue(Person.objects._has_data(), 'Cursor has data and returned False') + @needs_mongodb_v26 def test_queryset_aggregation_framework(self): class Person(Document): name = StringField() @@ -4840,17 +5141,13 @@ class QuerySetTest(unittest.TestCase): {'_id': p1.pk, 'name': "ISABELLA LUANNA"} ]) - data = Person.objects( - age__gte=17, age__lte=40).order_by('-age').aggregate( - {'$group': { - '_id': None, - 'total': {'$sum': 1}, - 'avg': {'$avg': '$age'} - } - } - - ) - + data = Person.objects(age__gte=17, age__lte=40).order_by('-age').aggregate({ + '$group': { + '_id': None, + 'total': {'$sum': 1}, + 'avg': {'$avg': '$age'} + } + }) self.assertEqual(list(data), [ {'_id': None, 'avg': 29, 'total': 2} ]) @@ -4891,28 +5188,16 @@ class QuerySetTest(unittest.TestCase): self.assertEquals(Animal.objects(folded_ears=True).count(), 1) self.assertEquals(Animal.objects(whiskers_length=5.1).count(), 1) - def test_loop_via_invalid_id_does_not_crash(self): + def test_loop_over_invalid_id_does_not_crash(self): class Person(Document): name = StringField() - Person.objects.delete() - Person._get_collection().update({"name": "a"}, {"$set": {"_id": ""}}, upsert=True) + + Person.drop_collection() + + Person._get_collection().insert({'name': 'a', 'id': ''}) for p in Person.objects(): self.assertEqual(p.name, 'a') - def test_last_field_name_like_operator(self): - class EmbeddedItem(EmbeddedDocument): - type = StringField() - - class Doc(Document): - item = EmbeddedDocumentField(EmbeddedItem) - - Doc.drop_collection() - - doc = Doc(item=EmbeddedItem(type="axe")) - doc.save() - - self.assertEqual(1, Doc.objects(item__type__="axe").count()) - def test_len_during_iteration(self): """Tests that calling len on a queyset during iteration doesn't stop paging. 
@@ -4963,6 +5248,45 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(i, 249) self.assertEqual(j, 249) + def test_in_operator_on_non_iterable(self): + """Ensure that using the `__in` operator on a non-iterable raises an + error. + """ + class User(Document): + name = StringField() + + class BlogPost(Document): + content = StringField() + authors = ListField(ReferenceField(User)) + + User.drop_collection() + BlogPost.drop_collection() + + author = User.objects.create(name='Test User') + post = BlogPost.objects.create(content='Had a good coffee today...', + authors=[author]) + + # Make sure using `__in` with a list works + blog_posts = BlogPost.objects(authors__in=[author]) + self.assertEqual(list(blog_posts), [post]) + + # Using `__in` with a non-iterable should raise a TypeError + self.assertRaises(TypeError, BlogPost.objects(authors__in=author.pk).count) + + # Using `__in` with a `Document` (which is seemingly iterable but not + # in a way we'd expect) should raise a TypeError, too + self.assertRaises(TypeError, BlogPost.objects(authors__in=author).count) + + def test_create_count(self): + self.Person.drop_collection() + self.Person.objects.create(name="Foo") + self.Person.objects.create(name="Bar") + self.Person.objects.create(name="Baz") + self.assertEqual(self.Person.objects.count(with_limit_and_skip=True), 3) + + newPerson = self.Person.objects.create(name="Foo_1") + self.assertEqual(self.Person.objects.count(with_limit_and_skip=True), 4) + if __name__ == '__main__': unittest.main() diff --git a/tests/queryset/transform.py b/tests/queryset/transform.py index 20ab0b3f..8064f09c 100644 --- a/tests/queryset/transform.py +++ b/tests/queryset/transform.py @@ -1,5 +1,7 @@ import unittest +from bson.son import SON + from mongoengine import * from mongoengine.queryset import Q, transform @@ -28,12 +30,16 @@ class TransformTest(unittest.TestCase): {'name': {'$exists': True}}) def test_transform_update(self): + class LisDoc(Document): + foo = 
ListField(StringField()) + + class DicDoc(Document): + dictField = DictField() class Doc(Document): pass + LisDoc.drop_collection() DicDoc.drop_collection() Doc.drop_collection() @@ -42,14 +48,28 @@ class TransformTest(unittest.TestCase): for k, v in (("set", "$set"), ("set_on_insert", "$setOnInsert"), ("push", "$push")): update = transform.update(DicDoc, **{"%s__dictField__test" % k: doc}) - self.assertTrue(isinstance(update[v]["dictField.test"], dict)) + self.assertIsInstance(update[v]["dictField.test"], dict) # Update special cases update = transform.update(DicDoc, unset__dictField__test=doc) self.assertEqual(update["$unset"]["dictField.test"], 1) update = transform.update(DicDoc, pull__dictField__test=doc) - self.assertTrue(isinstance(update["$pull"]["dictField"]["test"], dict)) + self.assertIsInstance(update["$pull"]["dictField"]["test"], dict) + + update = transform.update(LisDoc, pull__foo__in=['a']) + self.assertEqual(update, {'$pull': {'foo': {'$in': ['a']}}}) + + def test_transform_update_push(self): + """Ensure the differences in behavior between 'push' and 'push_all'""" + class BlogPost(Document): + tags = ListField(StringField()) + + update = transform.update(BlogPost, push__tags=['mongo', 'db']) + self.assertEqual(update, {'$push': {'tags': ['mongo', 'db']}}) + + update = transform.update(BlogPost, push_all__tags=['mongo', 'db']) + self.assertEqual(update, {'$push': {'tags': {'$each': ['mongo', 'db']}}}) def test_query_field_name(self): """Ensure that the correct field name is used when querying. 
@@ -68,17 +88,15 @@ class TransformTest(unittest.TestCase): post = BlogPost(**data) post.save() - self.assertTrue('postTitle' in - BlogPost.objects(title=data['title'])._query) + self.assertIn('postTitle', BlogPost.objects(title=data['title'])._query) self.assertFalse('title' in BlogPost.objects(title=data['title'])._query) self.assertEqual(BlogPost.objects(title=data['title']).count(), 1) - self.assertTrue('_id' in BlogPost.objects(pk=post.id)._query) + self.assertIn('_id', BlogPost.objects(pk=post.id)._query) self.assertEqual(BlogPost.objects(pk=post.id).count(), 1) - self.assertTrue('postComments.commentContent' in - BlogPost.objects(comments__content='test')._query) + self.assertIn('postComments.commentContent', BlogPost.objects(comments__content='test')._query) self.assertEqual(BlogPost.objects(comments__content='test').count(), 1) BlogPost.drop_collection() @@ -96,8 +114,8 @@ class TransformTest(unittest.TestCase): post = BlogPost(**data) post.save() - self.assertTrue('_id' in BlogPost.objects(pk=data['title'])._query) - self.assertTrue('_id' in BlogPost.objects(title=data['title'])._query) + self.assertIn('_id', BlogPost.objects(pk=data['title'])._query) + self.assertIn('_id', BlogPost.objects(title=data['title'])._query) self.assertEqual(BlogPost.objects(pk=data['title']).count(), 1) BlogPost.drop_collection() @@ -241,6 +259,30 @@ class TransformTest(unittest.TestCase): with self.assertRaises(InvalidQueryError): events.count() + def test_update_pull_for_list_fields(self): + """ + Test added to check pull operation in update for + EmbeddedDocumentListField which is inside a EmbeddedDocumentField + """ + class Word(EmbeddedDocument): + word = StringField() + index = IntField() + + class SubDoc(EmbeddedDocument): + heading = ListField(StringField()) + text = EmbeddedDocumentListField(Word) + + class MainDoc(Document): + title = StringField() + content = EmbeddedDocumentField(SubDoc) + + word = Word(word='abc', index=1) + update = transform.update(MainDoc, 
pull__content__text=word) + self.assertEqual(update, {'$pull': {'content.text': SON([('word', u'abc'), ('index', 1)])}}) + + update = transform.update(MainDoc, pull__content__heading='xyz') + self.assertEqual(update, {'$pull': {'content.heading': 'xyz'}}) + if __name__ == '__main__': unittest.main() diff --git a/tests/queryset/visitor.py b/tests/queryset/visitor.py index 6f020e88..7b68cfb0 100644 --- a/tests/queryset/visitor.py +++ b/tests/queryset/visitor.py @@ -196,7 +196,7 @@ class QTest(unittest.TestCase): test2 = test.clone() self.assertEqual(test2.count(), 3) - self.assertFalse(test2 == test) + self.assertNotEqual(test2, test) test3 = test2.filter(x=6) self.assertEqual(test3.count(), 1) diff --git a/tests/test_connection.py b/tests/test_connection.py index d8f1a79e..88d63cdb 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -35,20 +35,19 @@ class ConnectionTest(unittest.TestCase): mongoengine.connection._dbs = {} def test_connect(self): - """Ensure that the connect() method works properly. - """ + """Ensure that the connect() method works properly.""" connect('mongoenginetest') conn = get_connection() - self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) + self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) db = get_db() - self.assertTrue(isinstance(db, pymongo.database.Database)) + self.assertIsInstance(db, pymongo.database.Database) self.assertEqual(db.name, 'mongoenginetest') connect('mongoenginetest2', alias='testdb') conn = get_connection('testdb') - self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) + self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) def test_connect_in_mocking(self): """Ensure that the connect() method works properly in mocking. 
@@ -60,31 +59,31 @@ class ConnectionTest(unittest.TestCase): connect('mongoenginetest', host='mongomock://localhost') conn = get_connection() - self.assertTrue(isinstance(conn, mongomock.MongoClient)) + self.assertIsInstance(conn, mongomock.MongoClient) connect('mongoenginetest2', host='mongomock://localhost', alias='testdb2') conn = get_connection('testdb2') - self.assertTrue(isinstance(conn, mongomock.MongoClient)) + self.assertIsInstance(conn, mongomock.MongoClient) connect('mongoenginetest3', host='mongodb://localhost', is_mock=True, alias='testdb3') conn = get_connection('testdb3') - self.assertTrue(isinstance(conn, mongomock.MongoClient)) + self.assertIsInstance(conn, mongomock.MongoClient) connect('mongoenginetest4', is_mock=True, alias='testdb4') conn = get_connection('testdb4') - self.assertTrue(isinstance(conn, mongomock.MongoClient)) + self.assertIsInstance(conn, mongomock.MongoClient) connect(host='mongodb://localhost:27017/mongoenginetest5', is_mock=True, alias='testdb5') conn = get_connection('testdb5') - self.assertTrue(isinstance(conn, mongomock.MongoClient)) + self.assertIsInstance(conn, mongomock.MongoClient) connect(host='mongomock://localhost:27017/mongoenginetest6', alias='testdb6') conn = get_connection('testdb6') - self.assertTrue(isinstance(conn, mongomock.MongoClient)) + self.assertIsInstance(conn, mongomock.MongoClient) connect(host='mongomock://localhost:27017/mongoenginetest7', is_mock=True, alias='testdb7') conn = get_connection('testdb7') - self.assertTrue(isinstance(conn, mongomock.MongoClient)) + self.assertIsInstance(conn, mongomock.MongoClient) def test_connect_with_host_list(self): """Ensure that the connect() method works when host is a list @@ -98,27 +97,27 @@ class ConnectionTest(unittest.TestCase): connect(host=['mongomock://localhost']) conn = get_connection() - self.assertTrue(isinstance(conn, mongomock.MongoClient)) + self.assertIsInstance(conn, mongomock.MongoClient) connect(host=['mongodb://localhost'], is_mock=True, 
alias='testdb2') conn = get_connection('testdb2') - self.assertTrue(isinstance(conn, mongomock.MongoClient)) + self.assertIsInstance(conn, mongomock.MongoClient) connect(host=['localhost'], is_mock=True, alias='testdb3') conn = get_connection('testdb3') - self.assertTrue(isinstance(conn, mongomock.MongoClient)) + self.assertIsInstance(conn, mongomock.MongoClient) connect(host=['mongomock://localhost:27017', 'mongomock://localhost:27018'], alias='testdb4') conn = get_connection('testdb4') - self.assertTrue(isinstance(conn, mongomock.MongoClient)) + self.assertIsInstance(conn, mongomock.MongoClient) connect(host=['mongodb://localhost:27017', 'mongodb://localhost:27018'], is_mock=True, alias='testdb5') conn = get_connection('testdb5') - self.assertTrue(isinstance(conn, mongomock.MongoClient)) + self.assertIsInstance(conn, mongomock.MongoClient) connect(host=['localhost:27017', 'localhost:27018'], is_mock=True, alias='testdb6') conn = get_connection('testdb6') - self.assertTrue(isinstance(conn, mongomock.MongoClient)) + self.assertIsInstance(conn, mongomock.MongoClient) def test_disconnect(self): """Ensure that the disconnect() method works properly @@ -146,8 +145,7 @@ class ConnectionTest(unittest.TestCase): self.assertEqual(expected_connection, actual_connection) def test_connect_uri(self): - """Ensure that the connect() method works properly with uri's - """ + """Ensure that the connect() method works properly with URIs.""" c = connect(db='mongoenginetest', alias='admin') c.admin.system.users.remove({}) c.mongoenginetest.system.users.remove({}) @@ -165,10 +163,10 @@ class ConnectionTest(unittest.TestCase): connect("testdb_uri", host='mongodb://username:password@localhost/mongoenginetest') conn = get_connection() - self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) + self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) db = get_db() - self.assertTrue(isinstance(db, pymongo.database.Database)) + self.assertIsInstance(db, 
pymongo.database.Database) self.assertEqual(db.name, 'mongoenginetest') c.admin.system.users.remove({}) @@ -181,10 +179,10 @@ class ConnectionTest(unittest.TestCase): connect("mongoenginetest", host='mongodb://localhost/') conn = get_connection() - self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) + self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) db = get_db() - self.assertTrue(isinstance(db, pymongo.database.Database)) + self.assertIsInstance(db, pymongo.database.Database) self.assertEqual(db.name, 'mongoenginetest') def test_connect_uri_default_db(self): @@ -194,10 +192,10 @@ class ConnectionTest(unittest.TestCase): connect(host='mongodb://localhost/') conn = get_connection() - self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) + self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) db = get_db() - self.assertTrue(isinstance(db, pymongo.database.Database)) + self.assertIsInstance(db, pymongo.database.Database) self.assertEqual(db.name, 'test') def test_uri_without_credentials_doesnt_override_conn_settings(self): @@ -214,9 +212,8 @@ class ConnectionTest(unittest.TestCase): self.assertRaises(OperationFailure, get_db) def test_connect_uri_with_authsource(self): - """Ensure that the connect() method works well with - the option `authSource` in URI. - This feature was introduced in MongoDB 2.4 and removed in 2.6 + """Ensure that the connect() method works well with `authSource` + option in the URI. 
""" # Create users c = connect('mongoenginetest') @@ -225,30 +222,31 @@ class ConnectionTest(unittest.TestCase): # Authentication fails without "authSource" if IS_PYMONGO_3: - test_conn = connect('mongoenginetest', alias='test1', - host='mongodb://username2:password@localhost/mongoenginetest') + test_conn = connect( + 'mongoenginetest', alias='test1', + host='mongodb://username2:password@localhost/mongoenginetest' + ) self.assertRaises(OperationFailure, test_conn.server_info) else: self.assertRaises( - MongoEngineConnectionError, connect, 'mongoenginetest', - alias='test1', + MongoEngineConnectionError, + connect, 'mongoenginetest', alias='test1', host='mongodb://username2:password@localhost/mongoenginetest' ) self.assertRaises(MongoEngineConnectionError, get_db, 'test1') # Authentication succeeds with "authSource" - connect( + authd_conn = connect( 'mongoenginetest', alias='test2', host=('mongodb://username2:password@localhost/' 'mongoenginetest?authSource=admin') ) - # This will fail starting from MongoDB 2.6+ db = get_db('test2') - self.assertTrue(isinstance(db, pymongo.database.Database)) + self.assertIsInstance(db, pymongo.database.Database) self.assertEqual(db.name, 'mongoenginetest') # Clear all users - c.admin.system.users.remove({}) + authd_conn.admin.system.users.remove({}) def test_register_connection(self): """Ensure that connections with different aliases may be registered. 
@@ -257,10 +255,10 @@ class ConnectionTest(unittest.TestCase): self.assertRaises(MongoEngineConnectionError, get_connection) conn = get_connection('testdb') - self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) + self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) db = get_db('testdb') - self.assertTrue(isinstance(db, pymongo.database.Database)) + self.assertIsInstance(db, pymongo.database.Database) self.assertEqual(db.name, 'mongoenginetest2') def test_register_connection_defaults(self): @@ -269,11 +267,10 @@ class ConnectionTest(unittest.TestCase): register_connection('testdb', 'mongoenginetest', host=None, port=None) conn = get_connection('testdb') - self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) + self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) def test_connection_kwargs(self): - """Ensure that connection kwargs get passed to pymongo. - """ + """Ensure that connection kwargs get passed to pymongo.""" connect('mongoenginetest', alias='t1', tz_aware=True) conn = get_connection('t1') @@ -283,6 +280,77 @@ class ConnectionTest(unittest.TestCase): conn = get_connection('t2') self.assertFalse(get_tz_awareness(conn)) + def test_connection_pool_via_kwarg(self): + """Ensure we can specify a max connection pool size using + a connection kwarg. + """ + # Use "max_pool_size" or "maxpoolsize" depending on PyMongo version + # (former was changed to the latter as described in + # https://jira.mongodb.org/browse/PYTHON-854). + # TODO remove once PyMongo < 3.0 support is dropped + if pymongo.version_tuple[0] >= 3: + pool_size_kwargs = {'maxpoolsize': 100} + else: + pool_size_kwargs = {'max_pool_size': 100} + + conn = connect('mongoenginetest', alias='max_pool_size_via_kwarg', **pool_size_kwargs) + self.assertEqual(conn.max_pool_size, 100) + + def test_connection_pool_via_uri(self): + """Ensure we can specify a max connection pool size using + an option in a connection URI. 
+ """ + if pymongo.version_tuple[0] == 2 and pymongo.version_tuple[1] < 9: + raise SkipTest('maxpoolsize as a URI option is only supported in PyMongo v2.9+') + + conn = connect(host='mongodb://localhost/test?maxpoolsize=100', alias='max_pool_size_via_uri') + self.assertEqual(conn.max_pool_size, 100) + + def test_write_concern(self): + """Ensure write concern can be specified in connect() via + a kwarg or as part of the connection URI. + """ + conn1 = connect(alias='conn1', host='mongodb://localhost/testing?w=1&j=true') + conn2 = connect('testing', alias='conn2', w=1, j=True) + if IS_PYMONGO_3: + self.assertEqual(conn1.write_concern.document, {'w': 1, 'j': True}) + self.assertEqual(conn2.write_concern.document, {'w': 1, 'j': True}) + else: + self.assertEqual(dict(conn1.write_concern), {'w': 1, 'j': True}) + self.assertEqual(dict(conn2.write_concern), {'w': 1, 'j': True}) + + def test_connect_with_replicaset_via_uri(self): + """Ensure connect() works when specifying a replicaSet via the + MongoDB URI. 
+ """ + if IS_PYMONGO_3: + c = connect(host='mongodb://localhost/test?replicaSet=local-rs') + db = get_db() + self.assertIsInstance(db, pymongo.database.Database) + self.assertEqual(db.name, 'test') + else: + # PyMongo < v3.x raises an exception: + # "localhost:27017 is not a member of replica set local-rs" + with self.assertRaises(MongoEngineConnectionError): + c = connect(host='mongodb://localhost/test?replicaSet=local-rs') + + def test_connect_with_replicaset_via_kwargs(self): + """Ensure connect() works when specifying a replicaSet via the + connection kwargs + """ + if IS_PYMONGO_3: + c = connect(replicaset='local-rs') + self.assertEqual(c._MongoClient__options.replica_set_name, + 'local-rs') + db = get_db() + self.assertIsInstance(db, pymongo.database.Database) + self.assertEqual(db.name, 'test') + else: + # PyMongo < v3.x raises an exception: + # "localhost:27017 is not a member of replica set local-rs" + with self.assertRaises(MongoEngineConnectionError): + c = connect(replicaset='local-rs') + def test_datetime(self): connect('mongoenginetest', tz_aware=True) d = datetime.datetime(2010, 5, 5, tzinfo=utc) @@ -296,6 +364,12 @@ class ConnectionTest(unittest.TestCase): date_doc = DateDoc.objects.first() self.assertEqual(d, date_doc.the_date) + def test_read_preference_from_parse(self): + if IS_PYMONGO_3: + from pymongo import ReadPreference + conn = connect(host="mongodb://a1.vpc,a2.vpc,a3.vpc/prod?readPreference=secondaryPreferred") + self.assertEqual(conn.read_preference, ReadPreference.SECONDARY_PREFERRED) + def test_multiple_connection_settings(self): connect('mongoenginetest', alias='t1', host="localhost") @@ -303,8 +377,8 @@ class ConnectionTest(unittest.TestCase): mongo_connections = mongoengine.connection._connections self.assertEqual(len(mongo_connections.items()), 2) - self.assertTrue('t1' in mongo_connections.keys()) - self.assertTrue('t2' in mongo_connections.keys()) + self.assertIn('t1', mongo_connections.keys()) + self.assertIn('t2', 
mongo_connections.keys()) if not IS_PYMONGO_3: self.assertEqual(mongo_connections['t1'].host, 'localhost') self.assertEqual(mongo_connections['t2'].host, '127.0.0.1') diff --git a/tests/test_context_managers.py b/tests/test_context_managers.py index 0f6bf815..df5e5212 100644 --- a/tests/test_context_managers.py +++ b/tests/test_context_managers.py @@ -89,15 +89,15 @@ class ContextManagersTest(unittest.TestCase): with no_dereference(Group) as Group: group = Group.objects.first() - self.assertTrue(all([not isinstance(m, User) - for m in group.members])) - self.assertFalse(isinstance(group.ref, User)) - self.assertFalse(isinstance(group.generic, User)) + for m in group.members: + self.assertNotIsInstance(m, User) + self.assertNotIsInstance(group.ref, User) + self.assertNotIsInstance(group.generic, User) - self.assertTrue(all([isinstance(m, User) - for m in group.members])) - self.assertTrue(isinstance(group.ref, User)) - self.assertTrue(isinstance(group.generic, User)) + for m in group.members: + self.assertIsInstance(m, User) + self.assertIsInstance(group.ref, User) + self.assertIsInstance(group.generic, User) def test_no_dereference_context_manager_dbref(self): """Ensure that DBRef items in ListFields aren't dereferenced. 
@@ -129,19 +129,17 @@ class ContextManagersTest(unittest.TestCase): group = Group.objects.first() self.assertTrue(all([not isinstance(m, User) for m in group.members])) - self.assertFalse(isinstance(group.ref, User)) - self.assertFalse(isinstance(group.generic, User)) + self.assertNotIsInstance(group.ref, User) + self.assertNotIsInstance(group.generic, User) self.assertTrue(all([isinstance(m, User) for m in group.members])) - self.assertTrue(isinstance(group.ref, User)) - self.assertTrue(isinstance(group.generic, User)) + self.assertIsInstance(group.ref, User) + self.assertIsInstance(group.generic, User) def test_no_sub_classes(self): class A(Document): x = IntField() - y = IntField() - meta = {'allow_inheritance': True} class B(A): @@ -152,29 +150,29 @@ class ContextManagersTest(unittest.TestCase): A.drop_collection() - A(x=10, y=20).save() - A(x=15, y=30).save() - B(x=20, y=40).save() - B(x=30, y=50).save() - C(x=40, y=60).save() + A(x=10).save() + A(x=15).save() + B(x=20).save() + B(x=30).save() + C(x=40).save() self.assertEqual(A.objects.count(), 5) self.assertEqual(B.objects.count(), 3) self.assertEqual(C.objects.count(), 1) - with no_sub_classes(A) as A: + with no_sub_classes(A): self.assertEqual(A.objects.count(), 2) for obj in A.objects: self.assertEqual(obj.__class__, A) - with no_sub_classes(B) as B: + with no_sub_classes(B): self.assertEqual(B.objects.count(), 2) for obj in B.objects: self.assertEqual(obj.__class__, B) - with no_sub_classes(C) as C: + with no_sub_classes(C): self.assertEqual(C.objects.count(), 1) for obj in C.objects: @@ -185,18 +183,125 @@ class ContextManagersTest(unittest.TestCase): self.assertEqual(B.objects.count(), 3) self.assertEqual(C.objects.count(), 1) + def test_no_sub_classes_modification_to_document_class_are_temporary(self): + class A(Document): + x = IntField() + meta = {'allow_inheritance': True} + + class B(A): + z = IntField() + + self.assertEqual(A._subclasses, ('A', 'A.B')) + with no_sub_classes(A): + 
self.assertEqual(A._subclasses, ('A',)) + self.assertEqual(A._subclasses, ('A', 'A.B')) + + self.assertEqual(B._subclasses, ('A.B',)) + with no_sub_classes(B): + self.assertEqual(B._subclasses, ('A.B',)) + self.assertEqual(B._subclasses, ('A.B',)) + + def test_no_subclass_context_manager_does_not_swallow_exception(self): + class User(Document): + name = StringField() + + with self.assertRaises(TypeError): + with no_sub_classes(User): + raise TypeError() + + def test_query_counter_does_not_swallow_exception(self): + + with self.assertRaises(TypeError): + with query_counter() as q: + raise TypeError() + + def test_query_counter_temporarily_modifies_profiling_level(self): + connect('mongoenginetest') + db = get_db() + + initial_profiling_level = db.profiling_level() + + try: + NEW_LEVEL = 1 + db.set_profiling_level(NEW_LEVEL) + self.assertEqual(db.profiling_level(), NEW_LEVEL) + with query_counter() as q: + self.assertEqual(db.profiling_level(), 2) + self.assertEqual(db.profiling_level(), NEW_LEVEL) + except Exception: + db.set_profiling_level(initial_profiling_level) # Ensures it gets reseted no matter the outcome of the test + raise + def test_query_counter(self): connect('mongoenginetest') db = get_db() - db.test.find({}) + + collection = db.query_counter + collection.drop() + + def issue_1_count_query(): + collection.find({}).count() + + def issue_1_insert_query(): + collection.insert_one({'test': 'garbage'}) + + def issue_1_find_query(): + collection.find_one() + + counter = 0 + with query_counter() as q: + self.assertEqual(q, counter) + self.assertEqual(q, counter) # Ensures previous count query did not get counted + + for _ in range(10): + issue_1_insert_query() + counter += 1 + self.assertEqual(q, counter) + + for _ in range(4): + issue_1_find_query() + counter += 1 + self.assertEqual(q, counter) + + for _ in range(3): + issue_1_count_query() + counter += 1 + self.assertEqual(q, counter) + + def test_query_counter_counts_getmore_queries(self): + 
connect('mongoenginetest') + db = get_db() + + collection = db.query_counter + collection.drop() + + many_docs = [{'test': 'garbage %s' % i} for i in range(150)] + collection.insert_many(many_docs) # first batch of documents contains 101 documents with query_counter() as q: - self.assertEqual(0, q) + self.assertEqual(q, 0) + list(collection.find()) + self.assertEqual(q, 2) # 1st select + 1 getmore - for i in range(1, 51): - db.test.find({}).count() + def test_query_counter_ignores_particular_queries(self): + connect('mongoenginetest') + db = get_db() - self.assertEqual(50, q) + collection = db.query_counter + collection.insert_many([{'test': 'garbage %s' % i} for i in range(10)]) + + with query_counter() as q: + self.assertEqual(q, 0) + cursor = collection.find() + self.assertEqual(q, 0) # cursor wasn't opened yet + _ = next(cursor) # opens the cursor and fires the find query + self.assertEqual(q, 1) + + cursor.close() # issues a `killcursors` query that is ignored by the context + self.assertEqual(q, 1) + + _ = db.system.indexes.find_one() # queries on db.system.indexes are ignored as well + self.assertEqual(q, 1) if __name__ == '__main__': unittest.main() diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py index 6830a188..1ea562a5 100644 --- a/tests/test_datastructures.py +++ b/tests/test_datastructures.py @@ -1,6 +1,21 @@ import unittest -from mongoengine.base.datastructures import StrictDict, SemiStrictDict +from mongoengine.base.datastructures import StrictDict, BaseList + + +class TestBaseList(unittest.TestCase): + + def test_iter_simple(self): + values = [True, False, True, False] + base_list = BaseList(values, instance=None, name='my_name') + self.assertEqual(values, list(base_list)) + + def test_iter_allow_modification_while_iterating_withou_error(self): + # regular list allows for this, thus this subclass must comply to that + base_list = BaseList([True, False, True, False], instance=None, name='my_name') + for idx, val in 
enumerate(base_list): + if val: + base_list.pop(idx) class TestStrictDict(unittest.TestCase): @@ -76,44 +91,5 @@ class TestStrictDict(unittest.TestCase): assert dict(**d) == {'a': 1, 'b': 2} -class TestSemiSrictDict(TestStrictDict): - def strict_dict_class(self, *args, **kwargs): - return SemiStrictDict.create(*args, **kwargs) - - def test_init_fails_on_nonexisting_attrs(self): - # disable irrelevant test - pass - - def test_setattr_raises_on_nonexisting_attr(self): - # disable irrelevant test - pass - - def test_setattr_getattr_nonexisting_attr_succeeds(self): - d = self.dtype() - d.x = 1 - self.assertEqual(d.x, 1) - - def test_init_succeeds_with_nonexisting_attrs(self): - d = self.dtype(a=1, b=1, c=1, x=2) - self.assertEqual((d.a, d.b, d.c, d.x), (1, 1, 1, 2)) - - def test_iter_with_nonexisting_attrs(self): - d = self.dtype(a=1, b=1, c=1, x=2) - self.assertEqual(list(d), ['a', 'b', 'c', 'x']) - - def test_iteritems_with_nonexisting_attrs(self): - d = self.dtype(a=1, b=1, c=1, x=2) - self.assertEqual(list(d.iteritems()), [('a', 1), ('b', 1), ('c', 1), ('x', 2)]) - - def tets_cmp_with_strict_dicts(self): - d = self.dtype(a=1, b=1, c=1) - dd = StrictDict.create(("a", "b", "c"))(a=1, b=1, c=1) - self.assertEqual(d, dd) - - def test_cmp_with_strict_dict_with_nonexisting_attrs(self): - d = self.dtype(a=1, b=1, c=1, x=2) - dd = StrictDict.create(("a", "b", "c", "x"))(a=1, b=1, c=1, x=2) - self.assertEqual(d, dd) - if __name__ == '__main__': unittest.main() diff --git a/tests/test_dereference.py b/tests/test_dereference.py index 7f58a85b..8b8bcfb2 100644 --- a/tests/test_dereference.py +++ b/tests/test_dereference.py @@ -200,8 +200,8 @@ class FieldTest(unittest.TestCase): group = Group(author=user, members=[user]).save() raw_data = Group._get_collection().find_one() - self.assertTrue(isinstance(raw_data['author'], DBRef)) - self.assertTrue(isinstance(raw_data['members'][0], DBRef)) + self.assertIsInstance(raw_data['author'], DBRef) + 
self.assertIsInstance(raw_data['members'][0], DBRef) group = Group.objects.first() self.assertEqual(group.author, user) @@ -224,8 +224,8 @@ class FieldTest(unittest.TestCase): self.assertEqual(group.members, [user]) raw_data = Group._get_collection().find_one() - self.assertTrue(isinstance(raw_data['author'], ObjectId)) - self.assertTrue(isinstance(raw_data['members'][0], ObjectId)) + self.assertIsInstance(raw_data['author'], ObjectId) + self.assertIsInstance(raw_data['members'][0], ObjectId) def test_recursive_reference(self): """Ensure that ReferenceFields can reference their own documents. @@ -469,7 +469,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 4) for m in group_obj.members: - self.assertTrue('User' in m.__class__.__name__) + self.assertIn('User', m.__class__.__name__) # Document select_related with query_counter() as q: @@ -485,7 +485,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 4) for m in group_obj.members: - self.assertTrue('User' in m.__class__.__name__) + self.assertIn('User', m.__class__.__name__) # Queryset select_related with query_counter() as q: @@ -502,7 +502,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 4) for m in group_obj.members: - self.assertTrue('User' in m.__class__.__name__) + self.assertIn('User', m.__class__.__name__) UserA.drop_collection() UserB.drop_collection() @@ -560,7 +560,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 4) for m in group_obj.members: - self.assertTrue('User' in m.__class__.__name__) + self.assertIn('User', m.__class__.__name__) # Document select_related with query_counter() as q: @@ -576,7 +576,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 4) for m in group_obj.members: - self.assertTrue('User' in m.__class__.__name__) + self.assertIn('User', m.__class__.__name__) # Queryset select_related with query_counter() as q: @@ -593,7 +593,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 4) for m in group_obj.members: - 
self.assertTrue('User' in m.__class__.__name__) + self.assertIn('User', m.__class__.__name__) UserA.drop_collection() UserB.drop_collection() @@ -633,7 +633,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 2) for k, m in group_obj.members.iteritems(): - self.assertTrue(isinstance(m, User)) + self.assertIsInstance(m, User) # Document select_related with query_counter() as q: @@ -646,7 +646,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 2) for k, m in group_obj.members.iteritems(): - self.assertTrue(isinstance(m, User)) + self.assertIsInstance(m, User) # Queryset select_related with query_counter() as q: @@ -660,7 +660,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 2) for k, m in group_obj.members.iteritems(): - self.assertTrue(isinstance(m, User)) + self.assertIsInstance(m, User) User.drop_collection() Group.drop_collection() @@ -715,7 +715,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 4) for k, m in group_obj.members.iteritems(): - self.assertTrue('User' in m.__class__.__name__) + self.assertIn('User', m.__class__.__name__) # Document select_related with query_counter() as q: @@ -731,7 +731,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 4) for k, m in group_obj.members.iteritems(): - self.assertTrue('User' in m.__class__.__name__) + self.assertIn('User', m.__class__.__name__) # Queryset select_related with query_counter() as q: @@ -748,7 +748,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 4) for k, m in group_obj.members.iteritems(): - self.assertTrue('User' in m.__class__.__name__) + self.assertIn('User', m.__class__.__name__) Group.objects.delete() Group().save() @@ -806,7 +806,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 2) for k, m in group_obj.members.iteritems(): - self.assertTrue(isinstance(m, UserA)) + self.assertIsInstance(m, UserA) # Document select_related with query_counter() as q: @@ -822,7 +822,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 
2) for k, m in group_obj.members.iteritems(): - self.assertTrue(isinstance(m, UserA)) + self.assertIsInstance(m, UserA) # Queryset select_related with query_counter() as q: @@ -839,7 +839,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 2) for k, m in group_obj.members.iteritems(): - self.assertTrue(isinstance(m, UserA)) + self.assertIsInstance(m, UserA) UserA.drop_collection() Group.drop_collection() @@ -894,7 +894,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 4) for k, m in group_obj.members.iteritems(): - self.assertTrue('User' in m.__class__.__name__) + self.assertIn('User', m.__class__.__name__) # Document select_related with query_counter() as q: @@ -910,7 +910,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 4) for k, m in group_obj.members.iteritems(): - self.assertTrue('User' in m.__class__.__name__) + self.assertIn('User', m.__class__.__name__) # Queryset select_related with query_counter() as q: @@ -927,7 +927,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 4) for k, m in group_obj.members.iteritems(): - self.assertTrue('User' in m.__class__.__name__) + self.assertIn('User', m.__class__.__name__) Group.objects.delete() Group().save() @@ -1209,10 +1209,10 @@ class FieldTest(unittest.TestCase): # Can't use query_counter across databases - so test the _data object book = Book.objects.first() - self.assertFalse(isinstance(book._data['author'], User)) + self.assertNotIsInstance(book._data['author'], User) book.select_related() - self.assertTrue(isinstance(book._data['author'], User)) + self.assertIsInstance(book._data['author'], User) def test_non_ascii_pk(self): """ diff --git a/tests/test_utils.py b/tests/test_utils.py new file mode 100644 index 00000000..562cc1ff --- /dev/null +++ b/tests/test_utils.py @@ -0,0 +1,38 @@ +import unittest +import re + +from mongoengine.base.utils import LazyRegexCompiler + +signal_output = [] + + +class LazyRegexCompilerTest(unittest.TestCase): + + def 
test_lazy_regex_compiler_verify_laziness_of_descriptor(self): + class UserEmail(object): + EMAIL_REGEX = LazyRegexCompiler('@', flags=32) + + descriptor = UserEmail.__dict__['EMAIL_REGEX'] + self.assertIsNone(descriptor._compiled_regex) + + regex = UserEmail.EMAIL_REGEX + self.assertEqual(regex, re.compile('@', flags=32)) + self.assertEqual(regex.search('user@domain.com').group(), '@') + + user_email = UserEmail() + self.assertIs(user_email.EMAIL_REGEX, UserEmail.EMAIL_REGEX) + + def test_lazy_regex_compiler_verify_cannot_set_descriptor_on_instance(self): + class UserEmail(object): + EMAIL_REGEX = LazyRegexCompiler('@') + + user_email = UserEmail() + with self.assertRaises(AttributeError): + user_email.EMAIL_REGEX = re.compile('@') + + def test_lazy_regex_compiler_verify_can_override_class_attr(self): + class UserEmail(object): + EMAIL_REGEX = LazyRegexCompiler('@') + + UserEmail.EMAIL_REGEX = re.compile('cookies') + self.assertEqual(UserEmail.EMAIL_REGEX.search('Cake & cookies').group(), 'cookies') diff --git a/tests/utils.py b/tests/utils.py new file mode 100644 index 00000000..acd318c5 --- /dev/null +++ b/tests/utils.py @@ -0,0 +1,82 @@ +import unittest + +from nose.plugins.skip import SkipTest + +from mongoengine import connect +from mongoengine.connection import get_db, get_connection +from mongoengine.python_support import IS_PYMONGO_3 + + +MONGO_TEST_DB = 'mongoenginetest' # standard name for the test database + + +class MongoDBTestCase(unittest.TestCase): + """Base class for tests that need a mongodb connection + It ensures that the db is clean at the beginning and dropped at the end automatically + """ + + @classmethod + def setUpClass(cls): + cls._connection = connect(db=MONGO_TEST_DB) + cls._connection.drop_database(MONGO_TEST_DB) + cls.db = get_db() + + @classmethod + def tearDownClass(cls): + cls._connection.drop_database(MONGO_TEST_DB) + + +def get_mongodb_version(): + """Return the version tuple of the MongoDB server that the default + connection is 
connected to. + """ + return tuple(get_connection().server_info()['versionArray']) + + +def _decorated_with_ver_requirement(func, ver_tuple): + """Return a given function decorated with the version requirement + for a particular MongoDB version tuple. + """ + def _inner(*args, **kwargs): + mongodb_ver = get_mongodb_version() + if mongodb_ver >= ver_tuple: + return func(*args, **kwargs) + + raise SkipTest('Needs MongoDB v{}+'.format( + '.'.join([str(v) for v in ver_tuple]) + )) + + _inner.__name__ = func.__name__ + _inner.__doc__ = func.__doc__ + + return _inner + + +def needs_mongodb_v26(func): + """Raise a SkipTest exception if we're working with MongoDB version + lower than v2.6. + """ + return _decorated_with_ver_requirement(func, (2, 6)) + + +def needs_mongodb_v3(func): + """Raise a SkipTest exception if we're working with MongoDB version + lower than v3.0. + """ + return _decorated_with_ver_requirement(func, (3, 0)) + + +def skip_pymongo3(f): + """Raise a SkipTest exception if we're running a test against + PyMongo v3.x. + """ + def _inner(*args, **kwargs): + if IS_PYMONGO_3: + raise SkipTest("Useless with PyMongo 3+") + return f(*args, **kwargs) + + _inner.__name__ = f.__name__ + _inner.__doc__ = f.__doc__ + + return _inner + diff --git a/tox.ini b/tox.ini index d6052edf..815d2acc 100644 --- a/tox.ini +++ b/tox.ini @@ -1,22 +1,12 @@ [tox] -envlist = {py26,py27,py33,py34,py35,pypy,pypy3}-{mg27,mg28},flake8 +envlist = {py27,py35,pypy,pypy3}-{mg35,mg3x} [testenv] commands = python setup.py nosetests {posargs} deps = nose - mg27: PyMongo<2.8 - mg28: PyMongo>=2.8,<3.0 - mg30: PyMongo>=3.0 - mgdev: https://github.com/mongodb/mongo-python-driver/tarball/master + mg35: PyMongo==3.5 + mg3x: PyMongo>=3.0,<3.7 setenv = PYTHON_EGG_CACHE = {envdir}/python-eggs -passenv = windir - -[testenv:flake8] -deps = - flake8 - flake8-import-order -commands = - flake8