Compare commits

1 commit

Author: Stefan Wojcik
SHA1: a52d3b92a8
Message: fix BaseQuerySet.fields when mixing exclusion/inclusion with complex values like $slice
Date: 2016-12-28 11:06:14 -05:00
28 changed files with 594 additions and 1028 deletions

View File

@@ -1,23 +0,0 @@
#!/bin/bash
sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10
if [ "$MONGODB" = "2.4" ]; then
echo "deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen" | sudo tee /etc/apt/sources.list.d/mongodb.list
sudo apt-get update
sudo apt-get install mongodb-10gen=2.4.14
sudo service mongodb start
elif [ "$MONGODB" = "2.6" ]; then
echo "deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen" | sudo tee /etc/apt/sources.list.d/mongodb.list
sudo apt-get update
sudo apt-get install mongodb-org-server=2.6.12
# service should be started automatically
elif [ "$MONGODB" = "3.0" ]; then
echo "deb http://repo.mongodb.org/apt/ubuntu precise/mongodb-org/3.0 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb.list
sudo apt-get update
sudo apt-get install mongodb-org-server=3.0.14
# service should be started automatically
else
echo "Invalid MongoDB version, expected 2.4, 2.6, or 3.0."
exit 1
fi;

View File

@@ -1,48 +1,28 @@
# For full coverage, we'd have to test all supported Python, MongoDB, and
# PyMongo combinations. However, that would result in an overly long build
# with a very large number of jobs, hence we only test a subset of all the
# combinations:
# * MongoDB v2.4 & v3.0 are only tested against Python v2.7 & v3.5.
# * MongoDB v2.4 is tested against PyMongo v2.7 & v3.x.
# * MongoDB v3.0 is tested against PyMongo v3.x.
# * MongoDB v2.6 is currently the "main" version tested against Python v2.7,
# v3.5, PyPy & PyPy3, and PyMongo v2.7, v2.8 & v3.x.
#
# Reminder: Update README.rst if you change MongoDB versions we test.
language: python language: python
python: python:
- 2.7 - '2.7'
- 3.5 - '3.3'
- '3.4'
- '3.5'
- pypy - pypy
- pypy3 - pypy3
env: env:
- MONGODB=2.6 PYMONGO=2.7 - PYMONGO=2.7
- MONGODB=2.6 PYMONGO=2.8 - PYMONGO=2.8
- MONGODB=2.6 PYMONGO=3.0 - PYMONGO=3.0
- PYMONGO=dev
matrix: matrix:
# Finish the build as soon as one job fails
fast_finish: true fast_finish: true
include:
- python: 2.7
env: MONGODB=2.4 PYMONGO=2.7
- python: 2.7
env: MONGODB=2.4 PYMONGO=3.0
- python: 2.7
env: MONGODB=3.0 PYMONGO=3.0
- python: 3.5
env: MONGODB=2.4 PYMONGO=2.7
- python: 3.5
env: MONGODB=2.4 PYMONGO=3.0
- python: 3.5
env: MONGODB=3.0 PYMONGO=3.0
before_install: before_install:
- bash .install_mongodb_on_travis.sh - travis_retry sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10
- echo 'deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen' |
sudo tee /etc/apt/sources.list.d/mongodb.list
- travis_retry sudo apt-get update
- travis_retry sudo apt-get install mongodb-org-server
install: install:
- sudo apt-get install python-dev python3-dev libopenjpeg-dev zlib1g-dev libjpeg-turbo8-dev - sudo apt-get install python-dev python3-dev libopenjpeg-dev zlib1g-dev libjpeg-turbo8-dev
@@ -50,17 +30,14 @@ install:
python-tk python-tk
- travis_retry pip install --upgrade pip - travis_retry pip install --upgrade pip
- travis_retry pip install coveralls - travis_retry pip install coveralls
- travis_retry pip install flake8 flake8-import-order - travis_retry pip install flake8
- travis_retry pip install tox>=1.9 - travis_retry pip install tox>=1.9
- travis_retry pip install "virtualenv<14.0.0" # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32) - travis_retry pip install "virtualenv<14.0.0" # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32)
- travis_retry tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -e test - travis_retry tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -e test
# Cache dependencies installed via pip
cache: pip
# Run flake8 for py27 # Run flake8 for py27
before_script: before_script:
- if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then flake8 .; else echo "flake8 only runs on py27"; fi - if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then tox -e flake8; fi
script: script:
- tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- --with-coverage - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- --with-coverage
@@ -68,34 +45,22 @@ script:
# For now only submit coveralls for Python v2.7. Python v3.x currently shows # For now only submit coveralls for Python v2.7. Python v3.x currently shows
# 0% coverage. That's caused by 'use_2to3', which builds the py3-compatible # 0% coverage. That's caused by 'use_2to3', which builds the py3-compatible
# code in a separate dir and runs tests on that. # code in a separate dir and runs tests on that.
after_success: after_script:
- if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then coveralls --verbose; fi - if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then coveralls --verbose; fi
notifications: notifications:
irc: irc.freenode.org#mongoengine irc: irc.freenode.org#mongoengine
# Only run builds on the master branch and GitHub releases (tagged as vX.Y.Z)
branches: branches:
only: only:
- master - master
- /^v.*$/ - /^v.*$/
# Whenever a new release is created via GitHub, publish it on PyPI.
deploy: deploy:
provider: pypi provider: pypi
user: the_drow user: the_drow
password: password:
secure: QMyatmWBnC6ZN3XLW2+fTBDU4LQcp1m/LjR2/0uamyeUzWKdlOoh/Wx5elOgLwt/8N9ppdPeG83ose1jOz69l5G0MUMjv8n/RIcMFSpCT59tGYqn3kh55b0cIZXFT9ar+5cxlif6a5rS72IHm5li7QQyxexJIII6Uxp0kpvUmek= secure: QMyatmWBnC6ZN3XLW2+fTBDU4LQcp1m/LjR2/0uamyeUzWKdlOoh/Wx5elOgLwt/8N9ppdPeG83ose1jOz69l5G0MUMjv8n/RIcMFSpCT59tGYqn3kh55b0cIZXFT9ar+5cxlif6a5rS72IHm5li7QQyxexJIII6Uxp0kpvUmek=
# create a source distribution and a pure python wheel for faster installs
distributions: "sdist bdist_wheel"
# only deploy on tagged commits (aka GitHub releases) and only for the
# parent repo's builds running Python 2.7 along with dev PyMongo (we run
# Travis against many different Python and PyMongo versions and we don't
# want the deploy to occur multiple times).
on: on:
tags: true tags: true
repo: MongoEngine/mongoengine repo: MongoEngine/mongoengine
condition: "$PYMONGO = 3.0"
python: 2.7

View File

@@ -14,7 +14,7 @@ Before starting to write code, look for existing `tickets
<https://github.com/MongoEngine/mongoengine/issues?state=open>`_ or `create one <https://github.com/MongoEngine/mongoengine/issues?state=open>`_ or `create one
<https://github.com/MongoEngine/mongoengine/issues>`_ for your specific <https://github.com/MongoEngine/mongoengine/issues>`_ for your specific
issue or feature request. That way you avoid working on something issue or feature request. That way you avoid working on something
that might not be of interest or that has already been addressed. If in doubt that might not be of interest or that has already been addressed. If in doubt
post to the `user group <http://groups.google.com/group/mongoengine-users>` post to the `user group <http://groups.google.com/group/mongoengine-users>`
Supported Interpreters Supported Interpreters
@@ -29,20 +29,19 @@ Style Guide
----------- -----------
MongoEngine aims to follow `PEP8 <http://www.python.org/dev/peps/pep-0008/>`_ MongoEngine aims to follow `PEP8 <http://www.python.org/dev/peps/pep-0008/>`_
including 4 space indents. When possible we try to stick to 79 character line including 4 space indents. When possible we try to stick to 79 character line limits.
limits. However, screens got bigger and an ORM has a strong focus on However, screens got bigger and an ORM has a strong focus on readability and
readability and if it can help, we accept 119 as maximum line length, in a if it can help, we accept 119 as maximum line length, in a similar way as
similar way as `django does `django does <https://docs.djangoproject.com/en/dev/internals/contributing/writing-code/coding-style/#python-style>`_
<https://docs.djangoproject.com/en/dev/internals/contributing/writing-code/coding-style/#python-style>`_
Testing Testing
------- -------
All tests are run on `Travis <http://travis-ci.org/MongoEngine/mongoengine>`_ All tests are run on `Travis <http://travis-ci.org/MongoEngine/mongoengine>`_
and any pull requests are automatically tested. Any pull requests without and any pull requests are automatically tested by Travis. Any pull requests
tests will take longer to be integrated and might be refused. without tests will take longer to be integrated and might be refused.
You may also submit a simple failing test as a pull request if you don't know You may also submit a simple failing test as a PullRequest if you don't know
how to fix it, it will be easier for other people to work on it and it may get how to fix it, it will be easier for other people to work on it and it may get
fixed faster. fixed faster.
@@ -50,18 +49,13 @@ General Guidelines
------------------ ------------------
- Avoid backward breaking changes if at all possible. - Avoid backward breaking changes if at all possible.
- If you *have* to introduce a breaking change, make it very clear in your
pull request's description. Also, describe how users of this package
should adapt to the breaking change in docs/upgrade.rst.
- Write inline documentation for new classes and methods. - Write inline documentation for new classes and methods.
- Write tests and make sure they pass (make sure you have a mongod - Write tests and make sure they pass (make sure you have a mongod
running on the default port, then execute ``python setup.py nosetests`` running on the default port, then execute ``python setup.py nosetests``
from the cmd line to run the test suite). from the cmd line to run the test suite).
- Ensure tests pass on all supported Python, PyMongo, and MongoDB versions. - Ensure tests pass on every Python and PyMongo versions.
You can test various Python and PyMongo versions locally by executing You can test on these versions locally by executing ``tox``
``tox``. For different MongoDB versions, you can rely on our automated - Add enhancements or problematic bug fixes to docs/changelog.rst
Travis tests.
- Add enhancements or problematic bug fixes to docs/changelog.rst.
- Add yourself to AUTHORS :) - Add yourself to AUTHORS :)
Documentation Documentation
@@ -75,6 +69,3 @@ just make your changes to the inline documentation of the appropriate
branch and submit a `pull request <https://help.github.com/articles/using-pull-requests>`_. branch and submit a `pull request <https://help.github.com/articles/using-pull-requests>`_.
You might also use the github `Edit <https://github.com/blog/844-forking-with-the-edit-button>`_ You might also use the github `Edit <https://github.com/blog/844-forking-with-the-edit-button>`_
button. button.
If you want to test your documentation changes locally, you need to install
the ``sphinx`` package.

View File

@@ -19,42 +19,32 @@ MongoEngine
About About
===== =====
MongoEngine is a Python Object-Document Mapper for working with MongoDB. MongoEngine is a Python Object-Document Mapper for working with MongoDB.
Documentation is available at https://mongoengine-odm.readthedocs.io - there Documentation available at https://mongoengine-odm.readthedocs.io - there is currently
is currently a `tutorial <https://mongoengine-odm.readthedocs.io/tutorial.html>`_, a `tutorial <https://mongoengine-odm.readthedocs.io/tutorial.html>`_, a `user guide
a `user guide <https://mongoengine-odm.readthedocs.io/guide/index.html>`_, and <https://mongoengine-odm.readthedocs.io/guide/index.html>`_ and an `API reference
an `API reference <https://mongoengine-odm.readthedocs.io/apireference.html>`_. <https://mongoengine-odm.readthedocs.io/apireference.html>`_.
Supported MongoDB Versions
==========================
MongoEngine is currently tested against MongoDB v2.4, v2.6, and v3.0. Future
versions should be supported as well, but aren't actively tested at the moment.
Make sure to open an issue or submit a pull request if you experience any
problems with MongoDB v3.2+.
Installation Installation
============ ============
We recommend the use of `virtualenv <https://virtualenv.pypa.io/>`_ and of We recommend the use of `virtualenv <https://virtualenv.pypa.io/>`_ and of
`pip <https://pip.pypa.io/>`_. You can then use ``pip install -U mongoengine``. `pip <https://pip.pypa.io/>`_. You can then use ``pip install -U mongoengine``.
You may also have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_ You may also have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_ and thus
and thus you can use ``easy_install -U mongoengine``. Otherwise, you can download the you can use ``easy_install -U mongoengine``. Otherwise, you can download the
source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and run ``python source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and run ``python
setup.py install``. setup.py install``.
Dependencies Dependencies
============ ============
All of the dependencies can easily be installed via `pip <https://pip.pypa.io/>`_.
At the very least, you'll need these two packages to use MongoEngine:
- pymongo>=2.7.1 - pymongo>=2.7.1
- six>=1.10.0 - sphinx (optional - for documentation generation)
If you utilize a ``DateTimeField``, you might also use a more flexible date parser:
Optional Dependencies
---------------------
- **Image Fields**: Pillow>=2.0.0
- dateutil>=2.1.0 - dateutil>=2.1.0
If you need to use an ``ImageField`` or ``ImageGridFsProxy``: .. note
MongoEngine always runs it's test suite against the latest patch version of each dependecy. e.g.: PyMongo 3.0.1
- Pillow>=2.0.0
Examples Examples
======== ========
@@ -67,7 +57,7 @@ Some simple examples of what MongoEngine code looks like:
class BlogPost(Document): class BlogPost(Document):
title = StringField(required=True, max_length=200) title = StringField(required=True, max_length=200)
posted = DateTimeField(default=datetime.datetime.utcnow) posted = DateTimeField(default=datetime.datetime.now)
tags = ListField(StringField(max_length=50)) tags = ListField(StringField(max_length=50))
meta = {'allow_inheritance': True} meta = {'allow_inheritance': True}
@@ -97,28 +87,27 @@ Some simple examples of what MongoEngine code looks like:
... print ... print
... ...
# Count all blog posts and its subtypes >>> len(BlogPost.objects)
>>> BlogPost.objects.count()
2 2
>>> TextPost.objects.count() >>> len(TextPost.objects)
1 1
>>> LinkPost.objects.count() >>> len(LinkPost.objects)
1 1
# Count tagged posts # Find tagged posts
>>> BlogPost.objects(tags='mongoengine').count() >>> len(BlogPost.objects(tags='mongoengine'))
2 2
>>> BlogPost.objects(tags='mongodb').count() >>> len(BlogPost.objects(tags='mongodb'))
1 1
Tests Tests
===== =====
To run the test suite, ensure you are running a local instance of MongoDB on To run the test suite, ensure you are running a local instance of MongoDB on
the standard port and have ``nose`` installed. Then, run ``python setup.py nosetests``. the standard port and have ``nose`` installed. Then, run: ``python setup.py nosetests``.
To run the test suite on every supported Python and PyMongo version, you can To run the test suite on every supported Python version and every supported PyMongo version,
use ``tox``. You'll need to make sure you have each supported Python version you can use ``tox``.
installed in your environment and then: tox and each supported Python version should be installed in your environment:
.. code-block:: shell .. code-block:: shell
@@ -127,16 +116,13 @@ installed in your environment and then:
# Run the test suites # Run the test suites
$ tox $ tox
If you wish to run a subset of tests, use the nosetests convention: If you wish to run one single or selected tests, use the nosetest convention. It will find the folder,
eventually the file, go to the TestClass specified after the colon and eventually right to the single test.
Also use the -s argument if you want to print out whatever or access pdb while testing.
.. code-block:: shell .. code-block:: shell
# Run all the tests in a particular test file $ python setup.py nosetests --tests tests/fields/fields.py:FieldTest.test_cls_field -s
$ python setup.py nosetests --tests tests/fields/fields.py
# Run only particular test class in that file
$ python setup.py nosetests --tests tests/fields/fields.py:FieldTest
# Use the -s option if you want to print some debug statements or use pdb
$ python setup.py nosetests --tests tests/fields/fields.py:FieldTest -s
Community Community
========= =========
@@ -144,7 +130,8 @@ Community
<http://groups.google.com/group/mongoengine-users>`_ <http://groups.google.com/group/mongoengine-users>`_
- `MongoEngine Developers mailing list - `MongoEngine Developers mailing list
<http://groups.google.com/group/mongoengine-dev>`_ <http://groups.google.com/group/mongoengine-dev>`_
- `#mongoengine IRC channel <http://webchat.freenode.net/?channels=mongoengine>`_
Contributing Contributing
============ ============
We welcome contributions! See the `Contribution guidelines <https://github.com/MongoEngine/mongoengine/blob/master/CONTRIBUTING.rst>`_ We welcome contributions! see the `Contribution guidelines <https://github.com/MongoEngine/mongoengine/blob/master/CONTRIBUTING.rst>`_

View File

@@ -4,10 +4,7 @@ Changelog
Development Development
=========== ===========
- (Fill this out as you fix issues and develop your features). - (Fill this out as you fix issues and develop you features).
- Fixed using sets in field choices #1481
- POTENTIAL BREAKING CHANGE: Fixed limit/skip/hint/batch_size chaining #1476
- POTENTIAL BREAKING CHANGE: Changed a public `QuerySet.clone_into` method to a private `QuerySet._clone_into` #1476
- Fixed connecting to a replica set with PyMongo 2.x #1436 - Fixed connecting to a replica set with PyMongo 2.x #1436
- Fixed an obscure error message when filtering by `field__in=non_iterable`. #1237 - Fixed an obscure error message when filtering by `field__in=non_iterable`. #1237
@@ -16,7 +13,6 @@ Changes in 0.11.0
- BREAKING CHANGE: Renamed `ConnectionError` to `MongoEngineConnectionError` since the former is a built-in exception name in Python v3.x. #1428 - BREAKING CHANGE: Renamed `ConnectionError` to `MongoEngineConnectionError` since the former is a built-in exception name in Python v3.x. #1428
- BREAKING CHANGE: Dropped Python 2.6 support. #1428 - BREAKING CHANGE: Dropped Python 2.6 support. #1428
- BREAKING CHANGE: `from mongoengine.base import ErrorClass` won't work anymore for any error from `mongoengine.errors` (e.g. `ValidationError`). Use `from mongoengine.errors import ErrorClass instead`. #1428 - BREAKING CHANGE: `from mongoengine.base import ErrorClass` won't work anymore for any error from `mongoengine.errors` (e.g. `ValidationError`). Use `from mongoengine.errors import ErrorClass instead`. #1428
- BREAKING CHANGE: Accessing a broken reference will raise a `DoesNotExist` error. In the past it used to return `None`. #1334
- Fixed absent rounding for DecimalField when `force_string` is set. #1103 - Fixed absent rounding for DecimalField when `force_string` is set. #1103
Changes in 0.10.8 Changes in 0.10.8

View File

@@ -33,7 +33,7 @@ the :attr:`host` to
 corresponding parameters in :func:`~mongoengine.connect`: ::

     connect(
-        db='test',
+        name='test',
         username='user',
         password='12345',
         host='mongodb://admin:qwerty@localhost/production'

View File

@@ -150,7 +150,7 @@ arguments can be set on all fields:
.. note:: If set, this field is also accessible through the `pk` field. .. note:: If set, this field is also accessible through the `pk` field.
:attr:`choices` (Default: None) :attr:`choices` (Default: None)
An iterable (e.g. list, tuple or set) of choices to which the value of this An iterable (e.g. a list or tuple) of choices to which the value of this
field should be limited. field should be limited.
Can be either be a nested tuples of value (stored in mongo) and a Can be either be a nested tuples of value (stored in mongo) and a
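As an aside, both accepted shapes of :attr:`choices` described above can be sketched briefly. The model below is purely illustrative, and the set form relies on a MongoEngine version that accepts sets as choices (see the changelog entry about sets in field choices); no database connection is needed for validation.

.. code-block:: python

    from mongoengine import Document, StringField, ValidationError

    class Shirt(Document):
        # Flat iterable of allowed values (a set works like a list or tuple here)
        size = StringField(choices={'S', 'M', 'L'})
        # Nested tuples of (value stored in mongo, human-readable label)
        style = StringField(choices=(('C', 'Classic'), ('S', 'Slim')))

    Shirt(size='M', style='C').validate()    # passes

    try:
        Shirt(size='XS', style='C').validate()
    except ValidationError as exc:
        print(exc)                           # 'XS' is not an allowed choice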
@@ -214,8 +214,8 @@ document class as the first argument::
Dictionary Fields Dictionary Fields
----------------- -----------------
Often, an embedded document may be used instead of a dictionary generally Often, an embedded document may be used instead of a dictionary generally
embedded documents are recommended as dictionaries dont support validation embedded documents are recommended as dictionaries dont support validation
or custom field types. However, sometimes you will not know the structure of what you want to or custom field types. However, sometimes you will not know the structure of what you want to
store; in this situation a :class:`~mongoengine.fields.DictField` is appropriate:: store; in this situation a :class:`~mongoengine.fields.DictField` is appropriate::
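To make the trade-off described above concrete, here is a small illustrative sketch (class names are made up): the embedded document gives per-field validation, while the ``DictField`` accepts any structure.

.. code-block:: python

    from mongoengine import (Document, EmbeddedDocument, EmbeddedDocumentField,
                             DictField, StringField)

    class SurveyAnswer(EmbeddedDocument):
        # Validated, typed structure
        question = StringField(required=True)
        answer = StringField(required=True)

    class StructuredSurvey(Document):
        answers = EmbeddedDocumentField(SurveyAnswer)

    class FreeFormSurvey(Document):
        # No validation of the dictionary's contents
        answers = DictField()

    StructuredSurvey(answers=SurveyAnswer(question='q1', answer='yes')).validate()
    FreeFormSurvey(answers={'q1': 'yes', 'extra': 42}).validate()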
@@ -361,6 +361,11 @@ Its value can take any of the following constants:
In Django, be sure to put all apps that have such delete rule declarations in In Django, be sure to put all apps that have such delete rule declarations in
their :file:`models.py` in the :const:`INSTALLED_APPS` tuple. their :file:`models.py` in the :const:`INSTALLED_APPS` tuple.
.. warning::
Signals are not triggered when doing cascading updates / deletes - if this
is required you must manually handle the update / delete.
Generic reference fields Generic reference fields
'''''''''''''''''''''''' ''''''''''''''''''''''''
A second kind of reference field also exists, A second kind of reference field also exists,

View File

@@ -142,4 +142,11 @@ cleaner looking while still allowing manual execution of the callback::
modified = DateTimeField() modified = DateTimeField()
ReferenceFields and Signals
---------------------------
Currently `reverse_delete_rule` does not trigger signals on the other part of
the relationship. If this is required you must manually handle the
reverse deletion.
.. _blinker: http://pypi.python.org/pypi/blinker .. _blinker: http://pypi.python.org/pypi/blinker
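A hedged sketch of what "manually handle the reverse deletion" can look like in practice; the models, the chosen signal, and the helper function are illustrative only (signals require the ``blinker`` package).

.. code-block:: python

    from mongoengine import CASCADE, Document, ReferenceField, StringField, signals

    class Author(Document):
        name = StringField()

    class Book(Document):
        author = ReferenceField(Author, reverse_delete_rule=CASCADE)

    def on_book_delete(sender, document, **kwargs):
        # Fires for explicit Book.delete() calls, but NOT for books removed by
        # the CASCADE rule when an Author is deleted -- the limitation above.
        print('deleting book %s' % document.id)

    signals.pre_delete.connect(on_book_delete, sender=Book)

    def delete_author(author):
        # Manual workaround: delete related documents explicitly so their
        # signals fire, then delete the author itself.
        for book in Book.objects(author=author):
            book.delete()
        author.delete()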

View File

@@ -2,20 +2,6 @@
Upgrading Upgrading
######### #########
Development
***********
(Fill this out whenever you introduce breaking changes to MongoEngine)
This release includes various fixes for the `BaseQuerySet` methods and how they
are chained together. Since version 0.10.1 applying limit/skip/hint/batch_size
to an already-existing queryset wouldn't modify the underlying PyMongo cursor.
This has been fixed now, so you'll need to make sure that your code didn't rely
on the broken implementation.
Additionally, a public `BaseQuerySet.clone_into` has been renamed to a private
`_clone_into`. If you directly used that method in your code, you'll need to
rename its occurrences.
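To make the two changes described above concrete, a brief sketch (the model and database name are illustrative, and a local ``mongod`` is assumed):

.. code-block:: python

    from mongoengine import Document, IntField, connect

    connect('upgrade_demo')  # illustrative database name

    class Item(Document):
        value = IntField()

    qs = Item.objects.order_by('value')

    # After the fix, chaining limit/skip/hint/batch_size onto an existing
    # queryset also updates its underlying PyMongo cursor, so the options
    # take effect even if the cursor was already created.
    first_page = list(qs.limit(10))
    second_page = list(qs.skip(10).limit(10))

    # The public clone_into() helper is now the private _clone_into(); code
    # that called it directly needs to be renamed accordingly:
    #   new_qs = qs.clone_into(other_qs)     # before
    #   new_qs = qs._clone_into(other_qs)    # after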
0.11.0 0.11.0
****** ******
This release includes a major rehaul of MongoEngine's code quality and This release includes a major rehaul of MongoEngine's code quality and

View File

@@ -5,7 +5,7 @@ __all__ = ('UPDATE_OPERATORS', 'get_document', '_document_registry')
 UPDATE_OPERATORS = set(['set', 'unset', 'inc', 'dec', 'pop', 'push',
                         'push_all', 'pull', 'pull_all', 'add_to_set',
-                        'set_on_insert', 'min', 'max', 'rename'])
+                        'set_on_insert', 'min', 'max'])

 _document_registry = {}
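The practical effect of keeping ``'rename'`` in ``UPDATE_OPERATORS`` above is that atomic updates can use MongoDB's ``$rename`` operator. A sketch, assuming a local ``mongod`` and mirroring the ``Person`` model used in the test suite:

.. code-block:: python

    from mongoengine import Document, StringField, connect

    connect('rename_demo')  # illustrative database name

    class Person(Document):
        name = StringField()

    doc = Person(name='John').save()

    # Translates to a {'$rename': {'name': 'first_name'}} modifier, so the key
    # changes in the stored document even though the class still declares `name`.
    doc.update(rename__name='first_name')

    raw = Person._get_collection().find_one({'_id': doc.pk})
    print(raw.get('first_name'))  # 'John'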

View File

@@ -429,7 +429,7 @@ class StrictDict(object):
     def __eq__(self, other):
         return self.items() == other.items()

-    def __ne__(self, other):
+    def __neq__(self, other):
         return self.items() != other.items()

     @classmethod

View File

@@ -402,11 +402,9 @@ class BaseDocument(object):
             raise ValidationError(message, errors=errors)

     def to_json(self, *args, **kwargs):
-        """Convert this document to JSON.
-
-        :param use_db_field: Serialize field names as they appear in
-            MongoDB (as opposed to attribute names on this document).
-            Defaults to True.
+        """Converts a document to JSON.
+        :param use_db_field: Set to True by default but enables the output of the json structure with the field names
+            and not the mongodb store db_names in case of set to False
         """
         use_db_field = kwargs.pop('use_db_field', True)
         return json_util.dumps(self.to_mongo(use_db_field), *args, **kwargs)
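Both versions of the docstring describe the same switch; a minimal sketch of its effect (the class and ``db_field`` value are illustrative, and no database connection is required):

.. code-block:: python

    from mongoengine import Document, StringField

    class Profile(Document):
        first_name = StringField(db_field='fn')

    p = Profile(first_name='Ada')

    # Default: keys are serialized as they are stored in MongoDB (db_field names).
    print(p.to_json())                     # ... "fn": "Ada" ...

    # With use_db_field=False the document's attribute names are used instead.
    print(p.to_json(use_db_field=False))   # ... "first_name": "Ada" ...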

View File

@@ -41,7 +41,7 @@ class BaseField(object):
""" """
:param db_field: The database field to store this field in :param db_field: The database field to store this field in
(defaults to the name of the field) (defaults to the name of the field)
:param name: Deprecated - use db_field :param name: Depreciated - use db_field
:param required: If the field is required. Whether it has to have a :param required: If the field is required. Whether it has to have a
value or not. Defaults to False. value or not. Defaults to False.
:param default: (optional) The default value for this field if no value :param default: (optional) The default value for this field if no value
@@ -81,17 +81,6 @@ class BaseField(object):
self.sparse = sparse self.sparse = sparse
self._owner_document = None self._owner_document = None
# Validate the db_field
if isinstance(self.db_field, six.string_types) and (
'.' in self.db_field or
'\0' in self.db_field or
self.db_field.startswith('$')
):
raise ValueError(
'field names cannot contain dots (".") or null characters '
'("\\0"), and they must not start with a dollar sign ("$").'
)
# Detect and report conflicts between metadata and base properties. # Detect and report conflicts between metadata and base properties.
conflicts = set(dir(self)) & set(kwargs) conflicts = set(dir(self)) & set(kwargs)
if conflicts: if conflicts:
@@ -193,8 +182,7 @@ class BaseField(object):
EmbeddedDocument = _import_class('EmbeddedDocument') EmbeddedDocument = _import_class('EmbeddedDocument')
choice_list = self.choices choice_list = self.choices
if isinstance(next(iter(choice_list)), (list, tuple)): if isinstance(choice_list[0], (list, tuple)):
# next(iter) is useful for sets
choice_list = [k for k, _ in choice_list] choice_list = [k for k, _ in choice_list]
# Choices which are other types of Documents # Choices which are other types of Documents
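For reference, the ``db_field`` validation block above rejects reserved characters at class-definition time; a short sketch of what that looks like from user code (the class name is illustrative):

.. code-block:: python

    from mongoengine import Document, StringField

    # With the validation in place, each of these definitions raises ValueError
    # as soon as the offending StringField(...) call is evaluated.
    for bad_name in ('user.name', '$name', 'name\0'):
        try:
            class User(Document):
                name = StringField(db_field=bad_name)
        except ValueError as exc:
            print('%r -> %s' % (bad_name, exc))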

View File

@@ -51,9 +51,7 @@ def register_connection(alias, name=None, host=None, port=None,
MONGODB-CR (MongoDB Challenge Response protocol) for older servers. MONGODB-CR (MongoDB Challenge Response protocol) for older servers.
:param is_mock: explicitly use mongomock for this connection :param is_mock: explicitly use mongomock for this connection
(can also be done by using `mongomock://` as db host prefix) (can also be done by using `mongomock://` as db host prefix)
:param kwargs: ad-hoc parameters to be passed into the pymongo driver, :param kwargs: allow ad-hoc parameters to be passed into the pymongo driver
for example maxpoolsize, tz_aware, etc. See the documentation
for pymongo's `MongoClient` for a full list.
.. versionchanged:: 0.10.6 - added mongomock support .. versionchanged:: 0.10.6 - added mongomock support
""" """
@@ -243,12 +241,9 @@ def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs):
running on the default port on localhost. If authentication is needed, running on the default port on localhost. If authentication is needed,
provide username and password arguments as well. provide username and password arguments as well.
Multiple databases are supported by using aliases. Provide a separate Multiple databases are supported by using aliases. Provide a separate
`alias` to connect to a different instance of :program:`mongod`. `alias` to connect to a different instance of :program:`mongod`.
See the docstring for `register_connection` for more details about all
supported kwargs.
.. versionchanged:: 0.6 - added multiple database support. .. versionchanged:: 0.6 - added multiple database support.
""" """
if alias not in _connections: if alias not in _connections:
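An illustrative sketch of the keyword pass-through that the ``register_connection`` docstring above mentions; the database name and option values are made up, and the extra keyword arguments are simply forwarded to ``pymongo.MongoClient``.

.. code-block:: python

    from mongoengine import connect

    connect(
        db='example_db',                        # illustrative database name
        host='mongodb://localhost:27017/example_db',
        maxpoolsize=50,                         # forwarded to MongoClient
        tz_aware=True,                          # forwarded to MongoClient
    )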

View File

@@ -332,20 +332,68 @@ class Document(BaseDocument):
signals.pre_save_post_validation.send(self.__class__, document=self, signals.pre_save_post_validation.send(self.__class__, document=self,
created=created, **signal_kwargs) created=created, **signal_kwargs)
if self._meta.get('auto_create_index', True):
self.ensure_indexes()
try: try:
# Save a new document or update an existing one collection = self._get_collection()
if self._meta.get('auto_create_index', True):
self.ensure_indexes()
if created: if created:
object_id = self._save_create(doc, force_insert, write_concern) if force_insert:
object_id = collection.insert(doc, **write_concern)
else:
object_id = collection.save(doc, **write_concern)
# In PyMongo 3.0, the save() call calls internally the _update() call
# but they forget to return the _id value passed back, therefore getting it back here
# Correct behaviour in 2.X and in 3.0.1+ versions
if not object_id and pymongo.version_tuple == (3, 0):
pk_as_mongo_obj = self._fields.get(self._meta['id_field']).to_mongo(self.pk)
object_id = (
self._qs.filter(pk=pk_as_mongo_obj).first() and
self._qs.filter(pk=pk_as_mongo_obj).first().pk
) # TODO doesn't this make 2 queries?
else: else:
object_id, created = self._save_update(doc, save_condition, object_id = doc['_id']
write_concern) updates, removals = self._delta()
# Need to add shard key to query, or you get an error
if save_condition is not None:
select_dict = transform.query(self.__class__,
**save_condition)
else:
select_dict = {}
select_dict['_id'] = object_id
shard_key = self._meta.get('shard_key', tuple())
for k in shard_key:
path = self._lookup_field(k.split('.'))
actual_key = [p.db_field for p in path]
val = doc
for ak in actual_key:
val = val[ak]
select_dict['.'.join(actual_key)] = val
def is_new_object(last_error):
if last_error is not None:
updated = last_error.get('updatedExisting')
if updated is not None:
return not updated
return created
update_query = {}
if updates:
update_query['$set'] = updates
if removals:
update_query['$unset'] = removals
if updates or removals:
upsert = save_condition is None
last_error = collection.update(select_dict, update_query,
upsert=upsert, **write_concern)
if not upsert and last_error['n'] == 0:
raise SaveConditionError('Race condition preventing'
' document update detected')
created = is_new_object(last_error)
if cascade is None: if cascade is None:
cascade = (self._meta.get('cascade', False) or cascade = self._meta.get(
cascade_kwargs is not None) 'cascade', False) or cascade_kwargs is not None
if cascade: if cascade:
kwargs = { kwargs = {
@@ -358,7 +406,6 @@ class Document(BaseDocument):
kwargs.update(cascade_kwargs) kwargs.update(cascade_kwargs)
kwargs['_refs'] = _refs kwargs['_refs'] = _refs
self.cascade_save(**kwargs) self.cascade_save(**kwargs)
except pymongo.errors.DuplicateKeyError as err: except pymongo.errors.DuplicateKeyError as err:
message = u'Tried to save duplicate unique keys (%s)' message = u'Tried to save duplicate unique keys (%s)'
raise NotUniqueError(message % six.text_type(err)) raise NotUniqueError(message % six.text_type(err))
@@ -371,91 +418,16 @@ class Document(BaseDocument):
raise NotUniqueError(message % six.text_type(err)) raise NotUniqueError(message % six.text_type(err))
raise OperationError(message % six.text_type(err)) raise OperationError(message % six.text_type(err))
# Make sure we store the PK on this document now that it's saved
id_field = self._meta['id_field'] id_field = self._meta['id_field']
if created or id_field not in self._meta.get('shard_key', []): if created or id_field not in self._meta.get('shard_key', []):
self[id_field] = self._fields[id_field].to_python(object_id) self[id_field] = self._fields[id_field].to_python(object_id)
signals.post_save.send(self.__class__, document=self, signals.post_save.send(self.__class__, document=self,
created=created, **signal_kwargs) created=created, **signal_kwargs)
self._clear_changed_fields() self._clear_changed_fields()
self._created = False self._created = False
return self return self
def _save_create(self, doc, force_insert, write_concern):
"""Save a new document.
Helper method, should only be used inside save().
"""
collection = self._get_collection()
if force_insert:
return collection.insert(doc, **write_concern)
object_id = collection.save(doc, **write_concern)
# In PyMongo 3.0, the save() call calls internally the _update() call
# but they forget to return the _id value passed back, therefore getting it back here
# Correct behaviour in 2.X and in 3.0.1+ versions
if not object_id and pymongo.version_tuple == (3, 0):
pk_as_mongo_obj = self._fields.get(self._meta['id_field']).to_mongo(self.pk)
object_id = (
self._qs.filter(pk=pk_as_mongo_obj).first() and
self._qs.filter(pk=pk_as_mongo_obj).first().pk
) # TODO doesn't this make 2 queries?
return object_id
def _save_update(self, doc, save_condition, write_concern):
"""Update an existing document.
Helper method, should only be used inside save().
"""
collection = self._get_collection()
object_id = doc['_id']
created = False
select_dict = {}
if save_condition is not None:
select_dict = transform.query(self.__class__, **save_condition)
select_dict['_id'] = object_id
# Need to add shard key to query, or you get an error
shard_key = self._meta.get('shard_key', tuple())
for k in shard_key:
path = self._lookup_field(k.split('.'))
actual_key = [p.db_field for p in path]
val = doc
for ak in actual_key:
val = val[ak]
select_dict['.'.join(actual_key)] = val
updates, removals = self._delta()
update_query = {}
if updates:
update_query['$set'] = updates
if removals:
update_query['$unset'] = removals
if updates or removals:
upsert = save_condition is None
last_error = collection.update(select_dict, update_query,
upsert=upsert, **write_concern)
if not upsert and last_error['n'] == 0:
raise SaveConditionError('Race condition preventing'
' document update detected')
if last_error is not None:
updated_existing = last_error.get('updatedExisting')
if updated_existing is False:
created = True
# !!! This is bad, means we accidentally created a new,
# potentially corrupted document. See
# https://github.com/MongoEngine/mongoengine/issues/564
return object_id, created
def cascade_save(self, **kwargs): def cascade_save(self, **kwargs):
"""Recursively save any references and generic references on the """Recursively save any references and generic references on the
document. document.
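As background for the ``save_condition`` handling shown in both versions above, a usage sketch; the model and database name are illustrative, a local ``mongod`` is assumed, and ``SaveConditionError`` is the exception raised when the conditional update matches nothing.

.. code-block:: python

    from mongoengine import Document, IntField, StringField, connect
    from mongoengine.errors import SaveConditionError

    connect('save_condition_demo')  # illustrative database name

    class Page(Document):
        title = StringField()
        version = IntField(default=0)

    page = Page(title='draft', version=1).save()

    # Optimistic concurrency: only persist if the stored version still matches.
    page.title = 'published'
    page.version = 2
    try:
        page.save(save_condition={'version': 1})
    except SaveConditionError:
        # The update query matched nothing (n == 0) and no upsert is attempted
        # when a condition is given, so a race was detected.
        pass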

View File

@@ -50,8 +50,8 @@ class FieldDoesNotExist(Exception):
     or an :class:`~mongoengine.EmbeddedDocument`.

     To avoid this behavior on data loading,
-    you should set the :attr:`strict` to ``False``
-    in the :attr:`meta` dictionary.
+    you should the :attr:`strict` to ``False``
+    in the :attr:`meta` dictionnary.
     """

View File

@@ -139,12 +139,12 @@ class URLField(StringField):
# Check first if the scheme is valid # Check first if the scheme is valid
scheme = value.split('://')[0].lower() scheme = value.split('://')[0].lower()
if scheme not in self.schemes: if scheme not in self.schemes:
self.error(u'Invalid scheme {} in URL: {}'.format(scheme, value)) self.error('Invalid scheme {} in URL: {}'.format(scheme, value))
return return
# Then check full URL # Then check full URL
if not self.url_regex.match(value): if not self.url_regex.match(value):
self.error(u'Invalid URL: {}'.format(value)) self.error('Invalid URL: {}'.format(value))
return return
@@ -888,6 +888,10 @@ class ReferenceField(BaseField):
Foo.register_delete_rule(Bar, 'foo', NULLIFY) Foo.register_delete_rule(Bar, 'foo', NULLIFY)
.. note ::
`reverse_delete_rule` does not trigger pre / post delete signals to be
triggered.
.. versionchanged:: 0.5 added `reverse_delete_rule` .. versionchanged:: 0.5 added `reverse_delete_rule`
""" """

View File

@@ -86,7 +86,6 @@ class BaseQuerySet(object):
self._batch_size = None self._batch_size = None
self.only_fields = [] self.only_fields = []
self._max_time_ms = None self._max_time_ms = None
self._comment = None
def __call__(self, q_obj=None, class_check=True, read_preference=None, def __call__(self, q_obj=None, class_check=True, read_preference=None,
**query): **query):
@@ -707,36 +706,39 @@ class BaseQuerySet(object):
with switch_db(self._document, alias) as cls: with switch_db(self._document, alias) as cls:
collection = cls._get_collection() collection = cls._get_collection()
return self._clone_into(self.__class__(self._document, collection)) return self.clone_into(self.__class__(self._document, collection))
def clone(self): def clone(self):
"""Create a copy of the current queryset.""" """Creates a copy of the current
return self._clone_into(self.__class__(self._document, self._collection_obj)) :class:`~mongoengine.queryset.QuerySet`
def _clone_into(self, new_qs): .. versionadded:: 0.5
"""Copy all of the relevant properties of this queryset to
a new queryset (which has to be an instance of
:class:`~mongoengine.queryset.base.BaseQuerySet`).
""" """
if not isinstance(new_qs, BaseQuerySet): return self.clone_into(self.__class__(self._document, self._collection_obj))
def clone_into(self, cls):
"""Creates a copy of the current
:class:`~mongoengine.queryset.base.BaseQuerySet` into another child class
"""
if not isinstance(cls, BaseQuerySet):
raise OperationError( raise OperationError(
'%s is not a subclass of BaseQuerySet' % new_qs.__name__) '%s is not a subclass of BaseQuerySet' % cls.__name__)
copy_props = ('_mongo_query', '_initial_query', '_none', '_query_obj', copy_props = ('_mongo_query', '_initial_query', '_none', '_query_obj',
'_where_clause', '_loaded_fields', '_ordering', '_snapshot', '_where_clause', '_loaded_fields', '_ordering', '_snapshot',
'_timeout', '_class_check', '_slave_okay', '_read_preference', '_timeout', '_class_check', '_slave_okay', '_read_preference',
'_iter', '_scalar', '_as_pymongo', '_as_pymongo_coerce', '_iter', '_scalar', '_as_pymongo', '_as_pymongo_coerce',
'_limit', '_skip', '_hint', '_auto_dereference', '_limit', '_skip', '_hint', '_auto_dereference',
'_search_text', 'only_fields', '_max_time_ms', '_comment') '_search_text', 'only_fields', '_max_time_ms')
for prop in copy_props: for prop in copy_props:
val = getattr(self, prop) val = getattr(self, prop)
setattr(new_qs, prop, copy.copy(val)) setattr(cls, prop, copy.copy(val))
if self._cursor_obj: if self._cursor_obj:
new_qs._cursor_obj = self._cursor_obj.clone() cls._cursor_obj = self._cursor_obj.clone()
return new_qs return cls
def select_related(self, max_depth=1): def select_related(self, max_depth=1):
"""Handles dereferencing of :class:`~bson.dbref.DBRef` objects or """Handles dereferencing of :class:`~bson.dbref.DBRef` objects or
@@ -758,11 +760,7 @@ class BaseQuerySet(object):
""" """
queryset = self.clone() queryset = self.clone()
queryset._limit = n if n != 0 else 1 queryset._limit = n if n != 0 else 1
# Return self to allow chaining
# If a cursor object has already been created, apply the limit to it.
if queryset._cursor_obj:
queryset._cursor_obj.limit(queryset._limit)
return queryset return queryset
def skip(self, n): def skip(self, n):
@@ -773,11 +771,6 @@ class BaseQuerySet(object):
""" """
queryset = self.clone() queryset = self.clone()
queryset._skip = n queryset._skip = n
# If a cursor object has already been created, apply the skip to it.
if queryset._cursor_obj:
queryset._cursor_obj.skip(queryset._skip)
return queryset return queryset
def hint(self, index=None): def hint(self, index=None):
@@ -795,11 +788,6 @@ class BaseQuerySet(object):
""" """
queryset = self.clone() queryset = self.clone()
queryset._hint = index queryset._hint = index
# If a cursor object has already been created, apply the hint to it.
if queryset._cursor_obj:
queryset._cursor_obj.hint(queryset._hint)
return queryset return queryset
def batch_size(self, size): def batch_size(self, size):
@@ -813,11 +801,6 @@ class BaseQuerySet(object):
""" """
queryset = self.clone() queryset = self.clone()
queryset._batch_size = size queryset._batch_size = size
# If a cursor object has already been created, apply the batch size to it.
if queryset._cursor_obj:
queryset._cursor_obj.batch_size(queryset._batch_size)
return queryset return queryset
def distinct(self, field): def distinct(self, field):
@@ -989,31 +972,13 @@ class BaseQuerySet(object):
def order_by(self, *keys): def order_by(self, *keys):
"""Order the :class:`~mongoengine.queryset.QuerySet` by the keys. The """Order the :class:`~mongoengine.queryset.QuerySet` by the keys. The
order may be specified by prepending each of the keys by a + or a -. order may be specified by prepending each of the keys by a + or a -.
Ascending order is assumed. If no keys are passed, existing ordering Ascending order is assumed.
is cleared instead.
:param keys: fields to order the query results by; keys may be :param keys: fields to order the query results by; keys may be
prefixed with **+** or **-** to determine the ordering direction prefixed with **+** or **-** to determine the ordering direction
""" """
queryset = self.clone() queryset = self.clone()
queryset._ordering = queryset._get_order_by(keys)
old_ordering = queryset._ordering
new_ordering = queryset._get_order_by(keys)
if queryset._cursor_obj:
# If a cursor object has already been created, apply the sort to it
if new_ordering:
queryset._cursor_obj.sort(new_ordering)
# If we're trying to clear a previous explicit ordering, we need
# to clear the cursor entirely (because PyMongo doesn't allow
# clearing an existing sort on a cursor).
elif old_ordering:
queryset._cursor_obj = None
queryset._ordering = new_ordering
return queryset return queryset
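To illustrate the ordering semantics implemented in the longer version above, a small sketch assuming a local ``mongod``; the model and database name are made up.

.. code-block:: python

    from mongoengine import Document, IntField, connect

    connect('ordering_demo')  # illustrative database name

    class Score(Document):
        points = IntField()
        meta = {'ordering': ['-points']}   # default ordering from the model meta

    for p in (3, 1, 2):
        Score(points=p).save()

    # An explicit order_by overrides the meta default.
    print([s.points for s in Score.objects.order_by('+points')])   # [1, 2, 3]

    # order_by() with no keys clears the ordering entirely, so results come
    # back unsorted rather than falling back to the meta default.
    print([s.points for s in Score.objects.order_by()])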
def comment(self, text): def comment(self, text):
@@ -1459,13 +1424,10 @@ class BaseQuerySet(object):
raise StopIteration raise StopIteration
raw_doc = self._cursor.next() raw_doc = self._cursor.next()
if self._as_pymongo: if self._as_pymongo:
return self._get_as_pymongo(raw_doc) return self._get_as_pymongo(raw_doc)
doc = self._document._from_son(raw_doc,
doc = self._document._from_son( _auto_dereference=self._auto_dereference, only_fields=self.only_fields)
raw_doc, _auto_dereference=self._auto_dereference,
only_fields=self.only_fields)
if self._scalar: if self._scalar:
return self._get_scalar(doc) return self._get_scalar(doc)
@@ -1475,6 +1437,7 @@ class BaseQuerySet(object):
def rewind(self): def rewind(self):
"""Rewind the cursor to its unevaluated state. """Rewind the cursor to its unevaluated state.
.. versionadded:: 0.3 .. versionadded:: 0.3
""" """
self._iter = False self._iter = False
@@ -1524,54 +1487,43 @@ class BaseQuerySet(object):
@property @property
def _cursor(self): def _cursor(self):
"""Return a PyMongo cursor object corresponding to this queryset.""" if self._cursor_obj is None:
# If _cursor_obj already exists, return it immediately. # In PyMongo 3+, we define the read preference on a collection
if self._cursor_obj is not None: # level, not a cursor level. Thus, we need to get a cloned
return self._cursor_obj # collection object using `with_options` first.
if IS_PYMONGO_3 and self._read_preference is not None:
self._cursor_obj = self._collection\
.with_options(read_preference=self._read_preference)\
.find(self._query, **self._cursor_args)
else:
self._cursor_obj = self._collection.find(self._query,
**self._cursor_args)
# Apply where clauses to cursor
if self._where_clause:
where_clause = self._sub_js_fields(self._where_clause)
self._cursor_obj.where(where_clause)
# Create a new PyMongo cursor. if self._ordering:
# XXX In PyMongo 3+, we define the read preference on a collection # Apply query ordering
# level, not a cursor level. Thus, we need to get a cloned collection self._cursor_obj.sort(self._ordering)
# object using `with_options` first. elif self._ordering is None and self._document._meta['ordering']:
if IS_PYMONGO_3 and self._read_preference is not None: # Otherwise, apply the ordering from the document model, unless
self._cursor_obj = self._collection\ # it's been explicitly cleared via order_by with no arguments
.with_options(read_preference=self._read_preference)\ order = self._get_order_by(self._document._meta['ordering'])
.find(self._query, **self._cursor_args) self._cursor_obj.sort(order)
else:
self._cursor_obj = self._collection.find(self._query,
**self._cursor_args)
# Apply "where" clauses to cursor
if self._where_clause:
where_clause = self._sub_js_fields(self._where_clause)
self._cursor_obj.where(where_clause)
# Apply ordering to the cursor. if self._limit is not None:
# XXX self._ordering can be equal to: self._cursor_obj.limit(self._limit)
# * None if we didn't explicitly call order_by on this queryset.
# * A list of PyMongo-style sorting tuples.
# * An empty list if we explicitly called order_by() without any
# arguments. This indicates that we want to clear the default
# ordering.
if self._ordering:
# explicit ordering
self._cursor_obj.sort(self._ordering)
elif self._ordering is None and self._document._meta['ordering']:
# default ordering
order = self._get_order_by(self._document._meta['ordering'])
self._cursor_obj.sort(order)
if self._limit is not None: if self._skip is not None:
self._cursor_obj.limit(self._limit) self._cursor_obj.skip(self._skip)
if self._skip is not None: if self._hint != -1:
self._cursor_obj.skip(self._skip) self._cursor_obj.hint(self._hint)
if self._hint != -1: if self._batch_size is not None:
self._cursor_obj.hint(self._hint) self._cursor_obj.batch_size(self._batch_size)
if self._batch_size is not None:
self._cursor_obj.batch_size(self._batch_size)
return self._cursor_obj return self._cursor_obj
@@ -1746,13 +1698,7 @@ class BaseQuerySet(object):
return ret return ret
def _get_order_by(self, keys): def _get_order_by(self, keys):
"""Given a list of MongoEngine-style sort keys, return a list """Creates a list of order by fields"""
of sorting tuples that can be applied to a PyMongo cursor. For
example:
>>> qs._get_order_by(['-last_name', 'first_name'])
[('last_name', -1), ('first_name', 1)]
"""
key_list = [] key_list = []
for key in keys: for key in keys:
if not key: if not key:
@@ -1765,19 +1711,17 @@ class BaseQuerySet(object):
direction = pymongo.ASCENDING direction = pymongo.ASCENDING
if key[0] == '-': if key[0] == '-':
direction = pymongo.DESCENDING direction = pymongo.DESCENDING
if key[0] in ('-', '+'): if key[0] in ('-', '+'):
key = key[1:] key = key[1:]
key = key.replace('__', '.') key = key.replace('__', '.')
try: try:
key = self._document._translate_field_name(key) key = self._document._translate_field_name(key)
except Exception: except Exception:
# TODO this exception should be more specific
pass pass
key_list.append((key, direction)) key_list.append((key, direction))
if self._cursor_obj and key_list:
self._cursor_obj.sort(key_list)
return key_list return key_list
def _get_scalar(self, doc): def _get_scalar(self, doc):
@@ -1875,21 +1819,10 @@ class BaseQuerySet(object):
return code return code
def _chainable_method(self, method_name, val): def _chainable_method(self, method_name, val):
"""Call a particular method on the PyMongo cursor call a particular chainable method
with the provided value.
"""
queryset = self.clone() queryset = self.clone()
method = getattr(queryset._cursor, method_name)
# Get an existing cursor object or create a new one method(val)
cursor = queryset._cursor
# Find the requested method on the cursor and call it with the
# provided value
getattr(cursor, method_name)(val)
# Cache the value on the queryset._{method_name}
setattr(queryset, '_' + method_name, val) setattr(queryset, '_' + method_name, val)
return queryset return queryset
# Deprecated # Deprecated

View File

@@ -136,15 +136,13 @@ class QuerySet(BaseQuerySet):
return self._len return self._len
def no_cache(self): def no_cache(self):
"""Convert to a non-caching queryset """Convert to a non_caching queryset
.. versionadded:: 0.8.3 Convert to non caching queryset .. versionadded:: 0.8.3 Convert to non caching queryset
""" """
if self._result_cache is not None: if self._result_cache is not None:
raise OperationError('QuerySet already cached') raise OperationError('QuerySet already cached')
return self.clone_into(QuerySetNoCache(self._document, self._collection))
return self._clone_into(QuerySetNoCache(self._document,
self._collection))
class QuerySetNoCache(BaseQuerySet): class QuerySetNoCache(BaseQuerySet):
@@ -155,7 +153,7 @@ class QuerySetNoCache(BaseQuerySet):
.. versionadded:: 0.8.3 Convert to caching queryset .. versionadded:: 0.8.3 Convert to caching queryset
""" """
return self._clone_into(QuerySet(self._document, self._collection)) return self.clone_into(QuerySet(self._document, self._collection))
def __repr__(self): def __repr__(self):
"""Provides the string representation of the QuerySet """Provides the string representation of the QuerySet

View File

@@ -2,14 +2,14 @@
import unittest import unittest
import sys import sys
from nose.plugins.skip import SkipTest
from datetime import datetime
import pymongo import pymongo
from mongoengine import * from nose.plugins.skip import SkipTest
from mongoengine.connection import get_db from datetime import datetime
from tests.utils import get_mongodb_version, needs_mongodb_v26 from mongoengine import *
from mongoengine.connection import get_db, get_connection
__all__ = ("IndexesTest", ) __all__ = ("IndexesTest", )
@@ -494,7 +494,8 @@ class IndexesTest(unittest.TestCase):
obj = Test(a=1) obj = Test(a=1)
obj.save() obj.save()
IS_MONGODB_3 = get_mongodb_version()[0] >= 3 connection = get_connection()
IS_MONGODB_3 = connection.server_info()['versionArray'][0] >= 3
# Need to be explicit about covered indexes as mongoDB doesn't know if # Need to be explicit about covered indexes as mongoDB doesn't know if
# the documents returned might have more keys in that here. # the documents returned might have more keys in that here.
@@ -732,6 +733,14 @@ class IndexesTest(unittest.TestCase):
Log.drop_collection() Log.drop_collection()
if pymongo.version_tuple[0] < 2 and pymongo.version_tuple[1] < 3:
raise SkipTest('pymongo needs to be 2.3 or higher for this test')
connection = get_connection()
version_array = connection.server_info()['versionArray']
if version_array[0] < 2 and version_array[1] < 2:
raise SkipTest('MongoDB needs to be 2.2 or higher for this test')
# Indexes are lazy so use list() to perform query # Indexes are lazy so use list() to perform query
list(Log.objects) list(Log.objects)
info = Log.objects._collection.index_information() info = Log.objects._collection.index_information()
@@ -865,8 +874,8 @@ class IndexesTest(unittest.TestCase):
info['provider_ids.foo_1_provider_ids.bar_1']['key']) info['provider_ids.foo_1_provider_ids.bar_1']['key'])
self.assertTrue(info['provider_ids.foo_1_provider_ids.bar_1']['sparse']) self.assertTrue(info['provider_ids.foo_1_provider_ids.bar_1']['sparse'])
@needs_mongodb_v26
def test_text_indexes(self): def test_text_indexes(self):
class Book(Document): class Book(Document):
title = DictField() title = DictField()
meta = { meta = {

View File

@@ -1232,19 +1232,6 @@ class InstanceTest(unittest.TestCase):
self.assertEqual(person.name, None) self.assertEqual(person.name, None)
self.assertEqual(person.age, None) self.assertEqual(person.age, None)
def test_update_rename_operator(self):
"""Test the $rename operator."""
coll = self.Person._get_collection()
doc = self.Person(name='John').save()
raw_doc = coll.find_one({'_id': doc.pk})
self.assertEqual(set(raw_doc.keys()), set(['_id', '_cls', 'name']))
doc.update(rename__name='first_name')
raw_doc = coll.find_one({'_id': doc.pk})
self.assertEqual(set(raw_doc.keys()),
set(['_id', '_cls', 'first_name']))
self.assertEqual(raw_doc['first_name'], 'John')
def test_inserts_if_you_set_the_pk(self): def test_inserts_if_you_set_the_pk(self):
p1 = self.Person(name='p1', id=bson.ObjectId()).save() p1 = self.Person(name='p1', id=bson.ObjectId()).save()
p2 = self.Person(name='p2') p2 = self.Person(name='p2')

View File

@@ -1,12 +1,13 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
import six
from nose.plugins.skip import SkipTest
import datetime import datetime
import unittest import unittest
import uuid import uuid
import math import math
import itertools import itertools
import re import re
from nose.plugins.skip import SkipTest
import six import six
try: try:
@@ -26,13 +27,21 @@ from mongoengine import *
from mongoengine.connection import get_db from mongoengine.connection import get_db
from mongoengine.base import (BaseDict, BaseField, EmbeddedDocumentList, from mongoengine.base import (BaseDict, BaseField, EmbeddedDocumentList,
_document_registry) _document_registry)
from mongoengine.errors import NotRegistered, DoesNotExist
from tests.utils import MongoDBTestCase
__all__ = ("FieldTest", "EmbeddedDocumentListFieldTestCase") __all__ = ("FieldTest", "EmbeddedDocumentListFieldTestCase")
class FieldTest(MongoDBTestCase): class FieldTest(unittest.TestCase):
def setUp(self):
connect(db='mongoenginetest')
self.db = get_db()
def tearDown(self):
self.db.drop_collection('fs.files')
self.db.drop_collection('fs.chunks')
self.db.drop_collection('mongoengine.counters')
def test_default_values_nothing_set(self): def test_default_values_nothing_set(self):
"""Ensure that default field values are used when creating a document. """Ensure that default field values are used when creating a document.
@@ -218,9 +227,9 @@ class FieldTest(MongoDBTestCase):
self.assertTrue(isinstance(ret.comp_dt_fld, datetime.datetime)) self.assertTrue(isinstance(ret.comp_dt_fld, datetime.datetime))
def test_not_required_handles_none_from_database(self): def test_not_required_handles_none_from_database(self):
"""Ensure that every field can handle null values from the """Ensure that every fields can handle null values from the database.
database.
""" """
class HandleNoneFields(Document): class HandleNoneFields(Document):
str_fld = StringField(required=True) str_fld = StringField(required=True)
int_fld = IntField(required=True) int_fld = IntField(required=True)
@@ -297,24 +306,6 @@ class FieldTest(MongoDBTestCase):
person.id = '497ce96f395f2f052a494fd4' person.id = '497ce96f395f2f052a494fd4'
person.validate() person.validate()
def test_db_field_validation(self):
"""Ensure that db_field doesn't accept invalid values."""
# dot in the name
with self.assertRaises(ValueError):
class User(Document):
name = StringField(db_field='user.name')
# name starting with $
with self.assertRaises(ValueError):
class User(Document):
name = StringField(db_field='$name')
# name containing a null character
with self.assertRaises(ValueError):
class User(Document):
name = StringField(db_field='name\0')
def test_string_validation(self): def test_string_validation(self):
"""Ensure that invalid values cannot be assigned to string fields. """Ensure that invalid values cannot be assigned to string fields.
""" """
@@ -341,12 +332,11 @@ class FieldTest(MongoDBTestCase):
person.validate() person.validate()
def test_url_validation(self): def test_url_validation(self):
"""Ensure that URLFields validate urls properly.""" """Ensure that URLFields validate urls properly.
"""
class Link(Document): class Link(Document):
url = URLField() url = URLField()
Link.drop_collection()
link = Link() link = Link()
link.url = 'google' link.url = 'google'
self.assertRaises(ValidationError, link.validate) self.assertRaises(ValidationError, link.validate)
@@ -354,27 +344,6 @@ class FieldTest(MongoDBTestCase):
link.url = 'http://www.google.com:8080' link.url = 'http://www.google.com:8080'
link.validate() link.validate()
def test_unicode_url_validation(self):
"""Ensure unicode URLs are validated properly."""
class Link(Document):
url = URLField()
Link.drop_collection()
link = Link()
link.url = u'http://привет.com'
# TODO fix URL validation - this *IS* a valid URL
# For now we just want to make sure that the error message is correct
try:
link.validate()
self.assertTrue(False)
except ValidationError as e:
self.assertEqual(
unicode(e),
u"ValidationError (Link:None) (Invalid URL: http://\u043f\u0440\u0438\u0432\u0435\u0442.com: ['url'])"
)
def test_url_scheme_validation(self): def test_url_scheme_validation(self):
"""Ensure that URLFields validate urls with specific schemes properly. """Ensure that URLFields validate urls with specific schemes properly.
""" """
@@ -1998,7 +1967,7 @@ class FieldTest(MongoDBTestCase):
self.assertEqual(content, User.objects.first().groups[0].content) self.assertEqual(content, User.objects.first().groups[0].content)
def test_reference_miss(self): def test_reference_miss(self):
"""Ensure an exception is raised when dereferencing unknown document """Ensure an exception is raised when dereferencing unknow document
""" """
class Foo(Document): class Foo(Document):
@@ -3199,42 +3168,26 @@ class FieldTest(MongoDBTestCase):
att.delete() att.delete()
self.assertEqual(0, Attachment.objects.count()) self.assertEqual(0, Attachment.objects.count())
def test_choices_allow_using_sets_as_choices(self): def test_choices_validation(self):
"""Ensure that sets can be used when setting choices """Ensure that value is in a container of allowed values.
""" """
class Shirt(Document): class Shirt(Document):
size = StringField(choices={'M', 'L'}) size = StringField(max_length=3, choices=(
('S', 'Small'), ('M', 'Medium'), ('L', 'Large'),
('XL', 'Extra Large'), ('XXL', 'Extra Extra Large')))
Shirt(size='M').validate() Shirt.drop_collection()
def test_choices_validation_allow_no_value(self):
"""Ensure that .validate passes and no value was provided
for a field setup with choices
"""
class Shirt(Document):
size = StringField(choices=('S', 'M'))
shirt = Shirt() shirt = Shirt()
shirt.validate() shirt.validate()
def test_choices_validation_accept_possible_value(self): shirt.size = "S"
"""Ensure that value is in a container of allowed values.
"""
class Shirt(Document):
size = StringField(choices=('S', 'M'))
shirt = Shirt(size='S')
shirt.validate() shirt.validate()
def test_choices_validation_reject_unknown_value(self): shirt.size = "XS"
"""Ensure that unallowed value are rejected upon validation self.assertRaises(ValidationError, shirt.validate)
"""
class Shirt(Document):
size = StringField(choices=('S', 'M'))
shirt = Shirt(size="XS") Shirt.drop_collection()
with self.assertRaises(ValidationError):
shirt.validate()
def test_choices_validation_documents(self): def test_choices_validation_documents(self):
""" """
@@ -4020,25 +3973,30 @@ class FieldTest(MongoDBTestCase):
"""Tests if a `FieldDoesNotExist` exception is raised when trying to """Tests if a `FieldDoesNotExist` exception is raised when trying to
instanciate a document with a field that's not defined. instanciate a document with a field that's not defined.
""" """
class Doc(Document):
foo = StringField()
with self.assertRaises(FieldDoesNotExist): class Doc(Document):
foo = StringField(db_field='f')
def test():
Doc(bar='test') Doc(bar='test')
self.assertRaises(FieldDoesNotExist, test)
def test_undefined_field_exception_with_strict(self): def test_undefined_field_exception_with_strict(self):
"""Tests if a `FieldDoesNotExist` exception is raised when trying to """Tests if a `FieldDoesNotExist` exception is raised when trying to
instanciate a document with a field that's not defined, instanciate a document with a field that's not defined,
even when strict is set to False. even when strict is set to False.
""" """
class Doc(Document): class Doc(Document):
foo = StringField() foo = StringField(db_field='f')
meta = {'strict': False} meta = {'strict': False}
with self.assertRaises(FieldDoesNotExist): def test():
Doc(bar='test') Doc(bar='test')
self.assertRaises(FieldDoesNotExist, test)
def test_long_field_is_considered_as_int64(self): def test_long_field_is_considered_as_int64(self):
""" """
Tests that long fields are stored as long in mongo, even if long value Tests that long fields are stored as long in mongo, even if long value
@@ -4053,13 +4011,12 @@ class FieldTest(MongoDBTestCase):
self.assertTrue(isinstance(doc.some_long, six.integer_types)) self.assertTrue(isinstance(doc.some_long, six.integer_types))
class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): class EmbeddedDocumentListFieldTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.db = connect(db='EmbeddedDocumentListFieldTestCase')
def setUp(self):
"""
Create two BlogPost entries in the database, each with
several EmbeddedDocuments.
"""
class Comments(EmbeddedDocument): class Comments(EmbeddedDocument):
author = StringField() author = StringField()
message = StringField() message = StringField()
@@ -4067,11 +4024,14 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase):
class BlogPost(Document): class BlogPost(Document):
comments = EmbeddedDocumentListField(Comments) comments = EmbeddedDocumentListField(Comments)
BlogPost.drop_collection() cls.Comments = Comments
cls.BlogPost = BlogPost
self.Comments = Comments
self.BlogPost = BlogPost
def setUp(self):
"""
Create two BlogPost entries in the database, each with
several EmbeddedDocuments.
"""
self.post1 = self.BlogPost(comments=[ self.post1 = self.BlogPost(comments=[
self.Comments(author='user1', message='message1'), self.Comments(author='user1', message='message1'),
self.Comments(author='user2', message='message1') self.Comments(author='user2', message='message1')
@@ -4083,6 +4043,13 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase):
self.Comments(author='user3', message='message1') self.Comments(author='user3', message='message1')
]).save() ]).save()
def tearDown(self):
self.BlogPost.drop_collection()
@classmethod
def tearDownClass(cls):
cls.db.drop_database('EmbeddedDocumentListFieldTestCase')
def test_no_keyword_filter(self): def test_no_keyword_filter(self):
""" """
Tests the filter method of a List of Embedded Documents Tests the filter method of a List of Embedded Documents
@@ -4440,8 +4407,7 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase):
my_list = ListField(EmbeddedDocumentField(EmbeddedWithUnique)) my_list = ListField(EmbeddedDocumentField(EmbeddedWithUnique))
A(my_list=[]).save() A(my_list=[]).save()
with self.assertRaises(NotUniqueError): self.assertRaises(NotUniqueError, lambda: A(my_list=[]).save())
A(my_list=[]).save()
class EmbeddedWithSparseUnique(EmbeddedDocument): class EmbeddedWithSparseUnique(EmbeddedDocument):
number = IntField(unique=True, sparse=True) number = IntField(unique=True, sparse=True)
@@ -4449,9 +4415,6 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase):
class B(Document): class B(Document):
my_list = ListField(EmbeddedDocumentField(EmbeddedWithSparseUnique)) my_list = ListField(EmbeddedDocumentField(EmbeddedWithSparseUnique))
A.drop_collection()
B.drop_collection()
B(my_list=[]).save() B(my_list=[]).save()
B(my_list=[]).save() B(my_list=[]).save()
@@ -4491,8 +4454,6 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase):
a_field = IntField() a_field = IntField()
c_field = IntField(custom_data=custom_data) c_field = IntField(custom_data=custom_data)
CustomData.drop_collection()
a1 = CustomData(a_field=1, c_field=2).save() a1 = CustomData(a_field=1, c_field=2).save()
self.assertEqual(2, a1.c_field) self.assertEqual(2, a1.c_field)
self.assertFalse(hasattr(a1.c_field, 'custom_data')) self.assertFalse(hasattr(a1.c_field, 'custom_data'))

@@ -18,13 +18,15 @@ try:
except ImportError: except ImportError:
HAS_PIL = False HAS_PIL = False
from tests.utils import MongoDBTestCase
TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png') TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png')
TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), 'mongodb_leaf.png') TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), 'mongodb_leaf.png')
class FileTest(MongoDBTestCase): class FileTest(unittest.TestCase):
def setUp(self):
connect(db='mongoenginetest')
self.db = get_db()
def tearDown(self): def tearDown(self):
self.db.drop_collection('fs.files') self.db.drop_collection('fs.files')

@@ -1,139 +1,105 @@
import datetime from datetime import datetime, timedelta
import unittest import unittest
from pymongo.errors import OperationFailure
from mongoengine import * from mongoengine import *
from mongoengine.connection import get_connection
from tests.utils import MongoDBTestCase, needs_mongodb_v3 from nose.plugins.skip import SkipTest
__all__ = ("GeoQueriesTest",) __all__ = ("GeoQueriesTest",)
class GeoQueriesTest(MongoDBTestCase): class GeoQueriesTest(unittest.TestCase):
def _create_event_data(self, point_field_class=GeoPointField): def setUp(self):
"""Create some sample data re-used in many of the tests below.""" connect(db='mongoenginetest')
def test_geospatial_operators(self):
"""Ensure that geospatial queries are working.
"""
class Event(Document): class Event(Document):
title = StringField() title = StringField()
date = DateTimeField() date = DateTimeField()
location = point_field_class() location = GeoPointField()
def __unicode__(self): def __unicode__(self):
return self.title return self.title
self.Event = Event
Event.drop_collection() Event.drop_collection()
event1 = Event.objects.create( event1 = Event(title="Coltrane Motion @ Double Door",
title="Coltrane Motion @ Double Door", date=datetime.now() - timedelta(days=1),
date=datetime.datetime.now() - datetime.timedelta(days=1), location=[-87.677137, 41.909889]).save()
location=[-87.677137, 41.909889]) event2 = Event(title="Coltrane Motion @ Bottom of the Hill",
event2 = Event.objects.create( date=datetime.now() - timedelta(days=10),
title="Coltrane Motion @ Bottom of the Hill", location=[-122.4194155, 37.7749295]).save()
date=datetime.datetime.now() - datetime.timedelta(days=10), event3 = Event(title="Coltrane Motion @ Empty Bottle",
location=[-122.4194155, 37.7749295]) date=datetime.now(),
event3 = Event.objects.create( location=[-87.686638, 41.900474]).save()
title="Coltrane Motion @ Empty Bottle",
date=datetime.datetime.now(),
location=[-87.686638, 41.900474])
return event1, event2, event3
def test_near(self):
"""Make sure the "near" operator works."""
event1, event2, event3 = self._create_event_data()
# find all events "near" pitchfork office, chicago. # find all events "near" pitchfork office, chicago.
# note that "near" will show the san francisco event, too, # note that "near" will show the san francisco event, too,
# although it sorts to last. # although it sorts to last.
events = self.Event.objects(location__near=[-87.67892, 41.9120459]) events = Event.objects(location__near=[-87.67892, 41.9120459])
self.assertEqual(events.count(), 3) self.assertEqual(events.count(), 3)
self.assertEqual(list(events), [event1, event3, event2]) self.assertEqual(list(events), [event1, event3, event2])
# ensure ordering is respected by "near"
events = self.Event.objects(location__near=[-87.67892, 41.9120459])
events = events.order_by("-date")
self.assertEqual(events.count(), 3)
self.assertEqual(list(events), [event3, event1, event2])
def test_near_and_max_distance(self):
"""Ensure the "max_distance" operator works alongside the "near"
operator.
"""
event1, event2, event3 = self._create_event_data()
# find events within 10 degrees of san francisco
point = [-122.415579, 37.7566023]
events = self.Event.objects(location__near=point,
location__max_distance=10)
self.assertEqual(events.count(), 1)
self.assertEqual(events[0], event2)
# $minDistance was added in MongoDB v2.6, but continued being buggy
# until v3.0; skip for older versions
@needs_mongodb_v3
def test_near_and_min_distance(self):
"""Ensure the "min_distance" operator works alongside the "near"
operator.
"""
event1, event2, event3 = self._create_event_data()
# find events at least 10 degrees away of san francisco
point = [-122.415579, 37.7566023]
events = self.Event.objects(location__near=point,
location__min_distance=10)
self.assertEqual(events.count(), 2)
def test_within_distance(self):
"""Make sure the "within_distance" operator works."""
event1, event2, event3 = self._create_event_data()
# find events within 5 degrees of pitchfork office, chicago # find events within 5 degrees of pitchfork office, chicago
point_and_distance = [[-87.67892, 41.9120459], 5] point_and_distance = [[-87.67892, 41.9120459], 5]
events = self.Event.objects( events = Event.objects(location__within_distance=point_and_distance)
location__within_distance=point_and_distance)
self.assertEqual(events.count(), 2) self.assertEqual(events.count(), 2)
events = list(events) events = list(events)
self.assertTrue(event2 not in events) self.assertTrue(event2 not in events)
self.assertTrue(event1 in events) self.assertTrue(event1 in events)
self.assertTrue(event3 in events) self.assertTrue(event3 in events)
# ensure ordering is respected by "near"
events = Event.objects(location__near=[-87.67892, 41.9120459])
events = events.order_by("-date")
self.assertEqual(events.count(), 3)
self.assertEqual(list(events), [event3, event1, event2])
# find events within 10 degrees of san francisco
point = [-122.415579, 37.7566023]
events = Event.objects(location__near=point, location__max_distance=10)
self.assertEqual(events.count(), 1)
self.assertEqual(events[0], event2)
# find events at least 10 degrees away of san francisco
point = [-122.415579, 37.7566023]
events = Event.objects(location__near=point, location__min_distance=10)
# The following real test passes on MongoDB 3 but minDistance seems
# buggy on older MongoDB versions
if get_connection().server_info()['versionArray'][0] > 2:
self.assertEqual(events.count(), 2)
else:
self.assertTrue(events.count() >= 2)
# find events within 10 degrees of san francisco # find events within 10 degrees of san francisco
point_and_distance = [[-122.415579, 37.7566023], 10] point_and_distance = [[-122.415579, 37.7566023], 10]
events = self.Event.objects( events = Event.objects(location__within_distance=point_and_distance)
location__within_distance=point_and_distance)
self.assertEqual(events.count(), 1) self.assertEqual(events.count(), 1)
self.assertEqual(events[0], event2) self.assertEqual(events[0], event2)
# find events within 1 degree of greenpoint, broolyn, nyc, ny # find events within 1 degree of greenpoint, broolyn, nyc, ny
point_and_distance = [[-73.9509714, 40.7237134], 1] point_and_distance = [[-73.9509714, 40.7237134], 1]
events = self.Event.objects( events = Event.objects(location__within_distance=point_and_distance)
location__within_distance=point_and_distance)
self.assertEqual(events.count(), 0) self.assertEqual(events.count(), 0)
# ensure ordering is respected by "within_distance" # ensure ordering is respected by "within_distance"
point_and_distance = [[-87.67892, 41.9120459], 10] point_and_distance = [[-87.67892, 41.9120459], 10]
events = self.Event.objects( events = Event.objects(location__within_distance=point_and_distance)
location__within_distance=point_and_distance)
events = events.order_by("-date") events = events.order_by("-date")
self.assertEqual(events.count(), 2) self.assertEqual(events.count(), 2)
self.assertEqual(events[0], event3) self.assertEqual(events[0], event3)
def test_within_box(self):
"""Ensure the "within_box" operator works."""
event1, event2, event3 = self._create_event_data()
# check that within_box works # check that within_box works
box = [(-125.0, 35.0), (-100.0, 40.0)] box = [(-125.0, 35.0), (-100.0, 40.0)]
events = self.Event.objects(location__within_box=box) events = Event.objects(location__within_box=box)
self.assertEqual(events.count(), 1) self.assertEqual(events.count(), 1)
self.assertEqual(events[0].id, event2.id) self.assertEqual(events[0].id, event2.id)
def test_within_polygon(self):
"""Ensure the "within_polygon" operator works."""
event1, event2, event3 = self._create_event_data()
polygon = [ polygon = [
(-87.694445, 41.912114), (-87.694445, 41.912114),
(-87.69084, 41.919395), (-87.69084, 41.919395),
@@ -141,7 +107,7 @@ class GeoQueriesTest(MongoDBTestCase):
(-87.654276, 41.911731), (-87.654276, 41.911731),
(-87.656164, 41.898061), (-87.656164, 41.898061),
] ]
events = self.Event.objects(location__within_polygon=polygon) events = Event.objects(location__within_polygon=polygon)
self.assertEqual(events.count(), 1) self.assertEqual(events.count(), 1)
self.assertEqual(events[0].id, event1.id) self.assertEqual(events[0].id, event1.id)
@@ -150,151 +116,13 @@ class GeoQueriesTest(MongoDBTestCase):
(-1.225891, 52.792797), (-1.225891, 52.792797),
(-4.40094, 53.389881) (-4.40094, 53.389881)
] ]
events = self.Event.objects(location__within_polygon=polygon2) events = Event.objects(location__within_polygon=polygon2)
self.assertEqual(events.count(), 0) self.assertEqual(events.count(), 0)
def test_2dsphere_near(self): def test_geo_spatial_embedded(self):
"""Make sure the "near" operator works with a PointField, which
corresponds to a 2dsphere index.
"""
event1, event2, event3 = self._create_event_data(
point_field_class=PointField
)
# find all events "near" pitchfork office, chicago.
# note that "near" will show the san francisco event, too,
# although it sorts to last.
events = self.Event.objects(location__near=[-87.67892, 41.9120459])
self.assertEqual(events.count(), 3)
self.assertEqual(list(events), [event1, event3, event2])
# ensure ordering is respected by "near"
events = self.Event.objects(location__near=[-87.67892, 41.9120459])
events = events.order_by("-date")
self.assertEqual(events.count(), 3)
self.assertEqual(list(events), [event3, event1, event2])
def test_2dsphere_near_and_max_distance(self):
"""Ensure the "max_distance" operator works alongside the "near"
operator with a 2dsphere index.
"""
event1, event2, event3 = self._create_event_data(
point_field_class=PointField
)
# find events within 10km of san francisco
point = [-122.415579, 37.7566023]
events = self.Event.objects(location__near=point,
location__max_distance=10000)
self.assertEqual(events.count(), 1)
self.assertEqual(events[0], event2)
# find events within 1km of greenpoint, broolyn, nyc, ny
events = self.Event.objects(location__near=[-73.9509714, 40.7237134],
location__max_distance=1000)
self.assertEqual(events.count(), 0)
# ensure ordering is respected by "near"
events = self.Event.objects(
location__near=[-87.67892, 41.9120459],
location__max_distance=10000
).order_by("-date")
self.assertEqual(events.count(), 2)
self.assertEqual(events[0], event3)
def test_2dsphere_geo_within_box(self):
"""Ensure the "geo_within_box" operator works with a 2dsphere
index.
"""
event1, event2, event3 = self._create_event_data(
point_field_class=PointField
)
# check that within_box works
box = [(-125.0, 35.0), (-100.0, 40.0)]
events = self.Event.objects(location__geo_within_box=box)
self.assertEqual(events.count(), 1)
self.assertEqual(events[0].id, event2.id)
def test_2dsphere_geo_within_polygon(self):
"""Ensure the "geo_within_polygon" operator works with a
2dsphere index.
"""
event1, event2, event3 = self._create_event_data(
point_field_class=PointField
)
polygon = [
(-87.694445, 41.912114),
(-87.69084, 41.919395),
(-87.681742, 41.927186),
(-87.654276, 41.911731),
(-87.656164, 41.898061),
]
events = self.Event.objects(location__geo_within_polygon=polygon)
self.assertEqual(events.count(), 1)
self.assertEqual(events[0].id, event1.id)
polygon2 = [
(-1.742249, 54.033586),
(-1.225891, 52.792797),
(-4.40094, 53.389881)
]
events = self.Event.objects(location__geo_within_polygon=polygon2)
self.assertEqual(events.count(), 0)
# $minDistance was added in MongoDB v2.6, but continued being buggy
# until v3.0; skip for older versions
@needs_mongodb_v3
def test_2dsphere_near_and_min_max_distance(self):
"""Ensure "min_distace" and "max_distance" operators work well
together with the "near" operator in a 2dsphere index.
"""
event1, event2, event3 = self._create_event_data(
point_field_class=PointField
)
# ensure min_distance and max_distance combine well
events = self.Event.objects(
location__near=[-87.67892, 41.9120459],
location__min_distance=1000,
location__max_distance=10000
).order_by("-date")
self.assertEqual(events.count(), 1)
self.assertEqual(events[0], event3)
# ensure ordering is respected by "near" with "min_distance"
events = self.Event.objects(
location__near=[-87.67892, 41.9120459],
location__min_distance=10000
).order_by("-date")
self.assertEqual(events.count(), 1)
self.assertEqual(events[0], event2)
def test_2dsphere_geo_within_center(self):
"""Make sure the "geo_within_center" operator works with a
2dsphere index.
"""
event1, event2, event3 = self._create_event_data(
point_field_class=PointField
)
# find events within 5 degrees of pitchfork office, chicago
point_and_distance = [[-87.67892, 41.9120459], 2]
events = self.Event.objects(
location__geo_within_center=point_and_distance)
self.assertEqual(events.count(), 2)
events = list(events)
self.assertTrue(event2 not in events)
self.assertTrue(event1 in events)
self.assertTrue(event3 in events)
def _test_embedded(self, point_field_class):
"""Helper test method ensuring given point field class works
well in an embedded document.
"""
class Venue(EmbeddedDocument): class Venue(EmbeddedDocument):
location = point_field_class() location = GeoPointField()
name = StringField() name = StringField()
class Event(Document): class Event(Document):
@@ -320,18 +148,16 @@ class GeoQueriesTest(MongoDBTestCase):
self.assertEqual(events.count(), 3) self.assertEqual(events.count(), 3)
self.assertEqual(list(events), [event1, event3, event2]) self.assertEqual(list(events), [event1, event3, event2])
def test_geo_spatial_embedded(self):
"""Make sure GeoPointField works properly in an embedded document."""
self._test_embedded(point_field_class=GeoPointField)
def test_2dsphere_point_embedded(self):
"""Make sure PointField works properly in an embedded document."""
self._test_embedded(point_field_class=PointField)
# Needs MongoDB > 2.6.4 https://jira.mongodb.org/browse/SERVER-14039
@needs_mongodb_v3
def test_spherical_geospatial_operators(self): def test_spherical_geospatial_operators(self):
"""Ensure that spherical geospatial queries are working.""" """Ensure that spherical geospatial queries are working
"""
# Needs MongoDB > 2.6.4 https://jira.mongodb.org/browse/SERVER-14039
connection = get_connection()
info = connection.test.command('buildInfo')
mongodb_version = tuple([int(i) for i in info['version'].split('.')])
if mongodb_version < (2, 6, 4):
raise SkipTest("Need MongoDB version 2.6.4+")
class Point(Document): class Point(Document):
location = GeoPointField() location = GeoPointField()
@@ -351,10 +177,7 @@ class GeoQueriesTest(MongoDBTestCase):
# Same behavior for _within_spherical_distance # Same behavior for _within_spherical_distance
points = Point.objects( points = Point.objects(
location__within_spherical_distance=[ location__within_spherical_distance=[[-122, 37.5], 60 / earth_radius]
[-122, 37.5],
60 / earth_radius
]
) )
self.assertEqual(points.count(), 2) self.assertEqual(points.count(), 2)
@@ -371,9 +194,14 @@ class GeoQueriesTest(MongoDBTestCase):
# Test query works with min_distance, being farer from one point # Test query works with min_distance, being farer from one point
points = Point.objects(location__near_sphere=[-122, 37.8], points = Point.objects(location__near_sphere=[-122, 37.8],
location__min_distance=60 / earth_radius) location__min_distance=60 / earth_radius)
self.assertEqual(points.count(), 1) # The following real test passes on MongoDB 3 but minDistance seems
far_point = points.first() # buggy on older MongoDB versions
self.assertNotEqual(close_point, far_point) if get_connection().server_info()['versionArray'][0] > 2:
self.assertEqual(points.count(), 1)
far_point = points.first()
self.assertNotEqual(close_point, far_point)
else:
self.assertTrue(points.count() >= 1)
# Finds both points, but orders the north point first because it's # Finds both points, but orders the north point first because it's
# closer to the reference point to the north. # closer to the reference point to the north.
@@ -392,15 +220,141 @@ class GeoQueriesTest(MongoDBTestCase):
# Finds only one point because only the first point is within 60km of # Finds only one point because only the first point is within 60km of
# the reference point to the south. # the reference point to the south.
points = Point.objects( points = Point.objects(
location__within_spherical_distance=[ location__within_spherical_distance=[[-122, 36.5], 60/earth_radius])
[-122, 36.5],
60 / earth_radius
]
)
self.assertEqual(points.count(), 1) self.assertEqual(points.count(), 1)
self.assertEqual(points[0].id, south_point.id) self.assertEqual(points[0].id, south_point.id)
def test_2dsphere_point(self):
class Event(Document):
title = StringField()
date = DateTimeField()
location = PointField()
def __unicode__(self):
return self.title
Event.drop_collection()
event1 = Event(title="Coltrane Motion @ Double Door",
date=datetime.now() - timedelta(days=1),
location=[-87.677137, 41.909889])
event1.save()
event2 = Event(title="Coltrane Motion @ Bottom of the Hill",
date=datetime.now() - timedelta(days=10),
location=[-122.4194155, 37.7749295]).save()
event3 = Event(title="Coltrane Motion @ Empty Bottle",
date=datetime.now(),
location=[-87.686638, 41.900474]).save()
# find all events "near" pitchfork office, chicago.
# note that "near" will show the san francisco event, too,
# although it sorts to last.
events = Event.objects(location__near=[-87.67892, 41.9120459])
self.assertEqual(events.count(), 3)
self.assertEqual(list(events), [event1, event3, event2])
# find events within 5 degrees of pitchfork office, chicago
point_and_distance = [[-87.67892, 41.9120459], 2]
events = Event.objects(location__geo_within_center=point_and_distance)
self.assertEqual(events.count(), 2)
events = list(events)
self.assertTrue(event2 not in events)
self.assertTrue(event1 in events)
self.assertTrue(event3 in events)
# ensure ordering is respected by "near"
events = Event.objects(location__near=[-87.67892, 41.9120459])
events = events.order_by("-date")
self.assertEqual(events.count(), 3)
self.assertEqual(list(events), [event3, event1, event2])
# find events within 10km of san francisco
point = [-122.415579, 37.7566023]
events = Event.objects(location__near=point, location__max_distance=10000)
self.assertEqual(events.count(), 1)
self.assertEqual(events[0], event2)
# find events within 1km of greenpoint, broolyn, nyc, ny
events = Event.objects(location__near=[-73.9509714, 40.7237134], location__max_distance=1000)
self.assertEqual(events.count(), 0)
# ensure ordering is respected by "near"
events = Event.objects(location__near=[-87.67892, 41.9120459],
location__max_distance=10000).order_by("-date")
self.assertEqual(events.count(), 2)
self.assertEqual(events[0], event3)
# ensure min_distance and max_distance combine well
events = Event.objects(location__near=[-87.67892, 41.9120459],
location__min_distance=1000,
location__max_distance=10000).order_by("-date")
self.assertEqual(events.count(), 1)
self.assertEqual(events[0], event3)
# ensure ordering is respected by "near"
events = Event.objects(location__near=[-87.67892, 41.9120459],
# location__min_distance=10000
location__min_distance=10000).order_by("-date")
self.assertEqual(events.count(), 1)
self.assertEqual(events[0], event2)
# check that within_box works
box = [(-125.0, 35.0), (-100.0, 40.0)]
events = Event.objects(location__geo_within_box=box)
self.assertEqual(events.count(), 1)
self.assertEqual(events[0].id, event2.id)
polygon = [
(-87.694445, 41.912114),
(-87.69084, 41.919395),
(-87.681742, 41.927186),
(-87.654276, 41.911731),
(-87.656164, 41.898061),
]
events = Event.objects(location__geo_within_polygon=polygon)
self.assertEqual(events.count(), 1)
self.assertEqual(events[0].id, event1.id)
polygon2 = [
(-1.742249, 54.033586),
(-1.225891, 52.792797),
(-4.40094, 53.389881)
]
events = Event.objects(location__geo_within_polygon=polygon2)
self.assertEqual(events.count(), 0)
def test_2dsphere_point_embedded(self):
class Venue(EmbeddedDocument):
location = GeoPointField()
name = StringField()
class Event(Document):
title = StringField()
venue = EmbeddedDocumentField(Venue)
Event.drop_collection()
venue1 = Venue(name="The Rock", location=[-87.677137, 41.909889])
venue2 = Venue(name="The Bridge", location=[-122.4194155, 37.7749295])
event1 = Event(title="Coltrane Motion @ Double Door",
venue=venue1).save()
event2 = Event(title="Coltrane Motion @ Bottom of the Hill",
venue=venue2).save()
event3 = Event(title="Coltrane Motion @ Empty Bottle",
venue=venue1).save()
# find all events "near" pitchfork office, chicago.
# note that "near" will show the san francisco event, too,
# although it sorts to last.
events = Event.objects(venue__location__near=[-87.67892, 41.9120459])
self.assertEqual(events.count(), 3)
self.assertEqual(list(events), [event1, event3, event2])
def test_linestring(self): def test_linestring(self):
class Road(Document): class Road(Document):
name = StringField() name = StringField()
line = LineStringField() line = LineStringField()
@@ -456,6 +410,7 @@ class GeoQueriesTest(MongoDBTestCase):
self.assertEqual(1, roads) self.assertEqual(1, roads)
def test_polygon(self): def test_polygon(self):
class Road(Document): class Road(Document):
name = StringField() name = StringField()
poly = PolygonField() poly = PolygonField()
@@ -552,6 +507,5 @@ class GeoQueriesTest(MongoDBTestCase):
loc = Location.objects.as_pymongo()[0] loc = Location.objects.as_pymongo()[0]
self.assertEqual(loc["poly"], {"type": "Polygon", "coordinates": [[[40, 4], [40, 6], [41, 6], [40, 4]]]}) self.assertEqual(loc["poly"], {"type": "Polygon", "coordinates": [[[40, 4], [40, 6], [41, 6], [40, 4]]]})
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()

@@ -19,9 +19,6 @@ from mongoengine.python_support import IS_PYMONGO_3
from mongoengine.queryset import (DoesNotExist, MultipleObjectsReturned, from mongoengine.queryset import (DoesNotExist, MultipleObjectsReturned,
QuerySet, QuerySetManager, queryset_manager) QuerySet, QuerySetManager, queryset_manager)
from tests.utils import needs_mongodb_v26, skip_pymongo3
__all__ = ("QuerySetTest",) __all__ = ("QuerySetTest",)
@@ -35,6 +32,37 @@ class db_ops_tracker(query_counter):
return list(self.db.system.profile.find(ignore_query)) return list(self.db.system.profile.find(ignore_query))
def skip_older_mongodb(f):
def _inner(*args, **kwargs):
connection = get_connection()
info = connection.test.command('buildInfo')
mongodb_version = tuple([int(i) for i in info['version'].split('.')])
if mongodb_version < (2, 6):
raise SkipTest("Need MongoDB version 2.6+")
return f(*args, **kwargs)
_inner.__name__ = f.__name__
_inner.__doc__ = f.__doc__
return _inner
def skip_pymongo3(f):
def _inner(*args, **kwargs):
if IS_PYMONGO_3:
raise SkipTest("Useless with PyMongo 3+")
return f(*args, **kwargs)
_inner.__name__ = f.__name__
_inner.__doc__ = f.__doc__
return _inner
class QuerySetTest(unittest.TestCase): class QuerySetTest(unittest.TestCase):
def setUp(self): def setUp(self):
@@ -78,111 +106,58 @@ class QuerySetTest(unittest.TestCase):
list(BlogPost.objects(author2__name="test")) list(BlogPost.objects(author2__name="test"))
def test_find(self): def test_find(self):
"""Ensure that a query returns a valid set of results.""" """Ensure that a query returns a valid set of results.
user_a = self.Person.objects.create(name='User A', age=20) """
user_b = self.Person.objects.create(name='User B', age=30) self.Person(name="User A", age=20).save()
self.Person(name="User B", age=30).save()
# Find all people in the collection # Find all people in the collection
people = self.Person.objects people = self.Person.objects
self.assertEqual(people.count(), 2) self.assertEqual(people.count(), 2)
results = list(people) results = list(people)
self.assertTrue(isinstance(results[0], self.Person)) self.assertTrue(isinstance(results[0], self.Person))
self.assertTrue(isinstance(results[0].id, (ObjectId, str, unicode))) self.assertTrue(isinstance(results[0].id, (ObjectId, str, unicode)))
self.assertEqual(results[0].name, "User A")
self.assertEqual(results[0], user_a)
self.assertEqual(results[0].name, 'User A')
self.assertEqual(results[0].age, 20) self.assertEqual(results[0].age, 20)
self.assertEqual(results[1].name, "User B")
self.assertEqual(results[1], user_b)
self.assertEqual(results[1].name, 'User B')
self.assertEqual(results[1].age, 30) self.assertEqual(results[1].age, 30)
# Filter people by age # Use a query to filter the people found to just person1
people = self.Person.objects(age=20) people = self.Person.objects(age=20)
self.assertEqual(people.count(), 1) self.assertEqual(people.count(), 1)
person = people.next() person = people.next()
self.assertEqual(person, user_a)
self.assertEqual(person.name, "User A") self.assertEqual(person.name, "User A")
self.assertEqual(person.age, 20) self.assertEqual(person.age, 20)
def test_limit(self): # Test limit
"""Ensure that QuerySet.limit works as expected."""
user_a = self.Person.objects.create(name='User A', age=20)
user_b = self.Person.objects.create(name='User B', age=30)
# Test limit on a new queryset
people = list(self.Person.objects.limit(1)) people = list(self.Person.objects.limit(1))
self.assertEqual(len(people), 1) self.assertEqual(len(people), 1)
self.assertEqual(people[0], user_a) self.assertEqual(people[0].name, 'User A')
# Test limit on an existing queryset # Test skip
people = self.Person.objects
self.assertEqual(len(people), 2)
people2 = people.limit(1)
self.assertEqual(len(people), 2)
self.assertEqual(len(people2), 1)
self.assertEqual(people2[0], user_a)
# Test chaining of only after limit
person = self.Person.objects().limit(1).only('name').first()
self.assertEqual(person, user_a)
self.assertEqual(person.name, 'User A')
self.assertEqual(person.age, None)
def test_skip(self):
"""Ensure that QuerySet.skip works as expected."""
user_a = self.Person.objects.create(name='User A', age=20)
user_b = self.Person.objects.create(name='User B', age=30)
# Test skip on a new queryset
people = list(self.Person.objects.skip(1)) people = list(self.Person.objects.skip(1))
self.assertEqual(len(people), 1) self.assertEqual(len(people), 1)
self.assertEqual(people[0], user_b) self.assertEqual(people[0].name, 'User B')
# Test skip on an existing queryset person3 = self.Person(name="User C", age=40)
people = self.Person.objects person3.save()
self.assertEqual(len(people), 2)
people2 = people.skip(1)
self.assertEqual(len(people), 2)
self.assertEqual(len(people2), 1)
self.assertEqual(people2[0], user_b)
# Test chaining of only after skip
person = self.Person.objects().skip(1).only('name').first()
self.assertEqual(person, user_b)
self.assertEqual(person.name, 'User B')
self.assertEqual(person.age, None)
def test_slice(self):
"""Ensure slicing a queryset works as expected."""
user_a = self.Person.objects.create(name='User A', age=20)
user_b = self.Person.objects.create(name='User B', age=30)
user_c = self.Person.objects.create(name="User C", age=40)
# Test slice limit # Test slice limit
people = list(self.Person.objects[:2]) people = list(self.Person.objects[:2])
self.assertEqual(len(people), 2) self.assertEqual(len(people), 2)
self.assertEqual(people[0], user_a) self.assertEqual(people[0].name, 'User A')
self.assertEqual(people[1], user_b) self.assertEqual(people[1].name, 'User B')
# Test slice skip # Test slice skip
people = list(self.Person.objects[1:]) people = list(self.Person.objects[1:])
self.assertEqual(len(people), 2) self.assertEqual(len(people), 2)
self.assertEqual(people[0], user_b) self.assertEqual(people[0].name, 'User B')
self.assertEqual(people[1], user_c) self.assertEqual(people[1].name, 'User C')
# Test slice limit and skip # Test slice limit and skip
people = list(self.Person.objects[1:2]) people = list(self.Person.objects[1:2])
self.assertEqual(len(people), 1) self.assertEqual(len(people), 1)
self.assertEqual(people[0], user_b) self.assertEqual(people[0].name, 'User B')
# Test slice limit and skip on an existing queryset
people = self.Person.objects
self.assertEqual(len(people), 3)
people2 = people[1:2]
self.assertEqual(len(people2), 1)
self.assertEqual(people2[0], user_b)
# Test slice limit and skip cursor reset # Test slice limit and skip cursor reset
qs = self.Person.objects[1:2] qs = self.Person.objects[1:2]
@@ -193,7 +168,6 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual(len(people), 1) self.assertEqual(len(people), 1)
self.assertEqual(people[0].name, 'User B') self.assertEqual(people[0].name, 'User B')
# Test empty slice
people = list(self.Person.objects[1:1]) people = list(self.Person.objects[1:1])
self.assertEqual(len(people), 0) self.assertEqual(len(people), 0)
@@ -213,6 +187,12 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual("[<Person: Person object>, <Person: Person object>]", self.assertEqual("[<Person: Person object>, <Person: Person object>]",
"%s" % self.Person.objects[51:53]) "%s" % self.Person.objects[51:53])
# Test only after limit
self.assertEqual(self.Person.objects().limit(2).only('name')[0].age, None)
# Test only after skip
self.assertEqual(self.Person.objects().skip(2).only('name')[0].age, None)
def test_find_one(self): def test_find_one(self):
"""Ensure that a query using find_one returns a valid result. """Ensure that a query using find_one returns a valid result.
""" """
@@ -571,23 +551,16 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual(post.comments[0].by, 'joe') self.assertEqual(post.comments[0].by, 'joe')
self.assertEqual(post.comments[0].votes.score, 4) self.assertEqual(post.comments[0].votes.score, 4)
@needs_mongodb_v26
def test_update_min_max(self): def test_update_min_max(self):
class Scores(Document): class Scores(Document):
high_score = IntField() high_score = IntField()
low_score = IntField() low_score = IntField()
scores = Scores(high_score=800, low_score=200)
scores = Scores.objects.create(high_score=800, low_score=200) scores.save()
Scores.objects(id=scores.id).update(min__low_score=150) Scores.objects(id=scores.id).update(min__low_score=150)
self.assertEqual(Scores.objects.get(id=scores.id).low_score, 150) self.assertEqual(Scores.objects(id=scores.id).get().low_score, 150)
Scores.objects(id=scores.id).update(min__low_score=250) Scores.objects(id=scores.id).update(min__low_score=250)
self.assertEqual(Scores.objects.get(id=scores.id).low_score, 150) self.assertEqual(Scores.objects(id=scores.id).get().low_score, 150)
Scores.objects(id=scores.id).update(max__high_score=1000)
self.assertEqual(Scores.objects.get(id=scores.id).high_score, 1000)
Scores.objects(id=scores.id).update(max__high_score=500)
self.assertEqual(Scores.objects.get(id=scores.id).high_score, 1000)
def test_updates_can_have_match_operators(self): def test_updates_can_have_match_operators(self):
@@ -991,7 +964,7 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual(person.name, "User A") self.assertEqual(person.name, "User A")
self.assertEqual(person.age, 20) self.assertEqual(person.age, 20)
@needs_mongodb_v26 @skip_older_mongodb
@skip_pymongo3 @skip_pymongo3
def test_cursor_args(self): def test_cursor_args(self):
"""Ensures the cursor args can be set as expected """Ensures the cursor args can be set as expected
@@ -1253,7 +1226,6 @@ class QuerySetTest(unittest.TestCase):
BlogPost.drop_collection() BlogPost.drop_collection()
# default ordering should be used by default
with db_ops_tracker() as q: with db_ops_tracker() as q:
BlogPost.objects.filter(title='whatever').first() BlogPost.objects.filter(title='whatever').first()
self.assertEqual(len(q.get_ops()), 1) self.assertEqual(len(q.get_ops()), 1)
@@ -1262,28 +1234,11 @@ class QuerySetTest(unittest.TestCase):
{'published_date': -1} {'published_date': -1}
) )
# calling order_by() should clear the default ordering
with db_ops_tracker() as q: with db_ops_tracker() as q:
BlogPost.objects.filter(title='whatever').order_by().first() BlogPost.objects.filter(title='whatever').order_by().first()
self.assertEqual(len(q.get_ops()), 1) self.assertEqual(len(q.get_ops()), 1)
self.assertFalse('$orderby' in q.get_ops()[0]['query']) self.assertFalse('$orderby' in q.get_ops()[0]['query'])
# calling an explicit order_by should use a specified sort
with db_ops_tracker() as q:
BlogPost.objects.filter(title='whatever').order_by('published_date').first()
self.assertEqual(len(q.get_ops()), 1)
self.assertEqual(
q.get_ops()[0]['query']['$orderby'],
{'published_date': 1}
)
# calling order_by() after an explicit sort should clear it
with db_ops_tracker() as q:
qs = BlogPost.objects.filter(title='whatever').order_by('published_date')
qs.order_by().first()
self.assertEqual(len(q.get_ops()), 1)
self.assertFalse('$orderby' in q.get_ops()[0]['query'])
def test_no_ordering_for_get(self): def test_no_ordering_for_get(self):
""" Ensure that Doc.objects.get doesn't use any ordering. """ Ensure that Doc.objects.get doesn't use any ordering.
""" """
@@ -3108,7 +3063,7 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual(Foo.objects.distinct("bar"), [bar]) self.assertEqual(Foo.objects.distinct("bar"), [bar])
@needs_mongodb_v26 @skip_older_mongodb
def test_text_indexes(self): def test_text_indexes(self):
class News(Document): class News(Document):
title = StringField() title = StringField()
@@ -3195,7 +3150,7 @@ class QuerySetTest(unittest.TestCase):
'brasil').order_by('$text_score').first() 'brasil').order_by('$text_score').first()
self.assertEqual(item.get_text_score(), max_text_score) self.assertEqual(item.get_text_score(), max_text_score)
@needs_mongodb_v26 @skip_older_mongodb
def test_distinct_handles_references_to_alias(self): def test_distinct_handles_references_to_alias(self):
register_connection('testdb', 'mongoenginetest2') register_connection('testdb', 'mongoenginetest2')
@@ -4870,7 +4825,6 @@ class QuerySetTest(unittest.TestCase):
self.assertTrue(Person.objects._has_data(), self.assertTrue(Person.objects._has_data(),
'Cursor has data and returned False') 'Cursor has data and returned False')
@needs_mongodb_v26
def test_queryset_aggregation_framework(self): def test_queryset_aggregation_framework(self):
class Person(Document): class Person(Document):
name = StringField() name = StringField()
@@ -4905,13 +4859,17 @@ class QuerySetTest(unittest.TestCase):
{'_id': p1.pk, 'name': "ISABELLA LUANNA"} {'_id': p1.pk, 'name': "ISABELLA LUANNA"}
]) ])
data = Person.objects(age__gte=17, age__lte=40).order_by('-age').aggregate({ data = Person.objects(
'$group': { age__gte=17, age__lte=40).order_by('-age').aggregate(
'_id': None, {'$group': {
'total': {'$sum': 1}, '_id': None,
'avg': {'$avg': '$age'} 'total': {'$sum': 1},
} 'avg': {'$avg': '$age'}
}) }
}
)
self.assertEqual(list(data), [ self.assertEqual(list(data), [
{'_id': None, 'avg': 29, 'total': 2} {'_id': None, 'avg': 29, 'total': 2}
]) ])
@@ -4952,13 +4910,11 @@ class QuerySetTest(unittest.TestCase):
self.assertEquals(Animal.objects(folded_ears=True).count(), 1) self.assertEquals(Animal.objects(folded_ears=True).count(), 1)
self.assertEquals(Animal.objects(whiskers_length=5.1).count(), 1) self.assertEquals(Animal.objects(whiskers_length=5.1).count(), 1)
def test_loop_over_invalid_id_does_not_crash(self): def test_loop_via_invalid_id_does_not_crash(self):
class Person(Document): class Person(Document):
name = StringField() name = StringField()
Person.objects.delete()
Person.drop_collection() Person._get_collection().update({"name": "a"}, {"$set": {"_id": ""}}, upsert=True)
Person._get_collection().insert({'name': 'a', 'id': ''})
for p in Person.objects(): for p in Person.objects():
self.assertEqual(p.name, 'a') self.assertEqual(p.name, 'a')

@@ -35,7 +35,8 @@ class ConnectionTest(unittest.TestCase):
mongoengine.connection._dbs = {} mongoengine.connection._dbs = {}
def test_connect(self): def test_connect(self):
"""Ensure that the connect() method works properly.""" """Ensure that the connect() method works properly.
"""
connect('mongoenginetest') connect('mongoenginetest')
conn = get_connection() conn = get_connection()
@@ -145,7 +146,8 @@ class ConnectionTest(unittest.TestCase):
self.assertEqual(expected_connection, actual_connection) self.assertEqual(expected_connection, actual_connection)
def test_connect_uri(self): def test_connect_uri(self):
"""Ensure that the connect() method works properly with URIs.""" """Ensure that the connect() method works properly with uri's
"""
c = connect(db='mongoenginetest', alias='admin') c = connect(db='mongoenginetest', alias='admin')
c.admin.system.users.remove({}) c.admin.system.users.remove({})
c.mongoenginetest.system.users.remove({}) c.mongoenginetest.system.users.remove({})
@@ -225,8 +227,9 @@ class ConnectionTest(unittest.TestCase):
self.assertRaises(OperationFailure, get_db) self.assertRaises(OperationFailure, get_db)
def test_connect_uri_with_authsource(self): def test_connect_uri_with_authsource(self):
"""Ensure that the connect() method works well with `authSource` """Ensure that the connect() method works well with
option in the URI. the option `authSource` in URI.
This feature was introduced in MongoDB 2.4 and removed in 2.6
""" """
# Create users # Create users
c = connect('mongoenginetest') c = connect('mongoenginetest')
@@ -235,31 +238,30 @@ class ConnectionTest(unittest.TestCase):
# Authentication fails without "authSource" # Authentication fails without "authSource"
if IS_PYMONGO_3: if IS_PYMONGO_3:
test_conn = connect( test_conn = connect('mongoenginetest', alias='test1',
'mongoenginetest', alias='test1', host='mongodb://username2:password@localhost/mongoenginetest')
host='mongodb://username2:password@localhost/mongoenginetest'
)
self.assertRaises(OperationFailure, test_conn.server_info) self.assertRaises(OperationFailure, test_conn.server_info)
else: else:
self.assertRaises( self.assertRaises(
MongoEngineConnectionError, MongoEngineConnectionError, connect, 'mongoenginetest',
connect, 'mongoenginetest', alias='test1', alias='test1',
host='mongodb://username2:password@localhost/mongoenginetest' host='mongodb://username2:password@localhost/mongoenginetest'
) )
self.assertRaises(MongoEngineConnectionError, get_db, 'test1') self.assertRaises(MongoEngineConnectionError, get_db, 'test1')
# Authentication succeeds with "authSource" # Authentication succeeds with "authSource"
authd_conn = connect( connect(
'mongoenginetest', alias='test2', 'mongoenginetest', alias='test2',
host=('mongodb://username2:password@localhost/' host=('mongodb://username2:password@localhost/'
'mongoenginetest?authSource=admin') 'mongoenginetest?authSource=admin')
) )
# This will fail starting from MongoDB 2.6+
db = get_db('test2') db = get_db('test2')
self.assertTrue(isinstance(db, pymongo.database.Database)) self.assertTrue(isinstance(db, pymongo.database.Database))
self.assertEqual(db.name, 'mongoenginetest') self.assertEqual(db.name, 'mongoenginetest')
# Clear all users # Clear all users
authd_conn.admin.system.users.remove({}) c.admin.system.users.remove({})
def test_register_connection(self): def test_register_connection(self):
"""Ensure that connections with different aliases may be registered. """Ensure that connections with different aliases may be registered.
@@ -283,7 +285,8 @@ class ConnectionTest(unittest.TestCase):
self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))
def test_connection_kwargs(self): def test_connection_kwargs(self):
"""Ensure that connection kwargs get passed to pymongo.""" """Ensure that connection kwargs get passed to pymongo.
"""
connect('mongoenginetest', alias='t1', tz_aware=True) connect('mongoenginetest', alias='t1', tz_aware=True)
conn = get_connection('t1') conn = get_connection('t1')
@@ -293,45 +296,6 @@ class ConnectionTest(unittest.TestCase):
conn = get_connection('t2') conn = get_connection('t2')
self.assertFalse(get_tz_awareness(conn)) self.assertFalse(get_tz_awareness(conn))
def test_connection_pool_via_kwarg(self):
"""Ensure we can specify a max connection pool size using
a connection kwarg.
"""
# Use "max_pool_size" or "maxpoolsize" depending on PyMongo version
# (former was changed to the latter as described in
# https://jira.mongodb.org/browse/PYTHON-854).
# TODO remove once PyMongo < 3.0 support is dropped
if pymongo.version_tuple[0] >= 3:
pool_size_kwargs = {'maxpoolsize': 100}
else:
pool_size_kwargs = {'max_pool_size': 100}
conn = connect('mongoenginetest', alias='max_pool_size_via_kwarg', **pool_size_kwargs)
self.assertEqual(conn.max_pool_size, 100)
def test_connection_pool_via_uri(self):
"""Ensure we can specify a max connection pool size using
an option in a connection URI.
"""
if pymongo.version_tuple[0] == 2 and pymongo.version_tuple[1] < 9:
raise SkipTest('maxpoolsize as a URI option is only supported in PyMongo v2.9+')
conn = connect(host='mongodb://localhost/test?maxpoolsize=100', alias='max_pool_size_via_uri')
self.assertEqual(conn.max_pool_size, 100)
def test_write_concern(self):
"""Ensure write concern can be specified in connect() via
a kwarg or as part of the connection URI.
"""
conn1 = connect(alias='conn1', host='mongodb://localhost/testing?w=1&j=true')
conn2 = connect('testing', alias='conn2', w=1, j=True)
if IS_PYMONGO_3:
self.assertEqual(conn1.write_concern.document, {'w': 1, 'j': True})
self.assertEqual(conn2.write_concern.document, {'w': 1, 'j': True})
else:
self.assertEqual(dict(conn1.write_concern), {'w': 1, 'j': True})
self.assertEqual(dict(conn2.write_concern), {'w': 1, 'j': True})
def test_datetime(self): def test_datetime(self):
connect('mongoenginetest', tz_aware=True) connect('mongoenginetest', tz_aware=True)
d = datetime.datetime(2010, 5, 5, tzinfo=utc) d = datetime.datetime(2010, 5, 5, tzinfo=utc)

@@ -1,78 +0,0 @@
import unittest
from nose.plugins.skip import SkipTest
from mongoengine import connect
from mongoengine.connection import get_db, get_connection
from mongoengine.python_support import IS_PYMONGO_3
MONGO_TEST_DB = 'mongoenginetest'
class MongoDBTestCase(unittest.TestCase):
"""Base class for tests that need a mongodb connection
db is being dropped automatically
"""
@classmethod
def setUpClass(cls):
cls._connection = connect(db=MONGO_TEST_DB)
cls._connection.drop_database(MONGO_TEST_DB)
cls.db = get_db()
@classmethod
def tearDownClass(cls):
cls._connection.drop_database(MONGO_TEST_DB)
def get_mongodb_version():
"""Return the version tuple of the MongoDB server that the default
connection is connected to.
"""
return tuple(get_connection().server_info()['versionArray'])
def _decorated_with_ver_requirement(func, ver_tuple):
"""Return a given function decorated with the version requirement
for a particular MongoDB version tuple.
"""
def _inner(*args, **kwargs):
mongodb_ver = get_mongodb_version()
if mongodb_ver >= ver_tuple:
return func(*args, **kwargs)
raise SkipTest('Needs MongoDB v{}+'.format(
'.'.join([str(v) for v in ver_tuple])
))
_inner.__name__ = func.__name__
_inner.__doc__ = func.__doc__
return _inner
def needs_mongodb_v26(func):
"""Raise a SkipTest exception if we're working with MongoDB version
lower than v2.6.
"""
return _decorated_with_ver_requirement(func, (2, 6))
def needs_mongodb_v3(func):
"""Raise a SkipTest exception if we're working with MongoDB version
lower than v3.0.
"""
return _decorated_with_ver_requirement(func, (3, 0))
def skip_pymongo3(f):
"""Raise a SkipTest exception if we're running a test against
PyMongo v3.x.
"""
def _inner(*args, **kwargs):
if IS_PYMONGO_3:
raise SkipTest("Useless with PyMongo 3+")
return f(*args, **kwargs)
_inner.__name__ = f.__name__
_inner.__doc__ = f.__doc__
return _inner
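For context, a minimal sketch of how a test module is meant to consume these helpers (the ExampleTest class, the Doc document and its name field below are illustrative only; the imports mirror the ones used elsewhere in this changeset):

import unittest

from mongoengine import Document, StringField

from tests.utils import MongoDBTestCase, needs_mongodb_v26


class ExampleTest(MongoDBTestCase):
    # MongoDBTestCase connects to the shared 'mongoenginetest' database in
    # setUpClass and drops it again in tearDownClass, so the test itself
    # needs no connection boilerplate.

    @needs_mongodb_v26
    def test_something_that_needs_a_recent_server(self):
        # The decorator raises SkipTest when the connected server reports a
        # version older than 2.6; otherwise the test body runs normally.
        class Doc(Document):
            name = StringField()

        Doc.drop_collection()
        Doc(name='example').save()
        self.assertEqual(Doc.objects.count(), 1)


if __name__ == '__main__':
    unittest.main()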

tox.ini
@@ -1,5 +1,5 @@
[tox] [tox]
envlist = {py27,py35,pypy,pypy3}-{mg27,mg28,mg30} envlist = {py26,py27,py33,py34,py35,pypy,pypy3}-{mg27,mg28},flake8
[testenv] [testenv]
commands = commands =
@@ -7,7 +7,16 @@ commands =
deps = deps =
nose nose
mg27: PyMongo<2.8 mg27: PyMongo<2.8
mg28: PyMongo>=2.8,<2.9 mg28: PyMongo>=2.8,<3.0
mg30: PyMongo>=3.0 mg30: PyMongo>=3.0
mgdev: https://github.com/mongodb/mongo-python-driver/tarball/master
setenv = setenv =
PYTHON_EGG_CACHE = {envdir}/python-eggs PYTHON_EGG_CACHE = {envdir}/python-eggs
passenv = windir
[testenv:flake8]
deps =
flake8
flake8-import-order
commands =
flake8