Merge branch 'master' of github.com:MongoEngine/mongoengine into add_validation_to_doc
This commit is contained in: commit c218c8bb6c
.gitignore (vendored, 11 lines changed)
@@ -1,8 +1,15 @@
.*
!.gitignore
*~
*.py[co]
.*.sw[po]
.cache/
.coverage
.coveragerc
.env
.idea/
.pytest_cache/
.tox/
.eggs/
*.egg
docs/.build
docs/_build
@@ -13,8 +20,6 @@ env/
.settings
.project
.pydevproject
tests/test_bugfix.py
htmlcov/
venv
venv3
scratchpad

@@ -5,17 +5,12 @@ pylint:
    options:
        additional-builtins:
            # add xrange and long as valid built-ins. In Python 3, xrange is
            # translated into range and long is translated into int via 2to3 (see
            # "use_2to3" in setup.py). This should be removed when we drop Python
            # 2 support (which probably won't happen any time soon).
            - xrange
            # add long as valid built-ins.
            - long

pyflakes:
    disable:
        # undefined variables are already covered by pylint (and exclude
        # xrange & long)
        # undefined variables are already covered by pylint (and exclude long)
        - F821

ignore-paths:

.pre-commit-config.yaml (new file, 12 lines)
@@ -0,0 +1,12 @@
fail_fast: false
repos:
  - repo: https://github.com/ambv/black
    rev: 19.10b0
    hooks:
      - id: black
  - repo: https://gitlab.com/pycqa/flake8
    rev: 3.8.0a2
    hooks:
      - id: flake8
        additional_dependencies:
          - flake8-import-order

.travis.yml (75 lines changed)
@@ -1,13 +1,10 @@
# For full coverage, we'd have to test all supported Python, MongoDB, and
# PyMongo combinations. However, that would result in an overly long build
# with a very large number of jobs, hence we only test a subset of all the
# combinations:
# * MongoDB v3.4 & the latest PyMongo v3.x is currently the "main" setup,
#   tested against Python v2.7, v3.5, v3.6, and PyPy.
# * Besides that, we test the lowest actively supported Python/MongoDB/PyMongo
#   combination: MongoDB v3.4, PyMongo v3.4, Python v2.7.
# * MongoDB v3.6 is tested against Python v3.6, and PyMongo v3.6, v3.7, v3.8.
#
# combinations.
# * Python3.7, MongoDB v3.4 & the latest PyMongo v3.x is currently the "main" setup,
# Other combinations are tested. See below for the details or check the travis jobs

# We should periodically check MongoDB Server versions supported by MongoDB
# Inc., add newly released versions to the test matrix, and remove versions
# which have reached their End of Life. See:
@@ -16,65 +13,69 @@
#
# Reminder: Update README.rst if you change MongoDB versions we test.

language: python
dist: xenial
python:
  - 2.7
  - 3.5
  - 3.6
  - pypy

dist: xenial
  - 3.7
  - 3.8
  - pypy3

env:
  global:
    - MONGODB_3_4=3.4.17
    - MONGODB_3_6=3.6.12
    - MONGODB_3_4=3.4.19
    - MONGODB_3_6=3.6.13
    - MONGODB_4_0=4.0.13

    - PYMONGO_3_4=3.4
    - PYMONGO_3_6=3.6
    - PYMONGO_3_9=3.9
    - PYMONGO_3_11=3.11

    - MAIN_PYTHON_VERSION=3.7
  matrix:
    - MONGODB=${MONGODB_3_4} PYMONGO=3.x
    - MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_11}

matrix:
  # Finish the build as soon as one job fails
  fast_finish: true

  include:
    - python: 2.7
      env: MONGODB=${MONGODB_3_4} PYMONGO=3.4.x
    - python: 3.6
      env: MONGODB=${MONGODB_3_6} PYMONGO=3.x
    - python: 3.7
      env: MONGODB=${MONGODB_3_6} PYMONGO=3.x

      env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_6}
    - python: 3.7
      env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_9}
    - python: 3.7
      env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_11}
    - python: 3.8
      env: MONGODB=${MONGODB_4_0} PYMONGO=${PYMONGO_3_11}

install:
  # Install Mongo
  - wget http://fastdl.mongodb.org/linux/mongodb-linux-x86_64-${MONGODB}.tgz
  - tar xzf mongodb-linux-x86_64-${MONGODB}.tgz
  - ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --version
  # Install python dependencies
  # Install Python dependencies.
  - pip install --upgrade pip
  - pip install coveralls
  - pip install flake8 flake8-import-order
  - pip install tox  # tox 3.11.0 has requirement virtualenv>=14.0.0
  - pip install virtualenv  # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32)
  # Install the tox venv
  - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -e test
  - pip install pre-commit
  - pip install tox
  # tox dryrun to setup the tox venv (we run a mock test).
  - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -a "-k=test_ci_placeholder"

before_script:
  - mkdir ${PWD}/mongodb-linux-x86_64-${MONGODB}/data
  - ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --dbpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/data --logpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/mongodb.log --fork
  - if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then flake8 .; else echo "flake8 only runs on py27"; fi  # Run flake8 for py27
  # Run pre-commit hooks (black, flake8, etc) on entire codebase
  - if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then pre-commit run -a; else echo "pre-commit checks only runs on py37"; fi
  - mongo --eval 'db.version();'  # Make sure mongo is awake

script:
  - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- --with-coverage
  - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -a "--cov=mongoengine"

# For now only submit coveralls for Python v2.7. Python v3.x currently shows
# 0% coverage. That's caused by 'use_2to3', which builds the py3-compatible
# code in a separate dir and runs tests on that.
after_success:
  - if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then coveralls --verbose; fi
  - if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then coveralls --verbose; else echo "coveralls only sent for py37"; fi

notifications:
  irc: irc.freenode.org#mongoengine

@@ -96,11 +97,11 @@ deploy:
  distributions: "sdist bdist_wheel"

  # Only deploy on tagged commits (aka GitHub releases) and only for the parent
  # repo's builds running Python v2.7 along with PyMongo v3.x and MongoDB v3.4.
  # repo's builds running Python v3.7 along with PyMongo v3.x and MongoDB v3.4.
  # We run Travis against many different Python, PyMongo, and MongoDB versions
  # and we don't want the deploy to occur multiple times).
  on:
    tags: true
    repo: MongoEngine/mongoengine
    condition: ($PYMONGO = 3.x) && ($MONGODB = 3.4)
    python: 2.7
    condition: ($PYMONGO = ${PYMONGO_3_11}) && ($MONGODB = ${MONGODB_3_4})
    python: 3.7

AUTHORS (5 lines changed)
@@ -252,3 +252,8 @@ that much better:
* Paulo Amaral (https://github.com/pauloAmaral)
* Gaurav Dadhania (https://github.com/GVRV)
* Yurii Andrieiev (https://github.com/yandrieiev)
* Filip Kucharczyk (https://github.com/Pacu2)
* Eric Timmons (https://github.com/daewok)
* Matthew Simpson (https://github.com/mcsimps2)
* Leonardo Domingues (https://github.com/leodmgs)
* Agustin Barto (https://github.com/abarto)

@@ -20,23 +20,43 @@ post to the `user group <http://groups.google.com/group/mongoengine-users>`

Supported Interpreters
----------------------

MongoEngine supports CPython 2.7 and newer. Language
features not supported by all interpreters can not be used.
The codebase is written in python 2 so you must be using python 2
when developing new features. Compatibility of the library with Python 3
relies on the 2to3 package that gets executed as part of the installation
build. You should ensure that your code is properly converted by
`2to3 <http://docs.python.org/library/2to3.html>`_.
MongoEngine supports CPython 3.5 and newer as well as Pypy3.
Language features not supported by all interpreters can not be used.

Python3 codebase
----------------------

Since 0.20, the codebase is exclusively Python 3.

Earlier versions were exclusively Python2, and were relying on 2to3 to support Python3 installs.
Travis runs the tests against the main Python 3.x versions.


Style Guide
-----------

MongoEngine aims to follow `PEP8 <http://www.python.org/dev/peps/pep-0008/>`_
including 4 space indents. When possible we try to stick to 79 character line
limits. However, screens got bigger and an ORM has a strong focus on
readability and if it can help, we accept 119 as maximum line length, in a
similar way as `django does
<https://docs.djangoproject.com/en/dev/internals/contributing/writing-code/coding-style/#python-style>`_
MongoEngine's codebase is formatted with `black <https://github.com/python/black>`_, other tools like
flake8 are also used. Those tools will run as part of the CI and will fail in case the code is not formatted properly.

To install all development tools, simply run the following commands:

.. code-block:: console

    $ python -m pip install -r requirements-dev.txt


You can install `pre-commit <https://pre-commit.com/>`_ into your git hooks,
to automatically check and fix any formatting issue before creating a
git commit.

To enable ``pre-commit`` simply run:

.. code-block:: console

    $ pre-commit install

See the ``.pre-commit-config.yaml`` configuration file for more information
on how it works.

Testing
-------
@@ -58,7 +78,7 @@ General Guidelines
  should adapt to the breaking change in docs/upgrade.rst.
- Write inline documentation for new classes and methods.
- Write tests and make sure they pass (make sure you have a mongod
  running on the default port, then execute ``python setup.py nosetests``
  running on the default port, then execute ``python setup.py test``
  from the cmd line to run the test suite).
- Ensure tests pass on all supported Python, PyMongo, and MongoDB versions.
  You can test various Python and PyMongo versions locally by executing

README.rst (35 lines changed)
@@ -26,15 +26,15 @@ an `API reference <https://mongoengine-odm.readthedocs.io/apireference.html>`_.

Supported MongoDB Versions
==========================
MongoEngine is currently tested against MongoDB v3.4 and v3.6. Future versions
MongoEngine is currently tested against MongoDB v3.4, v3.6 and v4.0. Future versions
should be supported as well, but aren't actively tested at the moment. Make
sure to open an issue or submit a pull request if you experience any problems
with MongoDB version > 3.6.
with MongoDB version > 4.0.

Installation
============
We recommend the use of `virtualenv <https://virtualenv.pypa.io/>`_ and of
`pip <https://pip.pypa.io/>`_. You can then use ``pip install -U mongoengine``.
`pip <https://pip.pypa.io/>`_. You can then use ``python -m pip install -U mongoengine``.
You may also have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_
and thus you can use ``easy_install -U mongoengine``. Another option is
`pipenv <https://docs.pipenv.org/>`_. You can then use ``pipenv install mongoengine``
@@ -42,13 +42,14 @@ to both create the virtual environment and install the package. Otherwise, you c
download the source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and
run ``python setup.py install``.

The support for Python2 was dropped with MongoEngine 0.20.0

Dependencies
============
All of the dependencies can easily be installed via `pip <https://pip.pypa.io/>`_.
All of the dependencies can easily be installed via `python -m pip <https://pip.pypa.io/>`_.
At the very least, you'll need these two packages to use MongoEngine:

- pymongo>=3.4
- six>=1.10.0

If you utilize a ``DateTimeField``, you might also use a more flexible date parser:

@@ -58,6 +59,10 @@ If you need to use an ``ImageField`` or ``ImageGridFsProxy``:

- Pillow>=2.0.0

If you need to use signals:

- blinker>=1.3
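
As a hedged illustration of what the signals dependency is for (the model and handler below are made up for this sketch, not taken from the README):

.. code-block:: python

    from mongoengine import Document, StringField, signals

    class Author(Document):
        # Illustrative model, not part of the README.
        name = StringField()

    def on_post_save(sender, document, **kwargs):
        # Runs after every Author.save(); signal delivery requires blinker.
        print("saved:", document.name)

    signals.post_save.connect(on_post_save, sender=Author)
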

Examples
========
Some simple examples of what MongoEngine code looks like:
@@ -91,12 +96,11 @@ Some simple examples of what MongoEngine code looks like:

    # Iterate over all posts using the BlogPost superclass
    >>> for post in BlogPost.objects:
    ...     print '===', post.title, '==='
    ...     print('===', post.title, '===')
    ...     if isinstance(post, TextPost):
    ...         print post.content
    ...         print(post.content)
    ...     elif isinstance(post, LinkPost):
    ...         print 'Link:', post.url
    ...         print
    ...         print('Link:', post.url)
    ...

    # Count all blog posts and its subtypes
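
The doctest above relies on the document classes defined earlier in the README; a minimal sketch of what such definitions look like (field names beyond ``title``, ``content``, and ``url`` are assumptions):

.. code-block:: python

    from mongoengine import Document, StringField

    class BlogPost(Document):
        title = StringField(required=True)
        # Allow TextPost and LinkPost to live in the same collection.
        meta = {"allow_inheritance": True}

    class TextPost(BlogPost):
        content = StringField()

    class LinkPost(BlogPost):
        url = StringField()
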
@@ -116,7 +120,8 @@ Some simple examples of what MongoEngine code looks like:

Tests
=====
To run the test suite, ensure you are running a local instance of MongoDB on
the standard port and have ``nose`` installed. Then, run ``python setup.py nosetests``.
the standard port and have ``pytest`` installed. Then, run ``python setup.py test``
or simply ``pytest``.

To run the test suite on every supported Python and PyMongo version, you can
use ``tox``. You'll need to make sure you have each supported Python version
@@ -125,20 +130,18 @@ installed in your environment and then:

.. code-block:: shell

    # Install tox
    $ pip install tox
    $ python -m pip install tox
    # Run the test suites
    $ tox

If you wish to run a subset of tests, use the nosetests convention:
If you wish to run a subset of tests, use the pytest convention:

.. code-block:: shell

    # Run all the tests in a particular test file
    $ python setup.py nosetests --tests tests/fields/fields.py
    $ pytest tests/fields/test_fields.py
    # Run only particular test class in that file
    $ python setup.py nosetests --tests tests/fields/fields.py:FieldTest
    # Use the -s option if you want to print some debug statements or use pdb
    $ python setup.py nosetests --tests tests/fields/fields.py:FieldTest -s
    $ pytest tests/fields/test_fields.py::TestField

Community
=========

@@ -1,11 +1,18 @@
from timeit import repeat

import mongoengine
from mongoengine import (BooleanField, Document, EmailField, EmbeddedDocument,
                         EmbeddedDocumentField, IntField, ListField,
                         StringField)
from mongoengine import (
    BooleanField,
    Document,
    EmailField,
    EmbeddedDocument,
    EmbeddedDocumentField,
    IntField,
    ListField,
    StringField,
)

mongoengine.connect(db='mongoengine_benchmark_test')
mongoengine.connect(db="mongoengine_benchmark_test")


def timeit(f, n=10000):
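
The helper's body falls outside this hunk; a plausible sketch consistent with the ``from timeit import repeat`` import above (not the verbatim source) is:

.. code-block:: python

    def timeit(f, n=10000):
        # Call f in batches of n, repeat 3 times, and return the
        # best average per-call time in seconds.
        return min(repeat(f, repeat=3, number=n)) / float(n)
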
@@ -24,46 +31,41 @@ def test_basic():

    def init_book():
        return Book(
            name='Always be closing',
            name="Always be closing",
            pages=100,
            tags=['self-help', 'sales'],
            tags=["self-help", "sales"],
            is_published=True,
            author_email='alec@example.com',
            author_email="alec@example.com",
        )

    print('Doc initialization: %.3fus' % (timeit(init_book, 1000) * 10**6))
    print("Doc initialization: %.3fus" % (timeit(init_book, 1000) * 10 ** 6))

    b = init_book()
    print('Doc getattr: %.3fus' % (timeit(lambda: b.name, 10000) * 10**6))
    print("Doc getattr: %.3fus" % (timeit(lambda: b.name, 10000) * 10 ** 6))

    print(
        'Doc setattr: %.3fus' % (
            timeit(lambda: setattr(b, 'name', 'New name'), 10000) * 10**6
        )
        "Doc setattr: %.3fus"
        % (timeit(lambda: setattr(b, "name", "New name"), 10000) * 10 ** 6)
    )

    print('Doc to mongo: %.3fus' % (timeit(b.to_mongo, 1000) * 10**6))
    print("Doc to mongo: %.3fus" % (timeit(b.to_mongo, 1000) * 10 ** 6))

    print('Doc validation: %.3fus' % (timeit(b.validate, 1000) * 10**6))
    print("Doc validation: %.3fus" % (timeit(b.validate, 1000) * 10 ** 6))

    def save_book():
        b._mark_as_changed('name')
        b._mark_as_changed('tags')
        b._mark_as_changed("name")
        b._mark_as_changed("tags")
        b.save()

    print('Save to database: %.3fus' % (timeit(save_book, 100) * 10**6))
    print("Save to database: %.3fus" % (timeit(save_book, 100) * 10 ** 6))

    son = b.to_mongo()
    print(
        'Load from SON: %.3fus' % (
            timeit(lambda: Book._from_son(son), 1000) * 10**6
        )
        "Load from SON: %.3fus" % (timeit(lambda: Book._from_son(son), 1000) * 10 ** 6)
    )

    print(
        'Load from database: %.3fus' % (
            timeit(lambda: Book.objects[0], 100) * 10**6
        )
        "Load from database: %.3fus" % (timeit(lambda: Book.objects[0], 100) * 10 ** 6)
    )

    def create_and_delete_book():
@@ -72,9 +74,8 @@ def test_basic():
        b.delete()

    print(
        'Init + save to database + delete: %.3fms' % (
            timeit(create_and_delete_book, 10) * 10**3
        )
        "Init + save to database + delete: %.3fms"
        % (timeit(create_and_delete_book, 10) * 10 ** 3)
    )


@@ -92,42 +93,36 @@ def test_big_doc():

    def init_company():
        return Company(
            name='MongoDB, Inc.',
            name="MongoDB, Inc.",
            contacts=[
                Contact(
                    name='Contact %d' % x,
                    title='CEO',
                    address='Address %d' % x,
                )
                Contact(name="Contact %d" % x, title="CEO", address="Address %d" % x)
                for x in range(1000)
            ]
            ],
        )

    company = init_company()
    print('Big doc to mongo: %.3fms' % (timeit(company.to_mongo, 100) * 10**3))
    print("Big doc to mongo: %.3fms" % (timeit(company.to_mongo, 100) * 10 ** 3))

    print('Big doc validation: %.3fms' % (timeit(company.validate, 1000) * 10**3))
    print("Big doc validation: %.3fms" % (timeit(company.validate, 1000) * 10 ** 3))

    company.save()

    def save_company():
        company._mark_as_changed('name')
        company._mark_as_changed('contacts')
        company._mark_as_changed("name")
        company._mark_as_changed("contacts")
        company.save()

    print('Save to database: %.3fms' % (timeit(save_company, 100) * 10**3))
    print("Save to database: %.3fms" % (timeit(save_company, 100) * 10 ** 3))

    son = company.to_mongo()
    print(
        'Load from SON: %.3fms' % (
            timeit(lambda: Company._from_son(son), 100) * 10**3
        )
        "Load from SON: %.3fms"
        % (timeit(lambda: Company._from_son(son), 100) * 10 ** 3)
    )

    print(
        'Load from database: %.3fms' % (
            timeit(lambda: Company.objects[0], 100) * 10**3
        )
        "Load from database: %.3fms"
        % (timeit(lambda: Company.objects[0], 100) * 10 ** 3)
    )

    def create_and_delete_company():
@@ -136,13 +131,12 @@ def test_big_doc():
        c.delete()

    print(
        'Init + save to database + delete: %.3fms' % (
            timeit(create_and_delete_company, 10) * 10**3
        )
        "Init + save to database + delete: %.3fms"
        % (timeit(create_and_delete_company, 10) * 10 ** 3)
    )


if __name__ == '__main__':
if __name__ == "__main__":
    test_basic()
    print('-' * 100)
    print("-" * 100)
    test_big_doc()

@@ -4,12 +4,14 @@ import timeit

def main():
    setup = """
from pymongo import MongoClient

connection = MongoClient()
connection.drop_database('mongoengine_benchmark_test')
"""

    stmt = """
from pymongo import MongoClient

connection = MongoClient()

db = connection.mongoengine_benchmark_test
@@ -26,10 +28,10 @@ myNoddys = noddy.find()
[n for n in myNoddys]  # iterate
"""

    print('-' * 100)
    print('PyMongo: Creating 10000 dictionaries.')
    print("-" * 100)
    print("PyMongo: Creating 10000 dictionaries.")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print('{}s'.format(t.timeit(1)))
    print("{}s".format(t.timeit(1)))

    stmt = """
from pymongo import MongoClient, WriteConcern
@@ -49,13 +51,14 @@ myNoddys = noddy.find()
[n for n in myNoddys]  # iterate
"""

    print('-' * 100)
    print("-" * 100)
    print('PyMongo: Creating 10000 dictionaries (write_concern={"w": 0}).')
    t = timeit.Timer(stmt=stmt, setup=setup)
    print('{}s'.format(t.timeit(1)))
    print("{}s".format(t.timeit(1)))

    setup = """
from pymongo import MongoClient

connection = MongoClient()
connection.drop_database('mongoengine_benchmark_test')
connection.close()
@@ -78,10 +81,10 @@ myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print('-' * 100)
    print('MongoEngine: Creating 10000 dictionaries.')
    print("-" * 100)
    print("MongoEngine: Creating 10000 dictionaries.")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print('{}s'.format(t.timeit(1)))
    print("{}s".format(t.timeit(1)))

    stmt = """
for i in range(10000):
@@ -96,10 +99,10 @@ myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print('-' * 100)
    print('MongoEngine: Creating 10000 dictionaries (using a single field assignment).')
    print("-" * 100)
    print("MongoEngine: Creating 10000 dictionaries (using a single field assignment).")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print('{}s'.format(t.timeit(1)))
    print("{}s".format(t.timeit(1)))

    stmt = """
for i in range(10000):
@@ -112,10 +115,10 @@ myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print('-' * 100)
    print("-" * 100)
    print('MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}).')
    t = timeit.Timer(stmt=stmt, setup=setup)
    print('{}s'.format(t.timeit(1)))
    print("{}s".format(t.timeit(1)))

    stmt = """
for i in range(10000):
@@ -128,10 +131,12 @@ myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print('-' * 100)
    print('MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}, validate=False).')
    print("-" * 100)
    print(
        'MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}, validate=False).'
    )
    t = timeit.Timer(stmt=stmt, setup=setup)
    print('{}s'.format(t.timeit(1)))
    print("{}s".format(t.timeit(1)))

    stmt = """
for i in range(10000):
@@ -144,10 +149,12 @@ myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print('-' * 100)
    print('MongoEngine: Creating 10000 dictionaries (force_insert=True, write_concern={"w": 0}, validate=False).')
    print("-" * 100)
    print(
        'MongoEngine: Creating 10000 dictionaries (force_insert=True, write_concern={"w": 0}, validate=False).'
    )
    t = timeit.Timer(stmt=stmt, setup=setup)
    print('{}s'.format(t.timeit(1)))
    print("{}s".format(t.timeit(1)))


if __name__ == "__main__":

@@ -6,208 +6,264 @@ Changelog

Development
===========
- (Fill this out as you fix issues and develop your features).
- When using pymongo >= 3.7, make use of Collection.count_documents instead of Collection.count
  and Cursor.count that got deprecated in pymongo >= 3.7.
  This should have a negative impact on performance of count see Issue #2219
- Fix a bug that made the queryset drop the read_preference after clone().
- Fix the behavior of Doc.objects.limit(0) which should return all documents (similar to mongodb) #2311
- Bug fix in ListField when updating the first item, it was saving the whole list, instead of
  just replacing the first item (as it's usually done) #2392
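
A quick sketch of the ``count``/``limit(0)`` entries above (the ``Page`` model is illustrative):

.. code-block:: python

    from mongoengine import Document, StringField, connect

    connect(db="changelog_demo")

    class Page(Document):
        title = StringField()

    Page(title="a").save()
    Page(title="b").save()

    # limit(0) now means "no limit", matching MongoDB's semantics (#2311).
    assert Page.objects.limit(0).count() == 2

    # On PyMongo >= 3.7 this count is backed by Collection.count_documents.
    assert Page.objects(title="a").count() == 1
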

Changes in 0.20.0
=================
- ATTENTION: Drop support for Python2
- Add Mongo 4.0 to Travis
- Fix error when setting a string as a ComplexDateTimeField #2253
- Bump development Status classifier to Production/Stable #2232
- Improve Queryset.get to avoid confusing MultipleObjectsReturned message in case multiple match are found #630
- Fixed a bug causing inaccurate query results, while combining ``__raw__`` and regular filters for the same field #2264
- Add support for the `elemMatch` projection operator in .fields() (e.g BlogPost.objects.fields(elemMatch__comments="test")) #2267
- DictField validate failed without default connection (bug introduced in 0.19.0) #2239
- Remove methods that were deprecated years ago:
  - name parameter in Field constructor e.g `StringField(name="...")`, was replaced by db_field
  - Queryset.slave_okay() was deprecated since pymongo3
  - dropDups was dropped with MongoDB3
  - ``Queryset._ensure_indexes`` and ``Queryset.ensure_indexes``, the right method to use is ``Document.ensure_indexes``
- Added pre-commit for development/CI #2212
- Renamed requirements-lint.txt to requirements-dev.txt #2212
- Support for setting ReadConcern #2255
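
The ``elemMatch`` projection entry above, spelled out as a sketch (the document layout is assumed):

.. code-block:: python

    from mongoengine import Document, ListField, StringField

    class BlogPost(Document):
        title = StringField()
        comments = ListField(StringField())

    # Keep only the first comment equal to "test" per returned post (#2267),
    # mirroring MongoDB's {"comments": {"$elemMatch": ...}} projection.
    posts = BlogPost.objects.fields(elemMatch__comments="test")
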

Changes in 0.19.1
=================
- Tests require Pillow < 7.0.0 as it dropped Python2 support
- DEPRECATION: The interface of ``QuerySet.aggregate`` method was changed, it no longer takes an unpacked list of
  pipeline steps (*pipeline) but simply takes the pipeline list just like ``pymongo.Collection.aggregate`` does. #2079
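
In code, the deprecation above means passing the pipeline as one list, the way ``pymongo.Collection.aggregate`` does (``BlogPost`` is a placeholder model):

.. code-block:: python

    pipeline = [
        {"$match": {"is_published": True}},
        {"$group": {"_id": "$author", "total": {"$sum": 1}}},
    ]

    # New style: a single pipeline list (#2079).
    results = list(BlogPost.objects.aggregate(pipeline))

    # Old, deprecated style unpacked the steps:
    # results = list(BlogPost.objects.aggregate(*pipeline))
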

Changes in 0.19.0
=================
- BREAKING CHANGE: ``class_check`` and ``read_preference`` keyword arguments are no longer available when filtering a ``QuerySet``. #2112
  - Instead of ``Doc.objects(foo=bar, read_preference=...)`` use ``Doc.objects(foo=bar).read_preference(...)``.
  - Instead of ``Doc.objects(foo=bar, class_check=False)`` use ``Doc.objects(foo=bar).clear_cls_query(...)``.
  - This change also renames the private ``QuerySet._initial_query`` attribute to ``_cls_query``.
- BREAKING CHANGE: Removed the deprecated ``format`` param from ``QuerySet.explain``. #2113
- BREAKING CHANGE: Renamed ``MongoEngineConnectionError`` to ``ConnectionFailure``. #2111
  - If you catch/use ``MongoEngineConnectionError`` in your code, you'll have to rename it.
- BREAKING CHANGE: Positional arguments when instantiating a document are no longer supported. #2103
  - From now on keyword arguments (e.g. ``Doc(field_name=value)``) are required.
- BREAKING CHANGE: A ``LazyReferenceField`` is now stored in the ``_data`` field of its parent as a ``DBRef``, ``Document``, or ``EmbeddedDocument`` (``ObjectId`` is no longer allowed). #2182
- DEPRECATION: ``Q.empty`` & ``QNode.empty`` are marked as deprecated and will be removed in a next version of MongoEngine. #2210
  - Added ability to check if Q or QNode are empty by parsing them to bool.
  - Instead of ``Q(name="John").empty`` use ``not Q(name="John")``.
- Fix updating/modifying/deleting/reloading a document that's sharded by a field with ``db_field`` specified. #2125
- Only set no_cursor_timeout when requested (fixes an incompatibility with MongoDB 4.2) #2148
- ``ListField`` now accepts an optional ``max_length`` parameter. #2110
- Improve error message related to InvalidDocumentError #2180
- Added BulkWriteError to replace NotUniqueError which was misleading in bulk write insert #2152
- Added ability to compare Q and Q operations #2204
- Added ability to use a db alias on query_counter #2194
- Added ability to specify collations for querysets with ``Doc.objects.collation`` #2024
- Fix updates of a list field by negative index #2094
- Switch from nosetest to pytest as test runner #2114
- The codebase is now formatted using ``black``. #2109
- Documentation improvements:
  - Documented how `pymongo.monitoring` can be used to log all queries issued by MongoEngine to the driver.
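
A before/after sketch of the 0.19.0 filtering changes listed above (``Doc`` is a placeholder):

.. code-block:: python

    from mongoengine import Q
    from pymongo.read_preferences import ReadPreference

    # Before 0.19.0 (no longer supported):
    #   Doc.objects(foo="bar", read_preference=ReadPreference.SECONDARY)
    #   Doc.objects(foo="bar", class_check=False)

    # Since 0.19.0, chain the queryset methods instead (#2112):
    docs = Doc.objects(foo="bar").read_preference(ReadPreference.SECONDARY)
    docs = Doc.objects(foo="bar").clear_cls_query()

    # Q emptiness is now checked with bool() rather than .empty (#2210):
    assert not Q()
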

Changes in 0.18.2
=================
- Replace deprecated PyMongo v2.x methods with their v3.x equivalents in the ``SequenceField``. #2097
- Various code clarity and documentation improvements.

Changes in 0.18.1
=================
- Fix a bug introduced in 0.18.0 which was causing `.save()` to update all the fields
  instead of updating only the modified fields. This bug only occurs when using custom pk #2082
- Add Python 3.7 in travis #2058
- Fix a bug introduced in 0.18.0 which was causing ``Document.save`` to update all the fields instead of updating only the modified fields. This bug only occurred when using a custom PK. #2082
- Add Python 3.7 to Travis CI. #2058

Changes in 0.18.0
=================
- Drop support for EOL'd MongoDB v2.6, v3.0, and v3.2.
- MongoEngine now requires PyMongo >= v3.4. Travis CI now tests against MongoDB v3.4 – v3.6 and PyMongo v3.4 – v3.6 (#2017 #2066).
- Improve performance by avoiding a call to `to_mongo` in `Document.save()` #2049
- MongoEngine now requires PyMongo >= v3.4. Travis CI now tests against MongoDB v3.4 – v3.6 and PyMongo v3.4 – v3.6. #2017 #2066
- Improve performance by avoiding a call to ``to_mongo`` in ``Document.save``. #2049
- Connection/disconnection improvements:
  - Expose `mongoengine.connection.disconnect` and `mongoengine.connection.disconnect_all`
  - Fix disconnecting #566 #1599 #605 #607 #1213 #565
  - Improve documentation of `connect`/`disconnect`
  - Fix issue when using multiple connections to the same mongo with different credentials #2047
  - `connect` fails immediately when db name contains invalid characters #2031 #1718
- Fix the default write concern of `Document.save` that was overwriting the connection write concern #568
- Fix querying on `List(EmbeddedDocument)` subclasses fields #1961 #1492
- Fix querying on `(Generic)EmbeddedDocument` subclasses fields #475
- Fix `QuerySet.aggregate` so that it takes limit and skip value into account #2029
- Generate unique indices for `SortedListField` and `EmbeddedDocumentListFields` #2020
- BREAKING CHANGE: Changed the behavior of a custom field validator (i.e `validation` parameter of a `Field`). It is now expected to raise a `ValidationError` instead of returning True/False #2050
- BREAKING CHANGES (associated with connect/disconnect fixes):
  - Calling `connect` 2 times with the same alias and different parameter will raise an error (should call `disconnect` first).
  - `disconnect` now clears `mongoengine.connection._connection_settings`.
  - `disconnect` now clears the cached attribute `Document._collection`.
- BREAKING CHANGE: `EmbeddedDocument.save` & `.reload` is no longier exist #1552
  - Expose ``mongoengine.connection.disconnect`` and ``mongoengine.connection.disconnect_all``.
  - Fix disconnecting. #566 #1599 #605 #607 #1213 #565
  - Improve documentation of ``connect``/``disconnect``.
  - Fix issue when using multiple connections to the same mongo with different credentials. #2047
  - ``connect`` fails immediately when db name contains invalid characters. #2031 #1718
- Fix the default write concern of ``Document.save`` that was overwriting the connection write concern. #568
- Fix querying on ``List(EmbeddedDocument)`` subclasses fields. #1961 #1492
- Fix querying on ``(Generic)EmbeddedDocument`` subclasses fields. #475
- Fix ``QuerySet.aggregate`` so that it takes limit and skip value into account. #2029
- Generate unique indices for ``SortedListField`` and ``EmbeddedDocumentListFields``. #2020
- BREAKING CHANGE: Changed the behavior of a custom field validator (i.e ``validation`` parameter of a ``Field``). It is now expected to raise a ``ValidationError`` instead of returning ``True``/``False``. #2050
- BREAKING CHANGES (associated with connection/disconnection fixes):
  - Calling ``connect`` 2 times with the same alias and different parameter will raise an error (should call ``disconnect`` first).
  - ``disconnect`` now clears ``mongoengine.connection._connection_settings``.
  - ``disconnect`` now clears the cached attribute ``Document._collection``.
- BREAKING CHANGE: ``EmbeddedDocument.save`` & ``.reload`` no longer exist. #1552
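
The custom-validator breaking change above looks like this in practice (the field and message are illustrative):

.. code-block:: python

    from mongoengine import Document, StringField, ValidationError

    def validate_slug(value):
        # Since 0.18.0 a custom validator signals failure by raising
        # ValidationError instead of returning True/False (#2050).
        if " " in value:
            raise ValidationError("slugs must not contain spaces")

    class Article(Document):
        slug = StringField(validation=validate_slug)
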

Changes in 0.17.0
=================
- Fix .only() working improperly after using .count() of the same instance of QuerySet
- Fix batch_size that was not copied when cloning a queryset object #2011
- POTENTIAL BREAKING CHANGE: All result fields are now passed, including internal fields (_cls, _id) when using `QuerySet.as_pymongo` #1976
- Document a BREAKING CHANGE introduced in 0.15.3 and not reported at that time (#1995)
- Fix InvalidStringData error when using modify on a BinaryField #1127
- DEPRECATION: `EmbeddedDocument.save` & `.reload` are marked as deprecated and will be removed in a next version of mongoengine #1552
- Fix test suite and CI to support MongoDB 3.4 #1445
- Fix reference fields querying the database on each access if value contains orphan DBRefs
- POTENTIAL BREAKING CHANGE: All result fields are now passed, including internal fields (``_cls``, ``_id``) when using ``QuerySet.as_pymongo``. #1976
- Document a BREAKING CHANGE introduced in 0.15.3 and not reported at that time. #1995
- DEPRECATION: ``EmbeddedDocument.save`` & ``.reload`` are marked as deprecated and will be removed in a next version of MongoEngine. #1552
- Fix ``QuerySet.only`` working improperly after using ``QuerySet.count`` of the same instance of a ``QuerySet``.
- Fix ``batch_size`` that was not copied when cloning a ``QuerySet`` object. #2011
- Fix ``InvalidStringData`` error when using ``modify`` on a ``BinaryField``. #1127
- Fix test suite and CI to support MongoDB v3.4. #1445
- Fix reference fields querying the database on each access if value contains orphan DBRefs.

=================
Changes in 0.16.3
=================
- Fix $push with $position operator not working with lists in embedded document #1965
- Fix ``$push`` with the ``$position`` operator not working with lists in embedded documents. #1965

=================
Changes in 0.16.2
=================
- Fix .save() that fails when called with write_concern=None (regression of 0.16.1) #1958
- Fix ``Document.save`` that fails when called with ``write_concern=None`` (regression of 0.16.1). #1958

=================
Changes in 0.16.1
=================
- Fix `_cls` that is not set properly in Document constructor (regression) #1950
- Fix bug in _delta method - Update of a ListField depends on an unrelated dynamic field update #1733
- Remove deprecated `save()` method and used `insert_one()` #1899
- Fix ``_cls`` that is not set properly in the ``Document`` constructor (regression). #1950
- Fix a bug in the ``_delta`` method - update of a ``ListField`` depends on an unrelated dynamic field update. #1733
- Remove PyMongo's deprecated ``Collection.save`` method and use ``Collection.insert_one`` instead. #1899

=================
Changes in 0.16.0
=================
- Various improvements to the doc
- Improvement to code quality
- POTENTIAL BREAKING CHANGES:
  - EmbeddedDocumentField will no longer accept references to Document classes in its constructor #1661
  - Get rid of the `basecls` parameter from the DictField constructor (dead code) #1876
  - default value of ComplexDateTime is now None (and no longer the current datetime) #1368
- Fix unhashable TypeError when referencing a Document with a compound key in an EmbeddedDocument #1685
- Fix bug where an EmbeddedDocument with the same id as its parent would not be tracked for changes #1768
- Fix the fact that bulk `insert()` was not setting primary keys of inserted documents instances #1919
- Fix bug when referencing the abstract class in a ReferenceField #1920
- Allow modification to the document made in pre_save_post_validation to be taken into account #1202
- Replaced MongoDB 2.4 tests in CI by MongoDB 3.2 #1903
- Fix side effects of using queryset.`no_dereference` on other documents #1677
- Fix TypeError when using lazy django translation objects as translated choices #1879
- Improve 2-3 codebase compatibility #1889
- Fix the support for changing the default value of ComplexDateTime #1368
- Improves error message in case an EmbeddedDocumentListField receives an EmbeddedDocument instance
  instead of a list #1877
- Fix the Decimal operator inc/dec #1517 #1320
- Ignore killcursors queries in `query_counter` context manager #1869
- Fix the fact that `query_counter` was modifying the initial profiling_level in case it was != 0 #1870
- Repaired the `no_sub_classes` context manager + fix the fact that it was swallowing exceptions #1865
- Fix index creation error that was swallowed by hasattr under python2 #1688
- QuerySet limit function behaviour: Passing 0 as parameter will return all the documents in the cursor #1611
- bulk insert updates the ids of the input documents instances #1919
- Fix an harmless bug related to GenericReferenceField where modifications in the generic-referenced document
  were tracked in the parent #1934
- Improve validator of BinaryField #273
- Implemented lazy regex compiling in Field classes to improve 'import mongoengine' performance #1806
- Updated GridFSProxy.__str__ so that it would always print both the filename and grid_id #710
- Add __repr__ to Q and QCombination #1843
- fix bug in BaseList.__iter__ operator (was occuring when modifying a BaseList while iterating over it) #1676
- Added field `DateField`#513
- ``EmbeddedDocumentField`` will no longer accept references to Document classes in its constructor. #1661
- Get rid of the ``basecls`` parameter from the ``DictField`` constructor (dead code). #1876
- Default value of the ``ComplexDateTime`` field is now ``None`` (and no longer the current datetime). #1368
- Fix an unhashable ``TypeError`` when referencing a ``Document`` with a compound key in an ``EmbeddedDocument``. #1685
- Fix a bug where an ``EmbeddedDocument`` with the same id as its parent would not be tracked for changes. #1768
- Fix the fact that a bulk ``QuerySet.insert`` was not setting primary keys of inserted document instances. #1919
- Fix a bug when referencing an abstract class in a ``ReferenceField``. #1920
- Allow modifications to the document made in ``pre_save_post_validation`` to be taken into account. #1202
- Replace MongoDB v2.4 tests in Travis CI with MongoDB v3.2. #1903
- Fix side effects of using ``QuerySet.no_dereference`` on other documents. #1677
- Fix ``TypeError`` when using lazy Django translation objects as translated choices. #1879
- Improve Python 2-3 codebase compatibility. #1889
- Fix support for changing the default value of the ``ComplexDateTime`` field. #1368
- Improve error message in case an ``EmbeddedDocumentListField`` receives an ``EmbeddedDocument`` instance instead of a list. #1877
- Fix the ``inc`` and ``dec`` operators for the ``DecimalField``. #1517 #1320
- Ignore ``killcursors`` queries in ``query_counter`` context manager. #1869
- Fix the fact that ``query_counter`` was modifying the initial profiling level in case it was != 0. #1870
- Repair the ``no_sub_classes`` context manager + fix the fact that it was swallowing exceptions. #1865
- Fix index creation error that was swallowed by ``hasattr`` under Python 2. #1688
- ``QuerySet.limit`` function behaviour: Passing 0 as parameter will return all the documents in the cursor. #1611
- Bulk insert updates the IDs of the input documents instances. #1919
- Fix a harmless bug related to ``GenericReferenceField`` where modifications in the generic-referenced document were tracked in the parent. #1934
- Improve validation of the ``BinaryField``. #273
- Implement lazy regex compiling in Field classes to improve ``import mongoengine`` performance. #1806
- Update ``GridFSProxy.__str__`` so that it would always print both the filename and grid_id. #710
- Add ``__repr__`` to ``Q`` and ``QCombination`` classes. #1843
- Fix bug in the ``BaseList.__iter__`` operator (was occuring when modifying a BaseList while iterating over it). #1676
- Add a ``DateField``. #513
- Various improvements to the documentation.
- Various code quality improvements.

Changes in 0.15.3
=================
- BREAKING CHANGES: `Queryset.update/update_one` methods now returns an UpdateResult when `full_result=True` is provided and no longer a dict (relates to #1491)
- Subfield resolve error in generic_emdedded_document query #1651 #1652
- use each modifier only with $position #1673 #1675
- Improve LazyReferenceField and GenericLazyReferenceField with nested fields #1704
- Fix validation error instance in GenericEmbeddedDocumentField #1067
- Update cached fields when fields argument is given #1712
- Add a db parameter to register_connection for compatibility with connect
- Use insert_one, insert_many in Document.insert #1491
- Use new update_one, update_many on document/queryset update #1491
- Use insert_one, insert_many in Document.insert #1491
- Fix reload(fields) affect changed fields #1371
- Fix Read-only access to database fails when trying to create indexes #1338
- ``Queryset.update/update_one`` methods now return an ``UpdateResult`` when ``full_result=True`` is provided and no longer a dict. #1491
- Improve ``LazyReferenceField`` and ``GenericLazyReferenceField`` with nested fields. #1704
- Fix the subfield resolve error in ``generic_emdedded_document`` query. #1651 #1652
- Use each modifier only with ``$position``. #1673 #1675
- Fix validation errors in the ``GenericEmbeddedDocumentField``. #1067
- Update cached fields when a ``fields`` argument is given. #1712
- Add a ``db`` parameter to ``register_connection`` for compatibility with ``connect``.
- Use PyMongo v3.x's ``insert_one`` and ``insert_many`` in ``Document.insert``. #1491
- Use PyMongo v3.x's ``update_one`` and ``update_many`` in ``Document.update`` and ``QuerySet.update``. #1491
- Fix how ``reload(fields)`` affects changed fields. #1371
- Fix a bug where the read-only access to the database fails when trying to create indexes. #1338

Changes in 0.15.0
|
||||
=================
|
||||
- Add LazyReferenceField and GenericLazyReferenceField to address #1230
|
||||
- Add ``LazyReferenceField`` and ``GenericLazyReferenceField``. #1230
|
||||
|
||||
Changes in 0.14.1
|
||||
=================
|
||||
- Removed SemiStrictDict and started using a regular dict for `BaseDocument._data` #1630
|
||||
- Added support for the `$position` param in the `$push` operator #1566
|
||||
- Fixed `DateTimeField` interpreting an empty string as today #1533
|
||||
- Added a missing `__ne__` method to the `GridFSProxy` class #1632
|
||||
- Fixed `BaseQuerySet._fields_to_db_fields` #1553
|
||||
- Remove ``SemiStrictDict`` and start using a regular dict for ``BaseDocument._data``. #1630
|
||||
- Add support for the ``$position`` param in the ``$push`` operator. #1566
|
||||
- Fix ``DateTimeField`` interpreting an empty string as today. #1533
|
||||
- Add a missing ``__ne__`` method to the ``GridFSProxy`` class. #1632
|
||||
- Fix ``BaseQuerySet._fields_to_db_fields``. #1553
|
||||
|
||||
Changes in 0.14.0
|
||||
=================
|
||||
- BREAKING CHANGE: Removed the `coerce_types` param from `QuerySet.as_pymongo` #1549
|
||||
- POTENTIAL BREAKING CHANGE: Made EmbeddedDocument not hashable by default #1528
|
||||
- Improved code quality #1531, #1540, #1541, #1547
|
||||
- BREAKING CHANGE: Remove the ``coerce_types`` param from ``QuerySet.as_pymongo``. #1549
|
||||
- POTENTIAL BREAKING CHANGE: Make ``EmbeddedDocument`` not hashable by default. #1528
|
||||
- Improve code quality. #1531, #1540, #1541, #1547
|
||||
|
||||
Changes in 0.13.0
|
||||
=================
|
||||
- POTENTIAL BREAKING CHANGE: Added Unicode support to the `EmailField`, see
|
||||
docs/upgrade.rst for details.
|
||||
- POTENTIAL BREAKING CHANGE: Added Unicode support to the ``EmailField``, see docs/upgrade.rst for details.
|
||||
|
||||
Changes in 0.12.0
|
||||
=================
|
||||
- POTENTIAL BREAKING CHANGE: Fixed limit/skip/hint/batch_size chaining #1476
|
||||
- POTENTIAL BREAKING CHANGE: Changed a public `QuerySet.clone_into` method to a private `QuerySet._clone_into` #1476
|
||||
- Fixed the way `Document.objects.create` works with duplicate IDs #1485
|
||||
- Fixed connecting to a replica set with PyMongo 2.x #1436
|
||||
- Fixed using sets in field choices #1481
|
||||
- Fixed deleting items from a `ListField` #1318
|
||||
- Fixed an obscure error message when filtering by `field__in=non_iterable`. #1237
|
||||
- Fixed behavior of a `dec` update operator #1450
|
||||
- Added a `rename` update operator #1454
|
||||
- Added validation for the `db_field` parameter #1448
|
||||
- Fixed the error message displayed when querying an `EmbeddedDocumentField` by an invalid value #1440
|
||||
- Fixed the error message displayed when validating unicode URLs #1486
|
||||
- Raise an error when trying to save an abstract document #1449
|
||||
- POTENTIAL BREAKING CHANGE: Fix ``limit``/``skip``/``hint``/``batch_size`` chaining. #1476
|
||||
- POTENTIAL BREAKING CHANGE: Change a public ``QuerySet.clone_into`` method to a private ``QuerySet._clone_into``. #1476
|
||||
- Fix the way ``Document.objects.create`` works with duplicate IDs. #1485
|
||||
- Fix connecting to a replica set with PyMongo 2.x. #1436
|
||||
- Fix using sets in field choices. #1481
|
||||
- Fix deleting items from a ``ListField``. #1318
|
||||
- Fix an obscure error message when filtering by ``field__in=non_iterable``. #1237
|
||||
- Fix behavior of a ``dec`` update operator. #1450
|
||||
- Add a ``rename`` update operator. #1454
|
||||
- Add validation for the ``db_field`` parameter. #1448
|
||||
- Fix the error message displayed when querying an ``EmbeddedDocumentField`` by an invalid value. #1440
|
||||
- Fix the error message displayed when validating Unicode URLs. #1486
|
||||
- Raise an error when trying to save an abstract document. #1449
|
||||
|
||||
Changes in 0.11.0
|
||||
=================
|
||||
- BREAKING CHANGE: Renamed `ConnectionError` to `MongoEngineConnectionError` since the former is a built-in exception name in Python v3.x. #1428
|
||||
- BREAKING CHANGE: Dropped Python 2.6 support. #1428
|
||||
- BREAKING CHANGE: `from mongoengine.base import ErrorClass` won't work anymore for any error from `mongoengine.errors` (e.g. `ValidationError`). Use `from mongoengine.errors import ErrorClass instead`. #1428
|
||||
- BREAKING CHANGE: Accessing a broken reference will raise a `DoesNotExist` error. In the past it used to return `None`. #1334
|
||||
- Fixed absent rounding for DecimalField when `force_string` is set. #1103
|
||||
- BREAKING CHANGE: Rename ``ConnectionError`` to ``MongoEngineConnectionError`` since the former is a built-in exception name in Python v3.x. #1428
|
||||
- BREAKING CHANGE: Drop Python v2.6 support. #1428
|
||||
- BREAKING CHANGE: ``from mongoengine.base import ErrorClass`` won't work anymore for any error from ``mongoengine.errors`` (e.g. ``ValidationError``). Use ``from mongoengine.errors import ErrorClass instead``. #1428
|
||||
- BREAKING CHANGE: Accessing a broken reference will raise a ``DoesNotExist`` error. In the past it used to return ``None``. #1334
|
||||
- Fix absent rounding for the ``DecimalField`` when ``force_string`` is set. #1103
|
||||
|
||||
Changes in 0.10.8
|
||||
=================
|
||||
- Added support for QuerySet.batch_size (#1426)
|
||||
- Fixed query set iteration within iteration #1427
|
||||
- Fixed an issue where specifying a MongoDB URI host would override more information than it should #1421
|
||||
- Added ability to filter the generic reference field by ObjectId and DBRef #1425
|
||||
- Fixed delete cascade for models with a custom primary key field #1247
|
||||
- Added ability to specify an authentication mechanism (e.g. X.509) #1333
|
||||
- Added support for falsey primary keys (e.g. doc.pk = 0) #1354
|
||||
- Fixed QuerySet#sum/average for fields w/ explicit db_field #1417
|
||||
- Fixed filtering by embedded_doc=None #1422
|
||||
- Added support for cursor.comment #1420
|
||||
- Fixed doc.get_<field>_display #1419
|
||||
- Fixed __repr__ method of the StrictDict #1424
|
||||
- Added a deprecation warning for Python 2.6
|
||||
- Add support for ``QuerySet.batch_size``. (#1426)
|
||||
- Fix a query set iteration within an iteration. #1427
|
||||
- Fix an issue where specifying a MongoDB URI host would override more information than it should. #1421
|
||||
- Add an ability to filter the ``GenericReferenceField`` by an ``ObjectId`` and a ``DBRef``. #1425
|
||||
- Fix cascading deletes for models with a custom primary key field. #1247
|
||||
- Add ability to specify an authentication mechanism (e.g. X.509). #1333
|
||||
- Add support for falsy primary keys (e.g. ``doc.pk = 0``). #1354
|
||||
- Fix ``QuerySet.sum/average`` for fields w/ an explicit ``db_field``. #1417
|
||||
- Fix filtering by ``embedded_doc=None``. #1422
|
||||
- Add support for ``Cursor.comment``. #1420
|
||||
- Fix ``doc.get_<field>_display`` methods. #1419
|
||||
- Fix the ``__repr__`` method of the ``StrictDict`` #1424
|
||||
- Add a deprecation warning for Python v2.6.
|
||||
|
||||
Changes in 0.10.7
|
||||
=================
|
||||
- Dropped Python 3.2 support #1390
|
||||
- Fixed the bug where dynamic doc has index inside a dict field #1278
|
||||
- Fixed: ListField minus index assignment does not work #1128
|
||||
- Fixed cascade delete mixing among collections #1224
|
||||
- Add `signal_kwargs` argument to `Document.save`, `Document.delete` and `BaseQuerySet.insert` to be passed to signals calls #1206
|
||||
- Raise `OperationError` when trying to do a `drop_collection` on document with no collection set.
|
||||
- count on ListField of EmbeddedDocumentField fails. #1187
|
||||
- Fixed long fields stored as int32 in Python 3. #1253
|
||||
- MapField now handles unicodes keys correctly. #1267
|
||||
- ListField now handles negative indicies correctly. #1270
|
||||
- Fixed AttributeError when initializing EmbeddedDocument with positional args. #681
|
||||
- Fixed no_cursor_timeout error with pymongo 3.0+ #1304
|
||||
- Replaced map-reduce based QuerySet.sum/average with aggregation-based implementations #1336
|
||||
- Fixed support for `__` to escape field names that match operators names in `update` #1351
|
||||
- Fixed BaseDocument#_mark_as_changed #1369
|
||||
- Added support for pickling QuerySet instances. #1397
|
||||
- Fixed connecting to a list of hosts #1389
|
||||
- Fixed a bug where accessing broken references wouldn't raise a DoesNotExist error #1334
|
||||
- Fixed not being able to specify use_db_field=False on ListField(EmbeddedDocumentField) instances #1218
|
||||
- Improvements to the dictionary fields docs #1383
|
||||
- Drop Python 3.2 support #1390
|
||||
- Fix a bug where a dynamic doc has an index inside a dict field. #1278
|
||||
- Fix: ``ListField`` minus index assignment does not work. #1128
|
||||
- Fix cascade delete mixing among collections. #1224
|
||||
- Add ``signal_kwargs`` argument to ``Document.save``, ``Document.delete`` and ``BaseQuerySet.insert`` to be passed to signals calls. #1206
|
||||
- Raise ``OperationError`` when trying to do a ``drop_collection`` on document with no collection set.
|
||||
- Fix a bug where a count on ``ListField`` of ``EmbeddedDocumentField`` fails. #1187
|
||||
- Fix ``LongField`` values stored as int32 in Python 3. #1253
|
||||
- ``MapField`` now handles unicode keys correctly. #1267
|
||||
- ``ListField`` now handles negative indicies correctly. #1270
|
||||
- Fix an ``AttributeError`` when initializing an ``EmbeddedDocument`` with positional args. #681
- Fix a ``no_cursor_timeout`` error with PyMongo v3.x. #1304
- Replace map-reduce based ``QuerySet.sum/average`` with aggregation-based implementations. #1336
- Fix support for ``__`` to escape field names that match operators' names in ``update``. #1351
- Fix ``BaseDocument._mark_as_changed``. #1369
- Add support for pickling ``QuerySet`` instances. #1397
- Fix connecting to a list of hosts. #1389
- Fix a bug where accessing broken references wouldn't raise a ``DoesNotExist`` error. #1334
- Fix not being able to specify ``use_db_field=False`` on ``ListField(EmbeddedDocumentField)`` instances. #1218
- Improvements to the dictionary field's docs. #1383

Changes in 0.10.6
=================
- Add support for mocking MongoEngine based on mongomock. #1151
- Fixed not being able to run tests on Windows. #1153
- Fix not being able to run tests on Windows. #1153
- Allow creation of sparse compound indexes. #1114
- count on ListField of EmbeddedDocumentField fails. #1187

Changes in 0.10.5
=================
@ -215,12 +271,12 @@ Changes in 0.10.5

Changes in 0.10.4
=================
- SaveConditionError is now importable from the top level package. #1165
- upsert_one method added. #1157
- ``SaveConditionError`` is now importable from the top level package. #1165
- Add a ``QuerySet.upsert_one`` method. #1157

Changes in 0.10.3
=================
- Fix `read_preference` (it had chaining issues with PyMongo 2.x and it didn't work at all with PyMongo 3.x) #1042
- Fix ``read_preference`` (it had chaining issues with PyMongo v2.x and it didn't work at all with PyMongo v3.x). #1042

Changes in 0.10.2
=================
@ -230,16 +286,16 @@ Changes in 0.10.2

Changes in 0.10.1
=================
- Fix infinite recursion with CASCADE delete rules under specific conditions. #1046
- Fix CachedReferenceField bug when loading cached docs as DBRef but failing to save them. #1047
- Fix ignored chained options #842
- Document save's save_condition error raises `SaveConditionError` exception #1070
- Fix Document.reload for DynamicDocument. #1050
- StrictDict & SemiStrictDict are shadowed at init time. #1105
- Fix ListField minus index assignment does not work. #1119
- Remove code that marks field as changed when the field has default but not existed in database #1126
- Remove test dependencies (nose and rednose) from install dependencies list. #1079
- Recursively build query when using elemMatch operator. #1130
- Fix infinite recursion with cascade delete rules under specific conditions. #1046
- Fix ``CachedReferenceField`` bug when loading cached docs as ``DBRef`` but failing to save them. #1047
- Fix ignored chained options. #842
- ``Document.save``'s ``save_condition`` error raises a ``SaveConditionError`` exception. #1070
- Fix ``Document.reload`` for the ``DynamicDocument``. #1050
- ``StrictDict`` & ``SemiStrictDict`` are shadowed at init time. #1105
- Fix ``ListField`` negative index assignment not working. #1119
- Remove code that marks a field as changed when the field has a default value but does not exist in the database. #1126
- Remove test dependencies (nose and rednose) from install dependencies. #1079
- Recursively build a query when using the ``elemMatch`` operator. #1130
- Fix instance back references for lists of embedded documents. #1131

Changes in 0.10.0
@ -250,7 +306,7 @@ Changes in 0.10.0
- Removed get_or_create() deprecated since 0.8.0. #300
- Improve Document._created status when switch collection and db #1020
- Queryset update doesn't go through field validation #453
- Added support for specifying authentication source as option `authSource` in URI. #967
- Added support for specifying authentication source as option ``authSource`` in URI. #967
- Fixed mark_as_changed to handle higher/lower level fields changed. #927
- ListField of embedded docs doesn't set the _instance attribute when iterating over it #914
- Support += and *= for ListField #595
@ -266,7 +322,7 @@ Changes in 0.10.0
- Fixes some internal _id handling issue. #961
- Updated URL and Email Field regex validators, added schemes argument to URLField validation. #652
- Capped collection multiple of 256. #1011
- Added `BaseQuerySet.aggregate_sum` and `BaseQuerySet.aggregate_average` methods.
- Added ``BaseQuerySet.aggregate_sum`` and ``BaseQuerySet.aggregate_average`` methods.
- Fix for delete with write_concern {'w': 0}. #1008
- Allow dynamic lookup for more than two parts. #882
- Added support for min_distance on geo queries. #831
@ -275,10 +331,10 @@
Changes in 0.9.0
================
- Update FileField when creating a new file #714
- Added `EmbeddedDocumentListField` for Lists of Embedded Documents. #826
- Added ``EmbeddedDocumentListField`` for Lists of Embedded Documents. #826
- ComplexDateTimeField should fall back to None when null=True #864
- Request Support for $min, $max Field update operators #863
- `BaseDict` does not follow `setdefault` #866
- ``BaseDict`` does not follow ``setdefault`` #866
- Add support for $type operator # 766
- Fix tests for pymongo 2.8+ #877
- No module named 'django.utils.importlib' (Django dev) #872
@ -299,13 +355,13 @@ Changes in 0.9.0
- Stop ensure_indexes running on a secondaries unless connection is through mongos #746
- Not overriding default values when loading a subset of fields #399
- Saving document doesn't create new fields in existing collection #620
- Added `Queryset.aggregate` wrapper to aggregation framework #703
- Added ``Queryset.aggregate`` wrapper to aggregation framework #703
- Added support to show original model fields on to_json calls instead of db_field #697
- Added Queryset.search_text to Text indexes searches #700
- Fixed tests for Django 1.7 #696
- Follow ReferenceFields in EmbeddedDocuments with select_related #690
- Added preliminary support for text indexes #680
- Added `elemMatch` operator as well - `match` is too obscure #653
- Added ``elemMatch`` operator as well - ``match`` is too obscure #653
- Added support for progressive JPEG #486 #548
- Allow strings to be used in index creation #675
- Fixed EmbeddedDoc weakref proxy issue #592
@ -341,11 +397,11 @@ Changes in 0.9.0
- Increase email field length to accommodate new TLDs #726
- index_cls is ignored when deciding to set _cls as index prefix #733
- Make 'db' argument to connection optional #737
- Allow atomic update for the entire `DictField` #742
- Allow atomic update for the entire ``DictField`` #742
- Added MultiPointField, MultiLineField, MultiPolygonField
- Fix multiple connections aliases being rewritten #748
- Fixed a few instances where reverse_delete_rule was written as reverse_delete_rules. #791
- Make `in_bulk()` respect `no_dereference()` #775
- Make ``in_bulk()`` respect ``no_dereference()`` #775
- Handle None from model __str__; Fixes #753 #754
- _get_changed_fields fix for embedded documents with id field. #925

@ -399,18 +455,15 @@ Changes in 0.8.4

Changes in 0.8.3
================
- Fixed EmbeddedDocuments with `id` also storing `_id` (#402)
- Fixed EmbeddedDocuments with ``id`` also storing ``_id`` (#402)
- Added get_proxy_object helper to filefields (#391)
- Added QuerySetNoCache and QuerySet.no_cache() for lower memory consumption (#365)
- Fixed sum and average mapreduce dot notation support (#375, #376, #393)
- Fixed as_pymongo to return the id (#386)
- Document.select_related() now respects `db_alias` (#377)
- Document.select_related() now respects ``db_alias`` (#377)
- Reload uses shard_key if applicable (#384)
- Dynamic fields are ordered based on creation and stored in _fields_ordered (#396)

**Potential breaking change:** http://docs.mongoengine.org/en/latest/upgrade.html#to-0-8-3

- Fixed pickling dynamic documents `_dynamic_fields` (#387)
- Fixed pickling dynamic documents ``_dynamic_fields`` (#387)
- Fixed ListField setslice and delslice dirty tracking (#390)
- Added Django 1.5 PY3 support (#392)
- Added match ($elemMatch) support for EmbeddedDocuments (#379)
@ -451,7 +504,7 @@ Changes in 0.8.0
================
- Fixed querying ReferenceField custom_id (#317)
- Fixed pickle issues with collections (#316)
- Added `get_next_value` preview for SequenceFields (#319)
- Added ``get_next_value`` preview for SequenceFields (#319)
- Added no_sub_classes context manager and queryset helper (#312)
- Querysets now utilise a local cache
- Changed __len__ behaviour in the queryset (#247, #311)
@ -480,7 +533,7 @@ Changes in 0.8.0
- Updated connection to use MongoClient (#262, #274)
- Fixed db_alias and inherited Documents (#143)
- Documentation update for document errors (#124)
- Deprecated `get_or_create` (#35)
- Deprecated ``get_or_create`` (#35)
- Updated inheritable objects created by upsert now contain _cls (#118)
- Added support for creating documents with embedded documents in a single operation (#6)
- Added to_json and from_json to Document (#1)
@ -601,7 +654,7 @@ Changes in 0.7.0
- Fixed UnboundLocalError in composite index with pk field (#88)
- Updated ReferenceField's to optionally store ObjectId strings
  this will become the default in 0.8 (#89)
- Added FutureWarning - save will default to `cascade=False` in 0.8
- Added FutureWarning - save will default to ``cascade=False`` in 0.8
- Added example of indexing embedded document fields (#75)
- Fixed ImageField resizing when forcing size (#80)
- Add flexibility for fields handling bad data (#78)
@ -697,7 +750,7 @@ Changes in 0.6.8
================
- Fixed FileField losing reference when no default set
- Removed possible race condition from FileField (grid_file)
- Added assignment to save, can now do: `b = MyDoc(**kwargs).save()`
- Added assignment to save, can now do: ``b = MyDoc(**kwargs).save()``
- Added support for pull operations on nested EmbeddedDocuments
- Added support for choices with GenericReferenceFields
- Added support for choices with GenericEmbeddedDocumentFields
@ -712,7 +765,7 @@ Changes in 0.6.7
- Fixed indexing on '_id' or 'pk' or 'id'
- Invalid data from the DB now raises an InvalidDocumentError
- Cleaned up the Validation Error - docs and code
- Added meta `auto_create_index` so you can disable index creation
- Added meta ``auto_create_index`` so you can disable index creation
- Added write concern options to inserts
- Fixed typo in meta for index options
- Bug fix Read preference now passed correctly
@ -753,7 +806,6 @@ Changes in 0.6.1

Changes in 0.6
==============

- Added FutureWarning to inherited classes not declaring 'allow_inheritance' as the default will change in 0.7
- Added support for covered indexes when inheritance is off
- No longer always upsert on save for items with a '_id'
@ -978,7 +1030,6 @@ Changes in v0.1.3
  querying takes place
- A few minor bugfixes


Changes in v0.1.2
=================
- Query values may be processed before being used in queries
@ -987,7 +1038,6 @@ Changes in v0.1.2
- Added ``BooleanField``
- Added ``Document.reload()`` method


Changes in v0.1.1
=================
- Documents may now use capped collections
@ -1,16 +1,19 @@
from mongoengine import *

connect('tumblelog')
connect("tumblelog")


class Comment(EmbeddedDocument):
    content = StringField()
    name = StringField(max_length=120)


class User(Document):
    email = StringField(required=True)
    first_name = StringField(max_length=50)
    last_name = StringField(max_length=50)


class Post(Document):
    title = StringField(max_length=120, required=True)
    author = ReferenceField(User)
@ -18,54 +21,57 @@ class Post(Document):
    comments = ListField(EmbeddedDocumentField(Comment))

    # bugfix
    meta = {'allow_inheritance': True}
    meta = {"allow_inheritance": True}


class TextPost(Post):
    content = StringField()


class ImagePost(Post):
    image_path = StringField()


class LinkPost(Post):
    link_url = StringField()


Post.drop_collection()

john = User(email='jdoe@example.com', first_name='John', last_name='Doe')
john = User(email="jdoe@example.com", first_name="John", last_name="Doe")
john.save()

post1 = TextPost(title='Fun with MongoEngine', author=john)
post1.content = 'Took a look at MongoEngine today, looks pretty cool.'
post1.tags = ['mongodb', 'mongoengine']
post1 = TextPost(title="Fun with MongoEngine", author=john)
post1.content = "Took a look at MongoEngine today, looks pretty cool."
post1.tags = ["mongodb", "mongoengine"]
post1.save()

post2 = LinkPost(title='MongoEngine Documentation', author=john)
post2.link_url = 'http://tractiondigital.com/labs/mongoengine/docs'
post2.tags = ['mongoengine']
post2 = LinkPost(title="MongoEngine Documentation", author=john)
post2.link_url = "http://tractiondigital.com/labs/mongoengine/docs"
post2.tags = ["mongoengine"]
post2.save()

print('ALL POSTS')
print("ALL POSTS")
print()
for post in Post.objects:
    print(post.title)
    #print '=' * post.title.count()
    # print '=' * post.title.count()
    print("=" * 20)

    if isinstance(post, TextPost):
        print(post.content)

    if isinstance(post, LinkPost):
        print('Link:', post.link_url)
        print("Link:", post.link_url)

print()
print()

print('POSTS TAGGED \'MONGODB\'')
print("POSTS TAGGED 'MONGODB'")
print()
for post in Post.objects(tags='mongodb'):
for post in Post.objects(tags="mongodb"):
    print(post.title)
print()

num_posts = Post.objects(tags='mongodb').count()
num_posts = Post.objects(tags="mongodb").count()
print('Found %d posts with tag "mongodb"' % num_posts)
96
docs/conf.py
@ -11,7 +11,8 @@
# All configuration values have a default; values that are commented out
# serve to show the default.

import sys, os
import os
import sys

import sphinx_rtd_theme

@ -20,29 +21,29 @@ import mongoengine
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
sys.path.insert(0, os.path.abspath(".."))

# -- General configuration -----------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo']
extensions = ["sphinx.ext.autodoc", "sphinx.ext.todo"]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
templates_path = ["_templates"]

# The suffix of source filenames.
source_suffix = '.rst'
source_suffix = ".rst"

# The encoding of source files.
#source_encoding = 'utf-8'
# source_encoding = 'utf-8'

# The master toctree document.
master_doc = 'index'
master_doc = "index"

# General information about the project.
project = u'MongoEngine'
copyright = u'2009, MongoEngine Authors'
project = u"MongoEngine"
copyright = u"2009, MongoEngine Authors"

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@ -55,68 +56,66 @@ release = mongoengine.get_version()

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# today_fmt = '%B %d, %Y'

# List of documents that shouldn't be included in the build.
#unused_docs = []
# unused_docs = []

# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ['_build']
exclude_trees = ["_build"]

# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
pygments_style = "sphinx"

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# modindex_common_prefix = []


# -- Options for HTML output ---------------------------------------------------

# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'sphinx_rtd_theme'
html_theme = "sphinx_rtd_theme"

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
    'canonical_url': 'http://docs.mongoengine.org/en/latest/'
}
html_theme_options = {"canonical_url": "http://docs.mongoengine.org/en/latest/"}

# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
@ -126,11 +125,11 @@ html_favicon = "favicon.ico"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# html_static_path = ['_static']

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
@ -138,69 +137,68 @@ html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
html_sidebars = {
    'index': ['globaltoc.html', 'searchbox.html'],
    '**': ['localtoc.html', 'relations.html', 'searchbox.html']
    "index": ["globaltoc.html", "searchbox.html"],
    "**": ["localtoc.html", "relations.html", "searchbox.html"],
}


# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# html_additional_pages = {}

# If false, no module index is generated.
#html_use_modindex = True
# html_use_modindex = True

# If false, no index is generated.
#html_use_index = True
# html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False
# html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# html_show_sourcelink = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# html_use_opensearch = ''

# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# html_file_suffix = ''

# Output file base name for HTML help builder.
htmlhelp_basename = 'MongoEnginedoc'
htmlhelp_basename = "MongoEnginedoc"


# -- Options for LaTeX output --------------------------------------------------

# The paper size ('letter' or 'a4').
latex_paper_size = 'a4'
latex_paper_size = "a4"

# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# latex_font_size = '10pt'

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
    ('index', 'MongoEngine.tex', 'MongoEngine Documentation',
     'Ross Lawley', 'manual'),
    ("index", "MongoEngine.tex", "MongoEngine Documentation", "Ross Lawley", "manual")
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# latex_use_parts = False

# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# latex_preamble = ''

# Documents to append as an appendix to all manuals.
#latex_appendices = []
# latex_appendices = []

# If false, no module index is generated.
#latex_use_modindex = True
# latex_use_modindex = True

autoclass_content = 'both'
autoclass_content = "both"
@ -13,7 +13,7 @@ Help Wanted!

The MongoEngine team is looking for help contributing and maintaining a new
Django extension for MongoEngine! If you have Django experience and would like
to help contribute to the project, please get in touch on the
`mailing list <http://groups.google.com/group/mongoengine-users>`_ or by
simply contributing on
`GitHub <https://github.com/MongoEngine/django-mongoengine>`_.
12
docs/faq.rst
Normal file
@ -0,0 +1,12 @@
==========================
Frequently Asked Questions
==========================

Does MongoEngine support asynchronous drivers (Motor, TxMongo)?
---------------------------------------------------------------

No, MongoEngine is exclusively based on PyMongo and isn't designed to support other drivers.
If this is a requirement for your project, check the alternatives: `uMongo`_ and `MotorEngine`_.

.. _uMongo: https://umongo.readthedocs.io/
.. _MotorEngine: https://motorengine.readthedocs.io/
@ -86,7 +86,7 @@ using 3 different databases to store data::

    connect(alias='user-db-alias', db='user-db')
    connect(alias='book-db-alias', db='book-db')
    connect(alias='users-books-db-alias', db='users-books-db')


    class User(Document):
        name = StringField()
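For illustration, a minimal sketch (the ``Book`` class and its fields are
illustrative, not part of the original snippet) of how a document class is pinned
to one of the aliases registered above via the ``db_alias`` meta key::

    class Book(Document):
        name = StringField()

        # store Book documents through the 'book-db-alias' connection
        meta = {'db_alias': 'book-db-alias'}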
@ -352,7 +352,7 @@ Its value can take any of the following constants:
  Deletion is denied if there still exist references to the object being
  deleted.
:const:`mongoengine.NULLIFY`
  Any object's fields still referring to the object being deleted are removed
  Any object's fields still referring to the object being deleted are set to None
  (using MongoDB's "unset" operation), effectively nullifying the relationship.
:const:`mongoengine.CASCADE`
  Any object containing fields that are referring to the object being deleted
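As a quick illustration of these delete rules, a minimal sketch (document and
field names are illustrative)::

    class Employer(Document):
        name = StringField()

    class Employee(Document):
        name = StringField()
        # When an Employer is deleted, set this reference to None on its employees.
        employer = ReferenceField(Employer, reverse_delete_rule=NULLIFY)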
@ -555,7 +555,6 @@ There are a few top level defaults for all indexes that can be set::
        'index_background': True,
        'index_cls': False,
        'auto_create_index': True,
        'index_drop_dups': True,
    }


@ -574,11 +573,6 @@ There are a few top level defaults for all indexes that can be set::
  in systems where indexes are managed separately. Disabling this will improve
  performance.

:attr:`index_drop_dups` (Optional)
  Set the default value for whether an index should drop duplicates.
  Since MongoDB 3.0, drop_dups is not supported anymore. It raises a warning
  and has no effect
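To make these defaults concrete, a minimal sketch (the document and field names
are illustrative) of setting index defaults in a document's meta::

    class Reading(Document):
        sensor = StringField()

        meta = {
            'indexes': ['sensor'],
            # top-level index defaults, as described above
            'index_background': True,
            'auto_create_index': True,
        }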

Compound Indexes and Indexing sub documents
-------------------------------------------
@ -714,11 +708,16 @@ subsequent calls to :meth:`~mongoengine.queryset.QuerySet.order_by`. ::
Shard keys
==========

If your collection is sharded, then you need to specify the shard key as a tuple,
using the :attr:`shard_key` attribute of :attr:`~mongoengine.Document.meta`.
This ensures that the shard key is sent with the query when calling the
:meth:`~mongoengine.document.Document.save` or
:meth:`~mongoengine.document.Document.update` method on an existing
If your collection is sharded by multiple keys, then you can improve shard
routing (and thus the performance of your application) by specifying the shard
key, using the :attr:`shard_key` attribute of
:attr:`~mongoengine.Document.meta`. The shard key should be defined as a tuple.

This ensures that the full shard key is sent with the query when calling
methods such as :meth:`~mongoengine.document.Document.save`,
:meth:`~mongoengine.document.Document.update`,
:meth:`~mongoengine.document.Document.modify`, or
:meth:`~mongoengine.document.Document.delete` on an existing
:class:`~mongoengine.Document` instance::

    class LogEntry(Document):
@ -728,7 +727,8 @@ This ensures that the shard key is sent with the query when calling the
        data = StringField()

        meta = {
            'shard_key': ('machine', 'timestamp',)
            'shard_key': ('machine', 'timestamp'),
            'indexes': ('machine', 'timestamp'),
        }

.. _document-inheritance:
@ -738,7 +738,7 @@ Document inheritance

To create a specialised type of a :class:`~mongoengine.Document` you have
defined, you may subclass it and add any extra fields or methods you may need.
As this is new class is not a direct subclass of
As this new class is not a direct subclass of
:class:`~mongoengine.Document`, it will not be stored in its own collection; it
will use the same collection as its superclass uses. This allows for more
convenient and efficient retrieval of related documents -- all you need do is
@ -761,6 +761,27 @@ document.::
  Setting :attr:`allow_inheritance` to True should also be used in
  :class:`~mongoengine.EmbeddedDocument` class in case you need to subclass it

When it comes to querying using :attr:`.objects()`, querying `Page.objects()` will query
both `Page` and `DatedPage`, whereas querying `DatedPage` will only query the `DatedPage` documents.
Behind the scenes, MongoEngine deals with inheritance by adding a :attr:`_cls` attribute that contains
the class name in every document. When a document is loaded, MongoEngine checks
its :attr:`_cls` attribute and uses that class to construct the instance::

    Page(title='a funky title').save()
    DatedPage(title='another title', date=datetime.utcnow()).save()

    print(Page.objects().count())  # 2
    print(DatedPage.objects().count())  # 1

    # print documents in their native form
    # we remove 'id' to avoid polluting the output with unnecessary detail
    qs = Page.objects.exclude('id').as_pymongo()
    print(list(qs))
    # [
    #     {'_cls': u'Page', 'title': 'a funky title'},
    #     {'_cls': u'Page.DatedPage', 'title': u'another title', 'date': datetime.datetime(2019, 12, 13, 20, 16, 59, 993000)}
    # ]

Working with existing data
--------------------------
As MongoEngine no longer defaults to needing :attr:`_cls`, you can quickly and
@ -10,8 +10,9 @@ Writing

GridFS support comes in the form of the :class:`~mongoengine.fields.FileField` field
object. This field acts as a file-like object and provides a couple of
different ways of inserting and retrieving data. Arbitrary metadata such as
content type can also be stored alongside the files. In the following example,
a document is created to store details about animals, including a photo::
content type can also be stored alongside the files. The object returned when accessing a
FileField is a proxy to `Pymongo's GridFS <https://api.mongodb.com/python/current/examples/gridfs.html#gridfs-example>`_.
In the following example, a document is created to store details about animals, including a photo::

    class Animal(Document):
        genus = StringField()
@ -20,8 +21,8 @@ a document is created to store details about animals, including a photo::

    marmot = Animal(genus='Marmota', family='Sciuridae')

    marmot_photo = open('marmot.jpg', 'rb')
    marmot.photo.put(marmot_photo, content_type='image/jpeg')
    with open('marmot.jpg', 'rb') as fd:
        marmot.photo.put(fd, content_type='image/jpeg')
    marmot.save()

Retrieval
@ -34,6 +35,20 @@ field. The file can also be retrieved just as easily::

    photo = marmot.photo.read()
    content_type = marmot.photo.content_type

.. note:: If you need to read() the content of a file multiple times, you'll need to "rewind"
   the file-like object using `seek`::

       marmot = Animal.objects(genus='Marmota').first()
       content1 = marmot.photo.read()
       assert content1 != ""

       content2 = marmot.photo.read()  # will be empty
       assert content2 == ""

       marmot.photo.seek(0)  # rewind the file by setting the current position of the cursor in the file to 0
       content3 = marmot.photo.read()
       assert content3 == content1

Streaming
---------
@ -14,4 +14,5 @@ User Guide

    gridfs
    signals
    text-indexes
    logging-monitoring
    mongomock
@ -12,7 +12,7 @@ MongoEngine is available on PyPI, so you can use :program:`pip`:

.. code-block:: console

    $ pip install mongoengine
    $ python -m pip install mongoengine

Alternatively, if you don't have setuptools installed, `download it from PyPi
<http://pypi.python.org/pypi/mongoengine/>`_ and run
80
docs/guide/logging-monitoring.rst
Normal file
@ -0,0 +1,80 @@
==================
Logging/Monitoring
==================

It is possible to use `pymongo.monitoring <https://api.mongodb.com/python/current/api/pymongo/monitoring.html>`_ to monitor
the driver events (e.g. queries, connections, etc). This can be handy if you want to monitor the queries issued by
MongoEngine to the driver.

To use `pymongo.monitoring` with MongoEngine, you need to make sure that you are registering the listeners
**before** establishing the database connection (i.e. calling `connect`).

The following snippet provides a basic logging of all command events:

.. code-block:: python

    import logging
    from pymongo import monitoring
    from mongoengine import *

    log = logging.getLogger()
    log.setLevel(logging.DEBUG)
    logging.basicConfig(level=logging.DEBUG)


    class CommandLogger(monitoring.CommandListener):

        def started(self, event):
            log.debug("Command {0.command_name} with request id "
                      "{0.request_id} started on server "
                      "{0.connection_id}".format(event))

        def succeeded(self, event):
            log.debug("Command {0.command_name} with request id "
                      "{0.request_id} on server {0.connection_id} "
                      "succeeded in {0.duration_micros} "
                      "microseconds".format(event))

        def failed(self, event):
            log.debug("Command {0.command_name} with request id "
                      "{0.request_id} on server {0.connection_id} "
                      "failed in {0.duration_micros} "
                      "microseconds".format(event))

    monitoring.register(CommandLogger())


    class Jedi(Document):
        name = StringField()


    connect()


    log.info('GO!')

    log.info('Saving an item through MongoEngine...')
    Jedi(name='Obi-Wan Kenobii').save()

    log.info('Querying through MongoEngine...')
    obiwan = Jedi.objects.first()

    log.info('Updating through MongoEngine...')
    obiwan.name = 'Obi-Wan Kenobi'
    obiwan.save()


Executing this prints the following output::

    INFO:root:GO!
    INFO:root:Saving an item through MongoEngine...
    DEBUG:root:Command insert with request id 1681692777 started on server ('localhost', 27017)
    DEBUG:root:Command insert with request id 1681692777 on server ('localhost', 27017) succeeded in 562 microseconds
    INFO:root:Querying through MongoEngine...
    DEBUG:root:Command find with request id 1714636915 started on server ('localhost', 27017)
    DEBUG:root:Command find with request id 1714636915 on server ('localhost', 27017) succeeded in 341 microseconds
    INFO:root:Updating through MongoEngine...
    DEBUG:root:Command update with request id 1957747793 started on server ('localhost', 27017)
    DEBUG:root:Command update with request id 1957747793 on server ('localhost', 27017) succeeded in 455 microseconds

More details can of course be obtained by checking the `event` argument from the `CommandListener`.
@ -2,10 +2,10 @@
Use mongomock for testing
==============================

`mongomock <https://github.com/vmalloc/mongomock/>`_ is a package to do just
what the name implies, mocking a mongo database.

To use with mongoengine, simply specify mongomock when connecting with
mongoengine:

.. code-block:: python
@ -21,7 +21,7 @@ or with an alias:

    conn = get_connection('testdb')

Example of test file:
--------
---------------------
.. code-block:: python

    import unittest
@ -45,4 +45,4 @@ Example of test file:

        pers.save()

        fresh_pers = Person.objects().first()
        self.assertEqual(fresh_pers.name, 'John')
        assert fresh_pers.name == 'John'
@ -222,6 +222,18 @@ keyword argument::

.. versionadded:: 0.4

Sorting/Ordering results
========================
It is possible to order the results by 1 or more keys using :meth:`~mongoengine.queryset.QuerySet.order_by`.
The order may be specified by prepending each of the keys by "+" or "-". Ascending order is assumed if there's no prefix::

    # Order by ascending date
    blogs = BlogPost.objects().order_by('date')    # equivalent to .order_by('+date')

    # Order by ascending date first, then descending title
    blogs = BlogPost.objects().order_by('+date', '-title')


Limiting and skipping results
=============================
Just as with traditional ORMs, you may limit the number of results returned or
@ -349,9 +361,9 @@ Just as with limiting and skipping results, there is a method on a

You could technically use ``len(User.objects)`` to get the same result, but it
would be significantly slower than :meth:`~mongoengine.queryset.QuerySet.count`.
When you execute a server-side count query, you let MongoDB do the heavy
lifting and you receive a single integer over the wire. Meanwhile, len()
lifting and you receive a single integer over the wire. Meanwhile, ``len()``
retrieves all the results, places them in a local cache, and finally counts
them. If we compare the performance of the two operations, len() is much slower
them. If we compare the performance of the two operations, ``len()`` is much slower
than :meth:`~mongoengine.queryset.QuerySet.count`.
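For illustration, a minimal sketch of the difference (assuming a ``User`` document
class is defined and a connection is established)::

    n = User.objects.count()   # server-side count; a single integer comes over the wire
    n = len(User.objects)      # client-side; fetches and caches every result before counting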

Further aggregation
@ -386,6 +398,25 @@ would be generating "tag-clouds"::

    top_tags = sorted(tag_freqs.items(), key=itemgetter(1), reverse=True)[:10]


MongoDB aggregation API
-----------------------
If you need to run aggregation pipelines, MongoEngine provides an entry point to `Pymongo's aggregation framework <https://api.mongodb.com/python/current/examples/aggregation.html#aggregation-framework>`_
through :meth:`~mongoengine.queryset.QuerySet.aggregate`. Check out Pymongo's documentation for the syntax and pipeline.
An example of its use would be::

    class Person(Document):
        name = StringField()

    Person(name='John').save()
    Person(name='Bob').save()

    pipeline = [
        {"$sort": {"name": -1}},
        {"$project": {"_id": 0, "name": {"$toUpper": "$name"}}}
    ]
    data = list(Person.objects().aggregate(pipeline))
    assert data == [{'name': 'BOB'}, {'name': 'JOHN'}]

Query efficiency and performance
================================

@ -578,7 +609,7 @@ to push values with index::

.. note::
    Currently only top level lists are handled, future versions of mongodb /
    pymongo plan to support nested positional operators. See `The $ positional
    operator <http://www.mongodb.org/display/DOCS/Updating#Updating-The%24positionaloperator>`_.
    operator <https://docs.mongodb.com/manual/tutorial/update-documents/#Updating-The%24positionaloperator>`_.

Server-side javascript execution
================================
@ -44,8 +44,8 @@ Available signals include:

`post_save`
  Called within :meth:`~mongoengine.Document.save` after most actions
  (validation, insert/update, and cascades, but not clearing dirty flags) have
  completed successfully. Passed the additional boolean keyword argument
  `created` to indicate if the save was an insert or an update.

`pre_delete`
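Returning to the `post_save` signal described above, a minimal receiver sketch
(the handler and document names are illustrative)::

    from mongoengine import Document, StringField, signals

    class BlogPost(Document):
        title = StringField()

    def on_post_save(sender, document, **kwargs):
        # 'created' is True for an insert and False for an update
        print(document.title, 'created:', kwargs.get('created'))

    signals.post_save.connect(on_post_save, sender=BlogPost)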
@ -8,7 +8,7 @@ After MongoDB 2.4 version, supports search documents by text indexes.
Defining a Document with text index
===================================
Use the *$* prefix to set a text index. Look at the declaration::


    class News(Document):
        title = StringField()
        content = StringField()
@ -35,10 +35,10 @@ Saving a document::

        content="Various improvements").save()

Next, start a text search using the :attr:`QuerySet.search_text` method::


    document = News.objects.search_text('testing').first()
    document.title  # may be: "Using mongodb text search"


    document = News.objects.search_text('released').first()
    document.title  # may be: "MongoEngine 0.9 released"

@ -7,7 +7,7 @@ MongoDB. To install it, simply run

.. code-block:: console

    $ pip install -U mongoengine
    $ python -m pip install -U mongoengine

:doc:`tutorial`
  A quick tutorial building a tumblelog to get you up and running with
@ -23,9 +23,18 @@ MongoDB. To install it, simply run
:doc:`upgrade`
  How to upgrade MongoEngine.

:doc:`faq`
  Frequently Asked Questions

:doc:`django`
  Using MongoEngine and Django

MongoDB and driver support
--------------------------

MongoEngine is based on the PyMongo driver and tested against multiple versions of MongoDB.
For further details, please refer to the `readme <https://github.com/MongoEngine/mongoengine#mongoengine>`_.

Community
---------

@ -73,6 +82,7 @@ formats for offline reading.

    apireference
    changelog
    upgrade
    faq
    django

Indices and tables
@ -81,4 +91,3 @@ Indices and tables

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`

@ -18,7 +18,7 @@ location --- running it locally will be easier, but if that is not an option
then it may be run on a remote server. If you haven't installed MongoEngine,
simply use pip to install it like so::

    $ pip install mongoengine
    $ python -m pip install mongoengine

Before we can start using MongoEngine, we need to tell it how to connect to our
instance of :program:`mongod`. For this we use the :func:`~mongoengine.connect`
@ -52,7 +52,7 @@ rename its occurrences.
This release includes a major rehaul of MongoEngine's code quality and
introduces a few breaking changes. It also touches many different parts of
the package and although all the changes have been tested and scrutinized,
you're encouraged to thorougly test the upgrade.
you're encouraged to thoroughly test the upgrade.

First breaking change involves renaming `ConnectionError` to `MongoEngineConnectionError`.
If you import or catch this exception, you'll need to rename it in your code.
@ -85,10 +85,10 @@ by default from now on.

The 0.8.7 package on pypi was corrupted. If upgrading from 0.8.7 to 0.9.0 please follow: ::

    pip uninstall pymongo
    pip uninstall mongoengine
    pip install pymongo==2.8
    pip install mongoengine
    python -m pip uninstall pymongo
    python -m pip uninstall mongoengine
    python -m pip install pymongo==2.8
    python -m pip install mongoengine

0.8.7
*****
@ -153,7 +153,7 @@ inherited classes like so: ::

    # 4. Remove indexes
    info = collection.index_information()
    indexes_to_drop = [key for key, value in info.iteritems()
    indexes_to_drop = [key for key, value in info.items()
                       if '_types' in dict(value['key'])]
    for index in indexes_to_drop:
        collection.drop_index(index)
@ -18,12 +18,17 @@ from mongoengine.queryset import *
from mongoengine.signals import *


__all__ = (list(document.__all__) + list(fields.__all__) +
           list(connection.__all__) + list(queryset.__all__) +
           list(signals.__all__) + list(errors.__all__))
__all__ = (
    list(document.__all__)
    + list(fields.__all__)
    + list(connection.__all__)
    + list(queryset.__all__)
    + list(signals.__all__)
    + list(errors.__all__)
)


VERSION = (0, 18, 1)
VERSION = (0, 20, 0)


def get_version():
@ -31,7 +36,7 @@ def get_version():

    For example, if `VERSION == (0, 10, 7)`, return '0.10.7'.
    """
    return '.'.join(map(str, VERSION))
    return ".".join(map(str, VERSION))


__version__ = get_version()
@ -12,17 +12,22 @@ from mongoengine.base.metaclasses import *

__all__ = (
    # common
    'UPDATE_OPERATORS', '_document_registry', 'get_document',
    "UPDATE_OPERATORS",
    "_document_registry",
    "get_document",
    # datastructures
    'BaseDict', 'BaseList', 'EmbeddedDocumentList', 'LazyReference',
    "BaseDict",
    "BaseList",
    "EmbeddedDocumentList",
    "LazyReference",
    # document
    'BaseDocument',
    "BaseDocument",
    # fields
    'BaseField', 'ComplexBaseField', 'ObjectIdField', 'GeoJsonBaseField',
    "BaseField",
    "ComplexBaseField",
    "ObjectIdField",
    "GeoJsonBaseField",
    # metaclasses
    'DocumentMetaclass', 'TopLevelDocumentMetaclass'
    "DocumentMetaclass",
    "TopLevelDocumentMetaclass",
)
@ -1,12 +1,25 @@
from mongoengine.errors import NotRegistered

__all__ = ('UPDATE_OPERATORS', 'get_document', '_document_registry')
__all__ = ("UPDATE_OPERATORS", "get_document", "_document_registry")


UPDATE_OPERATORS = {'set', 'unset', 'inc', 'dec', 'mul',
                    'pop', 'push', 'push_all', 'pull',
                    'pull_all', 'add_to_set', 'set_on_insert',
                    'min', 'max', 'rename'}
UPDATE_OPERATORS = {
    "set",
    "unset",
    "inc",
    "dec",
    "mul",
    "pop",
    "push",
    "push_all",
    "pull",
    "pull_all",
    "add_to_set",
    "set_on_insert",
    "min",
    "max",
    "rename",
}


_document_registry = {}
@ -17,25 +30,33 @@ def get_document(name):
    doc = _document_registry.get(name, None)
    if not doc:
        # Possible old style name
        single_end = name.split('.')[-1]
        compound_end = '.%s' % single_end
        possible_match = [k for k in _document_registry
                          if k.endswith(compound_end) or k == single_end]
        single_end = name.split(".")[-1]
        compound_end = ".%s" % single_end
        possible_match = [
            k for k in _document_registry if k.endswith(compound_end) or k == single_end
        ]
        if len(possible_match) == 1:
            doc = _document_registry.get(possible_match.pop(), None)
    if not doc:
        raise NotRegistered("""
        raise NotRegistered(
            """
            `%s` has not been registered in the document registry.
            Importing the document class automatically registers it, has it
            been imported?
        """.strip() % name)
            """.strip()
            % name
        )
    return doc


def _get_documents_by_db(connection_alias, default_connection_alias):
    """Get all registered Document classes attached to a given database."""
    def get_doc_alias(doc_cls):
        return doc_cls._meta.get('db_alias', default_connection_alias)

    return [doc_cls for doc_cls in _document_registry.values()
            if get_doc_alias(doc_cls) == connection_alias]
    def get_doc_alias(doc_cls):
        return doc_cls._meta.get("db_alias", default_connection_alias)

    return [
        doc_cls
        for doc_cls in _document_registry.values()
        if get_doc_alias(doc_cls) == connection_alias
    ]
@ -1,30 +1,40 @@
import weakref

from bson import DBRef
import six
from six import iteritems

from mongoengine.common import _import_class
from mongoengine.errors import DoesNotExist, MultipleObjectsReturned

__all__ = ('BaseDict', 'StrictDict', 'BaseList', 'EmbeddedDocumentList', 'LazyReference')
__all__ = (
    "BaseDict",
    "StrictDict",
    "BaseList",
    "EmbeddedDocumentList",
    "LazyReference",
)


def mark_as_changed_wrapper(parent_method):
    """Decorators that ensures _mark_as_changed method gets called"""
    """Decorator that ensures _mark_as_changed method gets called."""

    def wrapper(self, *args, **kwargs):
        result = parent_method(self, *args, **kwargs)  # Can't use super() in the decorator
        # Can't use super() in the decorator.
        result = parent_method(self, *args, **kwargs)
        self._mark_as_changed()
        return result

    return wrapper


def mark_key_as_changed_wrapper(parent_method):
    """Decorators that ensures _mark_as_changed method gets called with the key argument"""
    """Decorator that ensures _mark_as_changed method gets called with the key argument"""

    def wrapper(self, key, *args, **kwargs):
        result = parent_method(self, key, *args, **kwargs)  # Can't use super() in the decorator
        # Can't use super() in the decorator.
        result = parent_method(self, key, *args, **kwargs)
        self._mark_as_changed(key)
        return result

    return wrapper

@ -36,12 +46,12 @@ class BaseDict(dict):
    _name = None

    def __init__(self, dict_items, instance, name):
        BaseDocument = _import_class('BaseDocument')
        BaseDocument = _import_class("BaseDocument")

        if isinstance(instance, BaseDocument):
            self._instance = weakref.proxy(instance)
        self._name = name
        super(BaseDict, self).__init__(dict_items)
        super().__init__(dict_items)

    def get(self, key, default=None):
        # get does not use __getitem__ by default so we must override it as well
@ -51,18 +61,18 @@ class BaseDict(dict):
            return default

    def __getitem__(self, key):
        value = super(BaseDict, self).__getitem__(key)
        value = super().__getitem__(key)

        EmbeddedDocument = _import_class('EmbeddedDocument')
        EmbeddedDocument = _import_class("EmbeddedDocument")
        if isinstance(value, EmbeddedDocument) and value._instance is None:
            value._instance = self._instance
        elif isinstance(value, dict) and not isinstance(value, BaseDict):
            value = BaseDict(value, None, '%s.%s' % (self._name, key))
            super(BaseDict, self).__setitem__(key, value)
            value = BaseDict(value, None, "{}.{}".format(self._name, key))
            super().__setitem__(key, value)
            value._instance = self._instance
        elif isinstance(value, list) and not isinstance(value, BaseList):
            value = BaseList(value, None, '%s.%s' % (self._name, key))
            super(BaseDict, self).__setitem__(key, value)
            value = BaseList(value, None, "{}.{}".format(self._name, key))
            super().__setitem__(key, value)
            value._instance = self._instance
        return value

@ -85,9 +95,9 @@ class BaseDict(dict):
    setdefault = mark_as_changed_wrapper(dict.setdefault)

    def _mark_as_changed(self, key=None):
        if hasattr(self._instance, '_mark_as_changed'):
        if hasattr(self._instance, "_mark_as_changed"):
            if key:
                self._instance._mark_as_changed('%s.%s' % (self._name, key))
                self._instance._mark_as_changed("{}.{}".format(self._name, key))
            else:
                self._instance._mark_as_changed(self._name)

@ -100,39 +110,41 @@ class BaseList(list):
    _name = None

    def __init__(self, list_items, instance, name):
        BaseDocument = _import_class('BaseDocument')
        BaseDocument = _import_class("BaseDocument")

        if isinstance(instance, BaseDocument):
            self._instance = weakref.proxy(instance)
        self._name = name
        super(BaseList, self).__init__(list_items)
        super().__init__(list_items)

    def __getitem__(self, key):
        value = super(BaseList, self).__getitem__(key)
        # change index to positive value because MongoDB does not support negative one
        if isinstance(key, int) and key < 0:
            key = len(self) + key
        value = super().__getitem__(key)

        if isinstance(key, slice):
            # When receiving a slice operator, we don't convert the structure and bind
            # to parent's instance. This is buggy for now but would require more work to be handled properly
            return value

        EmbeddedDocument = _import_class('EmbeddedDocument')
        EmbeddedDocument = _import_class("EmbeddedDocument")
        if isinstance(value, EmbeddedDocument) and value._instance is None:
            value._instance = self._instance
        elif isinstance(value, dict) and not isinstance(value, BaseDict):
            # Replace dict by BaseDict
            value = BaseDict(value, None, '%s.%s' % (self._name, key))
            super(BaseList, self).__setitem__(key, value)
            value = BaseDict(value, None, "{}.{}".format(self._name, key))
            super().__setitem__(key, value)
            value._instance = self._instance
        elif isinstance(value, list) and not isinstance(value, BaseList):
            # Replace list by BaseList
            value = BaseList(value, None, '%s.%s' % (self._name, key))
            super(BaseList, self).__setitem__(key, value)
            value = BaseList(value, None, "{}.{}".format(self._name, key))
            super().__setitem__(key, value)
            value._instance = self._instance
        return value

    def __iter__(self):
        for v in super(BaseList, self).__iter__():
            yield v
        yield from super().__iter__()

    def __getstate__(self):
        self.instance = None
@ -150,7 +162,7 @@ class BaseList(list):
        # instead, we simply marks the whole list as changed
        changed_key = None

        result = super(BaseList, self).__setitem__(key, value)
        result = super().__setitem__(key, value)
        self._mark_as_changed(changed_key)
        return result

@ -165,33 +177,19 @@ class BaseList(list):
    __iadd__ = mark_as_changed_wrapper(list.__iadd__)
    __imul__ = mark_as_changed_wrapper(list.__imul__)

    if six.PY2:
        # Under py3 __setslice__, __delslice__ and __getslice__
        # are replaced by __setitem__, __delitem__ and __getitem__ with a slice as parameter
        # so we mimic this under python 2
        def __setslice__(self, i, j, sequence):
            return self.__setitem__(slice(i, j), sequence)

        def __delslice__(self, i, j):
            return self.__delitem__(slice(i, j))

        def __getslice__(self, i, j):
            return self.__getitem__(slice(i, j))

    def _mark_as_changed(self, key=None):
        if hasattr(self._instance, '_mark_as_changed'):
            if key:
        if hasattr(self._instance, "_mark_as_changed"):
            if key is not None:
                self._instance._mark_as_changed(
                    '%s.%s' % (self._name, key % len(self))
                    "{}.{}".format(self._name, key % len(self))
                )
            else:
                self._instance._mark_as_changed(self._name)

class EmbeddedDocumentList(BaseList):
|
||||
|
||||
def __init__(self, list_items, instance, name):
|
||||
super(EmbeddedDocumentList, self).__init__(list_items, instance, name)
|
||||
super().__init__(list_items, instance, name)
|
||||
self._instance = instance
|
||||
|
||||
@classmethod
|
||||
@ -201,7 +199,7 @@ class EmbeddedDocumentList(BaseList):
|
||||
"""
|
||||
for key, expected_value in kwargs.items():
|
||||
doc_val = getattr(embedded_doc, key)
|
||||
if doc_val != expected_value and six.text_type(doc_val) != expected_value:
|
||||
if doc_val != expected_value and str(doc_val) != expected_value:
|
||||
return False
|
||||
return True
|
||||
|
||||
@ -274,12 +272,10 @@ class EmbeddedDocumentList(BaseList):
|
||||
"""
|
||||
values = self.__only_matches(self, kwargs)
|
||||
if len(values) == 0:
|
||||
raise DoesNotExist(
|
||||
'%s matching query does not exist.' % self._name
|
||||
)
|
||||
raise DoesNotExist("%s matching query does not exist." % self._name)
|
||||
elif len(values) > 1:
|
||||
raise MultipleObjectsReturned(
|
||||
'%d items returned, instead of 1' % len(values)
|
||||
"%d items returned, instead of 1" % len(values)
|
||||
)
|
||||
|
||||
return values[0]
|
||||
@ -293,11 +289,11 @@ class EmbeddedDocumentList(BaseList):
|
||||
|
||||
def create(self, **values):
|
||||
"""
|
||||
Creates a new embedded document and saves it to the database.
|
||||
Creates a new instance of the EmbeddedDocument and appends it to this EmbeddedDocumentList.
|
||||
|
||||
.. note::
|
||||
The embedded document changes are not automatically saved
|
||||
to the database after calling this method.
|
||||
the instance of the EmbeddedDocument is not automatically saved to the database.
|
||||
You still need to call .save() on the parent Document.
|
||||
|
||||
:param values: A dictionary of values for the embedded document.
|
||||
:return: The new embedded document instance.
|
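As the rewritten docstring stresses, create() only instantiates and appends the embedded document; nothing is written to MongoDB until the parent is saved. A hedged usage sketch, where Page/Comment and the database name are illustrative and connect() assumes a reachable local mongod:

from mongoengine import (
    Document, EmbeddedDocument, EmbeddedDocumentListField, StringField, connect,
)

class Comment(EmbeddedDocument):
    body = StringField()

class Page(Document):
    comments = EmbeddedDocumentListField(Comment)

connect("example_db")  # assumption: a local mongod is running
page = Page().save()
comment = page.comments.create(body="hello")  # appended in memory only
page.save()  # the new comment reaches the database only now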
@ -358,24 +354,24 @@ class EmbeddedDocumentList(BaseList):
        return len(values)


class StrictDict(object):
class StrictDict:
    __slots__ = ()
    _special_fields = {'get', 'pop', 'iteritems', 'items', 'keys', 'create'}
    _special_fields = {"get", "pop", "iteritems", "items", "keys", "create"}
    _classes = {}

    def __init__(self, **kwargs):
        for k, v in iteritems(kwargs):
        for k, v in kwargs.items():
            setattr(self, k, v)

    def __getitem__(self, key):
        key = '_reserved_' + key if key in self._special_fields else key
        key = "_reserved_" + key if key in self._special_fields else key
        try:
            return getattr(self, key)
        except AttributeError:
            raise KeyError(key)

    def __setitem__(self, key, value):
        key = '_reserved_' + key if key in self._special_fields else key
        key = "_reserved_" + key if key in self._special_fields else key
        return setattr(self, key, value)

    def __contains__(self, key):
@ -412,37 +408,42 @@ class StrictDict(object):
        return (key for key in self.__slots__ if hasattr(self, key))

    def __len__(self):
        return len(list(iteritems(self)))
        return len(list(self.items()))

    def __eq__(self, other):
        return self.items() == other.items()
        return list(self.items()) == list(other.items())

    def __ne__(self, other):
        return self.items() != other.items()
        return not (self == other)
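The list() wrapping in __eq__ is needed because StrictDict.items() returns a generator, and two distinct generators never compare equal, so the old comparison was effectively identity-based. A standalone illustration:

# Two generators yielding identical pairs still compare unequal.
def items_a():
    yield ("x", 1)

def items_b():
    yield ("x", 1)

assert (items_a() == items_b()) is False  # generator objects, not contents
assert list(items_a()) == list(items_b())  # materialised lists compare element-wise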
    @classmethod
    def create(cls, allowed_keys):
        allowed_keys_tuple = tuple(('_reserved_' + k if k in cls._special_fields else k) for k in allowed_keys)
        allowed_keys_tuple = tuple(
            ("_reserved_" + k if k in cls._special_fields else k) for k in allowed_keys
        )
        allowed_keys = frozenset(allowed_keys_tuple)
        if allowed_keys not in cls._classes:

            class SpecificStrictDict(cls):
                __slots__ = allowed_keys_tuple

                def __repr__(self):
                    return '{%s}' % ', '.join('"{0!s}": {1!r}'.format(k, v) for k, v in self.items())
                    return "{%s}" % ", ".join(
                        '"{!s}": {!r}'.format(k, v) for k, v in self.items()
                    )

            cls._classes[allowed_keys] = SpecificStrictDict
        return cls._classes[allowed_keys]
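A short sketch of how the factory above is used; the import path matches this file (mongoengine.base.datastructures) and PointDict is an illustrative name:

from mongoengine.base.datastructures import StrictDict

# create() caches one __slots__-based subclass per frozenset of keys.
PointDict = StrictDict.create(("x", "y"))
p = PointDict(x=1, y=2)
assert p["x"] == 1
assert PointDict is StrictDict.create(("x", "y"))  # served from the class cache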
class LazyReference(DBRef):
    __slots__ = ('_cached_doc', 'passthrough', 'document_type')
    __slots__ = ("_cached_doc", "passthrough", "document_type")

    def fetch(self, force=False):
        if not self._cached_doc or force:
            self._cached_doc = self.document_type.objects.get(pk=self.pk)
            if not self._cached_doc:
                raise DoesNotExist('Trying to dereference unknown document %s' % (self))
                raise DoesNotExist("Trying to dereference unknown document %s" % (self))
        return self._cached_doc

    @property
@ -453,7 +454,7 @@ class LazyReference(DBRef):
        self.document_type = document_type
        self._cached_doc = cached_doc
        self.passthrough = passthrough
        super(LazyReference, self).__init__(self.document_type._get_collection_name(), pk)
        super().__init__(self.document_type._get_collection_name(), pk)

    def __getitem__(self, name):
        if not self.passthrough:
@ -462,7 +463,7 @@ class LazyReference(DBRef):
            return document[name]

    def __getattr__(self, name):
        if not object.__getattribute__(self, 'passthrough'):
        if not object.__getattribute__(self, "passthrough"):
            raise AttributeError()
        document = self.fetch()
        try:
@ -471,4 +472,4 @@ class LazyReference(DBRef):
            raise AttributeError()

    def __repr__(self):
        return "<LazyReference(%s, %r)>" % (self.document_type, self.pk)
        return "<LazyReference({}, {!r})>".format(self.document_type, self.pk)
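A hedged sketch of the fetch() caching contract above; Org/User and the database name are illustrative, and connect() assumes a reachable local mongod:

from mongoengine import Document, StringField, connect
from mongoengine.fields import LazyReferenceField

class Org(Document):
    name = StringField()

class User(Document):
    org = LazyReferenceField(Org)

connect("example_db")  # assumption: a local mongod is running
org = Org(name="acme").save()
user = User(org=org).save()
fresh = User.objects.get(pk=user.pk)
doc = fresh.org.fetch()  # first call queries and caches the document
assert fresh.org.fetch() is doc  # second call is served from the cache
fresh.org.fetch(force=True)  # force=True bypasses the cache and re-queries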
File diff suppressed because it is too large
@ -4,25 +4,22 @@ import weakref

from bson import DBRef, ObjectId, SON
import pymongo
import six
from six import iteritems

from mongoengine.base.common import UPDATE_OPERATORS
from mongoengine.base.datastructures import (BaseDict, BaseList,
                                             EmbeddedDocumentList)
from mongoengine.base.datastructures import BaseDict, BaseList, EmbeddedDocumentList
from mongoengine.common import _import_class
from mongoengine.errors import DeprecatedError, ValidationError

__all__ = ('BaseField', 'ComplexBaseField', 'ObjectIdField',
           'GeoJsonBaseField')
__all__ = ("BaseField", "ComplexBaseField", "ObjectIdField", "GeoJsonBaseField")


class BaseField(object):
class BaseField:
    """A base class for fields in a MongoDB document. Instances of this class
    may be added to subclasses of `Document` to define a document's schema.

    .. versionchanged:: 0.5 - added verbose and help text
    """

    name = None
    _geo_index = False
    _auto_gen = False  # Call `generate` to generate a value
@ -34,14 +31,23 @@ class BaseField(object):
    creation_counter = 0
    auto_creation_counter = -1

    def __init__(self, db_field=None, name=None, required=False, default=None,
                 unique=False, unique_with=None, primary_key=False,
                 validation=None, choices=None, null=False, sparse=False,
                 **kwargs):
    def __init__(
        self,
        db_field=None,
        required=False,
        default=None,
        unique=False,
        unique_with=None,
        primary_key=False,
        validation=None,
        choices=None,
        null=False,
        sparse=False,
        **kwargs
    ):
        """
        :param db_field: The database field to store this field in
            (defaults to the name of the field)
        :param name: Deprecated - use db_field
        :param required: If the field is required. Whether it has to have a
            value or not. Defaults to False.
        :param default: (optional) The default value for this field if no value
@ -65,11 +71,8 @@ class BaseField(object):
            existing attributes. Common metadata includes `verbose_name` and
            `help_text`.
        """
        self.db_field = (db_field or name) if not primary_key else '_id'
        self.db_field = db_field if not primary_key else "_id"

        if name:
            msg = 'Field\'s "name" attribute deprecated in favour of "db_field"'
            warnings.warn(msg, DeprecationWarning)
        self.required = required or primary_key
        self.default = default
        self.unique = bool(unique or unique_with)
@ -82,17 +85,14 @@ class BaseField(object):
        self._owner_document = None

        # Make sure db_field is a string (if it's explicitly defined).
        if (
            self.db_field is not None and
            not isinstance(self.db_field, six.string_types)
        ):
            raise TypeError('db_field should be a string.')
        if self.db_field is not None and not isinstance(self.db_field, str):
            raise TypeError("db_field should be a string.")

        # Make sure db_field doesn't contain any forbidden characters.
        if isinstance(self.db_field, six.string_types) and (
            '.' in self.db_field or
            '\0' in self.db_field or
            self.db_field.startswith('$')
        if isinstance(self.db_field, str) and (
            "." in self.db_field
            or "\0" in self.db_field
            or self.db_field.startswith("$")
        ):
            raise ValueError(
                'field names cannot contain dots (".") or null characters '
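A quick illustration of the db_field checks above. Instantiating BaseField directly is for demonstration only; in practice a concrete field class would be used:

from mongoengine.base.fields import BaseField

try:
    BaseField(db_field="bad.name")  # '.', '\0' and a leading '$' are rejected
except ValueError as exc:
    print(exc)

try:
    BaseField(db_field=123)  # a non-string db_field is rejected
except TypeError as exc:
    print(exc)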
@ -102,15 +102,17 @@ class BaseField(object):
        # Detect and report conflicts between metadata and base properties.
        conflicts = set(dir(self)) & set(kwargs)
        if conflicts:
            raise TypeError('%s already has attribute(s): %s' % (
                self.__class__.__name__, ', '.join(conflicts)))
            raise TypeError(
                "%s already has attribute(s): %s"
                % (self.__class__.__name__, ", ".join(conflicts))
            )

        # Assign metadata to the instance
        # This efficient method is available because no __slots__ are defined.
        self.__dict__.update(kwargs)

        # Adjust the appropriate creation counter, and save our local copy.
        if self.db_field == '_id':
        if self.db_field == "_id":
            self.creation_counter = BaseField.auto_creation_counter
            BaseField.auto_creation_counter -= 1
        else:
@ -128,10 +130,9 @@ class BaseField(object):
        return instance._data.get(self.name)

    def __set__(self, instance, value):
        """Descriptor for assigning a value to a field in a document.
        """
        # If setting to None and there is a default
        # Then set the value to the default value
        """Descriptor for assigning a value to a field in a document."""
        # If setting to None and there is a default value provided for this
        # field, then set the value to the default value.
        if value is None:
            if self.null:
                value = None
@ -142,24 +143,29 @@ class BaseField(object):

        if instance._initialised:
            try:
                if (self.name not in instance._data or
                        instance._data[self.name] != value):
                value_has_changed = (
                    self.name not in instance._data
                    or instance._data[self.name] != value
                )
                if value_has_changed:
                    instance._mark_as_changed(self.name)
            except Exception:
                # Values cant be compared eg: naive and tz datetimes
                # So mark it as changed
                # Some values can't be compared and throw an error when we
                # attempt to do so (e.g. tz-naive and tz-aware datetimes).
                # Mark the field as changed in such cases.
                instance._mark_as_changed(self.name)

        EmbeddedDocument = _import_class('EmbeddedDocument')
        EmbeddedDocument = _import_class("EmbeddedDocument")
        if isinstance(value, EmbeddedDocument):
            value._instance = weakref.proxy(instance)
        elif isinstance(value, (list, tuple)):
            for v in value:
                if isinstance(v, EmbeddedDocument):
                    v._instance = weakref.proxy(instance)

        instance._data[self.name] = value

    def error(self, message='', errors=None, field_name=None):
    def error(self, message="", errors=None, field_name=None):
        """Raise a ValidationError."""
        field_name = field_name if field_name else self.name
        raise ValidationError(message, errors=errors, field_name=field_name)
@ -176,11 +182,11 @@ class BaseField(object):
        """Helper method to call to_mongo with proper inputs."""
        f_inputs = self.to_mongo.__code__.co_varnames
        ex_vars = {}
        if 'fields' in f_inputs:
            ex_vars['fields'] = fields
        if "fields" in f_inputs:
            ex_vars["fields"] = fields

        if 'use_db_field' in f_inputs:
            ex_vars['use_db_field'] = use_db_field
        if "use_db_field" in f_inputs:
            ex_vars["use_db_field"] = use_db_field

        return self.to_mongo(value, **ex_vars)
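The co_varnames probe above is what lets _to_mongo_safe_call pass use_db_field and fields only to to_mongo implementations that accept them. The same trick in isolation:

# co_varnames starts with the function's parameter names, so membership
# tests reveal which optional keyword arguments an implementation takes.
def to_mongo_plain(value):
    return value

def to_mongo_rich(value, use_db_field=True, fields=None):
    return value

assert "use_db_field" not in to_mongo_plain.__code__.co_varnames
assert "use_db_field" in to_mongo_rich.__code__.co_varnames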
@ -195,8 +201,8 @@ class BaseField(object):
        pass

    def _validate_choices(self, value):
        Document = _import_class('Document')
        EmbeddedDocument = _import_class('EmbeddedDocument')
        Document = _import_class("Document")
        EmbeddedDocument = _import_class("EmbeddedDocument")

        choice_list = self.choices
        if isinstance(next(iter(choice_list)), (list, tuple)):
@ -206,16 +212,12 @@ class BaseField(object):
        # Choices which are other types of Documents
        if isinstance(value, (Document, EmbeddedDocument)):
            if not any(isinstance(value, c) for c in choice_list):
                self.error(
                    'Value must be an instance of %s' % (
                        six.text_type(choice_list)
                    )
                )
                self.error("Value must be an instance of %s" % (choice_list))
        # Choices which are types other than Documents
        else:
            values = value if isinstance(value, (list, tuple)) else [value]
            if len(set(values) - set(choice_list)):
                self.error('Value must be one of %s' % six.text_type(choice_list))
                self.error("Value must be one of %s" % str(choice_list))

    def _validate(self, value, **kwargs):
        # Check the Choices Constraint
@ -231,13 +233,17 @@ class BaseField(object):
                # in favor of having validation raising a ValidationError
                ret = self.validation(value)
                if ret is not None:
                    raise DeprecatedError('validation argument for `%s` must not return anything, '
                                          'it should raise a ValidationError if validation fails' % self.name)
                    raise DeprecatedError(
                        "validation argument for `%s` must not return anything, "
                        "it should raise a ValidationError if validation fails"
                        % self.name
                    )
            except ValidationError as ex:
                self.error(str(ex))
            else:
                raise ValueError('validation argument for `"%s"` must be a '
                                 'callable.' % self.name)
                raise ValueError(
                    'validation argument for `"%s"` must be a ' "callable." % self.name
                )

        self.validate(value, **kwargs)
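A sketch of the validation contract the DeprecatedError above enforces: the callable passed as validation must raise ValidationError rather than return a boolean. Note and not_empty are illustrative names:

from mongoengine import Document, StringField, ValidationError

def not_empty(value):
    if not value.strip():
        # Raising is required; returning True/False now triggers DeprecatedError.
        raise ValidationError("value must not be blank")

class Note(Document):
    text = StringField(validation=not_empty)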
@ -271,35 +277,41 @@ class ComplexBaseField(BaseField):
            # Document class being used rather than a document object
            return self

        ReferenceField = _import_class('ReferenceField')
        GenericReferenceField = _import_class('GenericReferenceField')
        EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField')
        ReferenceField = _import_class("ReferenceField")
        GenericReferenceField = _import_class("GenericReferenceField")
        EmbeddedDocumentListField = _import_class("EmbeddedDocumentListField")

        auto_dereference = instance._fields[self.name]._auto_dereference

        dereference = (auto_dereference and
                       (self.field is None or isinstance(self.field,
                                                         (GenericReferenceField, ReferenceField))))
        dereference = auto_dereference and (
            self.field is None
            or isinstance(self.field, (GenericReferenceField, ReferenceField))
        )

        _dereference = _import_class('DeReference')()
        _dereference = _import_class("DeReference")()

        if (instance._initialised and
                dereference and
                instance._data.get(self.name) and
                not getattr(instance._data[self.name], '_dereferenced', False)):
        if (
            instance._initialised
            and dereference
            and instance._data.get(self.name)
            and not getattr(instance._data[self.name], "_dereferenced", False)
        ):
            instance._data[self.name] = _dereference(
                instance._data.get(self.name), max_depth=1, instance=instance,
                name=self.name
                instance._data.get(self.name),
                max_depth=1,
                instance=instance,
                name=self.name,
            )
            if hasattr(instance._data[self.name], '_dereferenced'):
            if hasattr(instance._data[self.name], "_dereferenced"):
                instance._data[self.name]._dereferenced = True

        value = super(ComplexBaseField, self).__get__(instance, owner)
        value = super().__get__(instance, owner)

        # Convert lists / values so we can watch for any changes on them
        if isinstance(value, (list, tuple)):
            if (issubclass(type(self), EmbeddedDocumentListField) and
                    not isinstance(value, EmbeddedDocumentList)):
            if issubclass(type(self), EmbeddedDocumentListField) and not isinstance(
                value, EmbeddedDocumentList
            ):
                value = EmbeddedDocumentList(value, instance, self.name)
            elif not isinstance(value, BaseList):
                value = BaseList(value, instance, self.name)
@ -308,12 +320,13 @@ class ComplexBaseField(BaseField):
            value = BaseDict(value, instance, self.name)
            instance._data[self.name] = value

        if (auto_dereference and instance._initialised and
                isinstance(value, (BaseList, BaseDict)) and
                not value._dereferenced):
            value = _dereference(
                value, max_depth=1, instance=instance, name=self.name
            )
        if (
            auto_dereference
            and instance._initialised
            and isinstance(value, (BaseList, BaseDict))
            and not value._dereferenced
        ):
            value = _dereference(value, max_depth=1, instance=instance, name=self.name)
            value._dereferenced = True
            instance._data[self.name] = value

@ -321,19 +334,19 @@ class ComplexBaseField(BaseField):

    def to_python(self, value):
        """Convert a MongoDB-compatible type to a Python type."""
        if isinstance(value, six.string_types):
        if isinstance(value, str):
            return value

        if hasattr(value, 'to_python'):
        if hasattr(value, "to_python"):
            return value.to_python()

        BaseDocument = _import_class('BaseDocument')
        BaseDocument = _import_class("BaseDocument")
        if isinstance(value, BaseDocument):
            # Something is wrong, return the value as it is
            return value

        is_list = False
        if not hasattr(value, 'items'):
        if not hasattr(value, "items"):
            try:
                is_list = True
                value = {idx: v for idx, v in enumerate(value)}
@ -342,50 +355,54 @@ class ComplexBaseField(BaseField):

        if self.field:
            self.field._auto_dereference = self._auto_dereference
            value_dict = {key: self.field.to_python(item)
                          for key, item in value.items()}
            value_dict = {
                key: self.field.to_python(item) for key, item in value.items()
            }
        else:
            Document = _import_class('Document')
            Document = _import_class("Document")
            value_dict = {}
            for k, v in value.items():
                if isinstance(v, Document):
                    # We need the id from the saved object to create the DBRef
                    if v.pk is None:
                        self.error('You can only reference documents once they'
                                   ' have been saved to the database')
                        self.error(
                            "You can only reference documents once they"
                            " have been saved to the database"
                        )
                    collection = v._get_collection_name()
                    value_dict[k] = DBRef(collection, v.pk)
                elif hasattr(v, 'to_python'):
                elif hasattr(v, "to_python"):
                    value_dict[k] = v.to_python()
                else:
                    value_dict[k] = self.to_python(v)

        if is_list:  # Convert back to a list
            return [v for _, v in sorted(value_dict.items(),
                                         key=operator.itemgetter(0))]
            return [
                v for _, v in sorted(value_dict.items(), key=operator.itemgetter(0))
            ]
        return value_dict

    def to_mongo(self, value, use_db_field=True, fields=None):
        """Convert a Python type to a MongoDB-compatible type."""
        Document = _import_class('Document')
        EmbeddedDocument = _import_class('EmbeddedDocument')
        GenericReferenceField = _import_class('GenericReferenceField')
        Document = _import_class("Document")
        EmbeddedDocument = _import_class("EmbeddedDocument")
        GenericReferenceField = _import_class("GenericReferenceField")

        if isinstance(value, six.string_types):
        if isinstance(value, str):
            return value

        if hasattr(value, 'to_mongo'):
        if hasattr(value, "to_mongo"):
            if isinstance(value, Document):
                return GenericReferenceField().to_mongo(value)
            cls = value.__class__
            val = value.to_mongo(use_db_field, fields)
            # If it's a document that is not inherited add _cls
            if isinstance(value, EmbeddedDocument):
                val['_cls'] = cls.__name__
                val["_cls"] = cls.__name__
            return val

        is_list = False
        if not hasattr(value, 'items'):
        if not hasattr(value, "items"):
            try:
                is_list = True
                value = {k: v for k, v in enumerate(value)}
@ -395,48 +412,51 @@ class ComplexBaseField(BaseField):
        if self.field:
            value_dict = {
                key: self.field._to_mongo_safe_call(item, use_db_field, fields)
                for key, item in iteritems(value)
                for key, item in value.items()
            }
        else:
            value_dict = {}
            for k, v in iteritems(value):
            for k, v in value.items():
                if isinstance(v, Document):
                    # We need the id from the saved object to create the DBRef
                    if v.pk is None:
                        self.error('You can only reference documents once they'
                                   ' have been saved to the database')
                        self.error(
                            "You can only reference documents once they"
                            " have been saved to the database"
                        )

                    # If it's a document that is not inheritable it won't have
                    # any _cls data, so making it a generic reference allows
                    # us to dereference it
                    meta = getattr(v, '_meta', {})
                    allow_inheritance = meta.get('allow_inheritance')
                    meta = getattr(v, "_meta", {})
                    allow_inheritance = meta.get("allow_inheritance")
                    if not allow_inheritance and not self.field:
                        value_dict[k] = GenericReferenceField().to_mongo(v)
                    else:
                        collection = v._get_collection_name()
                        value_dict[k] = DBRef(collection, v.pk)
                elif hasattr(v, 'to_mongo'):
                elif hasattr(v, "to_mongo"):
                    cls = v.__class__
                    val = v.to_mongo(use_db_field, fields)
                    # If it's a document that is not inherited add _cls
                    if isinstance(v, (Document, EmbeddedDocument)):
                        val['_cls'] = cls.__name__
                        val["_cls"] = cls.__name__
                    value_dict[k] = val
                else:
                    value_dict[k] = self.to_mongo(v, use_db_field, fields)

        if is_list:  # Convert back to a list
            return [v for _, v in sorted(value_dict.items(),
                                         key=operator.itemgetter(0))]
            return [
                v for _, v in sorted(value_dict.items(), key=operator.itemgetter(0))
            ]
        return value_dict

    def validate(self, value):
        """If field is provided ensure the value is valid."""
        errors = {}
        if self.field:
            if hasattr(value, 'iteritems') or hasattr(value, 'items'):
                sequence = iteritems(value)
            if hasattr(value, "items"):
                sequence = value.items()
            else:
                sequence = enumerate(value)
            for k, v in sequence:
@ -449,11 +469,12 @@ class ComplexBaseField(BaseField):

        if errors:
            field_class = self.field.__class__.__name__
            self.error('Invalid %s item (%s)' % (field_class, value),
                       errors=errors)
            self.error(
                "Invalid {} item ({})".format(field_class, value), errors=errors
            )
        # Don't allow empty values if required
        if self.required and not value:
            self.error('Field is required and cannot be empty')
            self.error("Field is required and cannot be empty")

    def prepare_query_value(self, op, value):
        return self.to_mongo(value)
@ -483,10 +504,9 @@ class ObjectIdField(BaseField):
    def to_mongo(self, value):
        if not isinstance(value, ObjectId):
            try:
                return ObjectId(six.text_type(value))
                return ObjectId(str(value))
            except Exception as e:
                # e.message attribute has been deprecated since Python 2.6
                self.error(six.text_type(e))
                self.error(str(e))
        return value

    def prepare_query_value(self, op, value):
@ -494,9 +514,9 @@ class ObjectIdField(BaseField):

    def validate(self, value):
        try:
            ObjectId(six.text_type(value))
            ObjectId(str(value))
        except Exception:
            self.error('Invalid Object ID')
            self.error("Invalid ObjectID")
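A small sketch of the str()-based coercion above; the 24-character hex string is an arbitrary example value:

from bson import ObjectId
from mongoengine.base.fields import ObjectIdField

field = ObjectIdField()
oid = field.to_mongo("5f43a1a1a1a1a1a1a1a1a1a1")  # str() round-trip to ObjectId
assert isinstance(oid, ObjectId)
field.validate(oid)  # a well-formed id passes without raising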
class GeoJsonBaseField(BaseField):
@ -506,72 +526,73 @@ class GeoJsonBaseField(BaseField):
    """

    _geo_index = pymongo.GEOSPHERE
    _type = 'GeoBase'
    _type = "GeoBase"

    def __init__(self, auto_index=True, *args, **kwargs):
        """
        :param bool auto_index: Automatically create a '2dsphere' index.\
            Defaults to `True`.
        """
        self._name = '%sField' % self._type
        self._name = "%sField" % self._type
        if not auto_index:
            self._geo_index = False
        super(GeoJsonBaseField, self).__init__(*args, **kwargs)
        super().__init__(*args, **kwargs)

    def validate(self, value):
        """Validate the GeoJson object based on its type."""
        if isinstance(value, dict):
            if set(value.keys()) == {'type', 'coordinates'}:
                if value['type'] != self._type:
                    self.error('%s type must be "%s"' %
                               (self._name, self._type))
                return self.validate(value['coordinates'])
            if set(value.keys()) == {"type", "coordinates"}:
                if value["type"] != self._type:
                    self.error('{} type must be "{}"'.format(self._name, self._type))
                return self.validate(value["coordinates"])
            else:
                self.error('%s can only accept a valid GeoJson dictionary'
                           ' or lists of (x, y)' % self._name)
                self.error(
                    "%s can only accept a valid GeoJson dictionary"
                    " or lists of (x, y)" % self._name
                )
                return
        elif not isinstance(value, (list, tuple)):
            self.error('%s can only accept lists of [x, y]' % self._name)
            self.error("%s can only accept lists of [x, y]" % self._name)
            return

        validate = getattr(self, '_validate_%s' % self._type.lower())
        validate = getattr(self, "_validate_%s" % self._type.lower())
        error = validate(value)
        if error:
            self.error(error)
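The getattr dispatch above routes each GeoJSON type to its checker by name ("_validate_" plus the lowercased type). A standalone sketch of that pattern, with Dispatcher as an illustrative stand-in:

class Dispatcher:
    _type = "Point"

    def _validate_point(self, value):
        if len(value) != 2:
            return "Value must be a two-dimensional point"

    def check(self, value):
        # "_validate_" + the lowercased type name picks the matching method.
        validate = getattr(self, "_validate_%s" % self._type.lower())
        return validate(value)

assert Dispatcher().check([1.0, 2.0]) is None  # no error string means valid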
    def _validate_polygon(self, value, top_level=True):
        if not isinstance(value, (list, tuple)):
            return 'Polygons must contain list of linestrings'
            return "Polygons must contain list of linestrings"

        # Quick and dirty validator
        try:
            value[0][0][0]
        except (TypeError, IndexError):
            return 'Invalid Polygon must contain at least one valid linestring'
            return "Invalid Polygon must contain at least one valid linestring"

        errors = []
        for val in value:
            error = self._validate_linestring(val, False)
            if not error and val[0] != val[-1]:
                error = 'LineStrings must start and end at the same point'
                error = "LineStrings must start and end at the same point"
            if error and error not in errors:
                errors.append(error)
        if errors:
            if top_level:
                return 'Invalid Polygon:\n%s' % ', '.join(errors)
                return "Invalid Polygon:\n%s" % ", ".join(errors)
            else:
                return '%s' % ', '.join(errors)
                return "%s" % ", ".join(errors)

    def _validate_linestring(self, value, top_level=True):
        """Validate a linestring."""
        if not isinstance(value, (list, tuple)):
            return 'LineStrings must contain list of coordinate pairs'
            return "LineStrings must contain list of coordinate pairs"

        # Quick and dirty validator
        try:
            value[0][0]
        except (TypeError, IndexError):
            return 'Invalid LineString must contain at least one valid point'
            return "Invalid LineString must contain at least one valid point"

        errors = []
        for val in value:
@ -580,29 +601,30 @@ class GeoJsonBaseField(BaseField):
                errors.append(error)
        if errors:
            if top_level:
                return 'Invalid LineString:\n%s' % ', '.join(errors)
                return "Invalid LineString:\n%s" % ", ".join(errors)
            else:
                return '%s' % ', '.join(errors)
                return "%s" % ", ".join(errors)

    def _validate_point(self, value):
        """Validate each set of coords"""
        if not isinstance(value, (list, tuple)):
            return 'Points must be a list of coordinate pairs'
            return "Points must be a list of coordinate pairs"
        elif not len(value) == 2:
            return 'Value (%s) must be a two-dimensional point' % repr(value)
        elif (not isinstance(value[0], (float, int)) or
                not isinstance(value[1], (float, int))):
            return 'Both values (%s) in point must be float or int' % repr(value)
            return "Value (%s) must be a two-dimensional point" % repr(value)
        elif not isinstance(value[0], (float, int)) or not isinstance(
            value[1], (float, int)
        ):
            return "Both values (%s) in point must be float or int" % repr(value)

    def _validate_multipoint(self, value):
        if not isinstance(value, (list, tuple)):
            return 'MultiPoint must be a list of Point'
            return "MultiPoint must be a list of Point"

        # Quick and dirty validator
        try:
            value[0][0]
        except (TypeError, IndexError):
            return 'Invalid MultiPoint must contain at least one valid point'
            return "Invalid MultiPoint must contain at least one valid point"

        errors = []
        for point in value:
@ -611,17 +633,17 @@ class GeoJsonBaseField(BaseField):
            errors.append(error)

        if errors:
            return '%s' % ', '.join(errors)
            return "%s" % ", ".join(errors)

    def _validate_multilinestring(self, value, top_level=True):
        if not isinstance(value, (list, tuple)):
            return 'MultiLineString must be a list of LineString'
            return "MultiLineString must be a list of LineString"

        # Quick and dirty validator
        try:
            value[0][0][0]
        except (TypeError, IndexError):
            return 'Invalid MultiLineString must contain at least one valid linestring'
            return "Invalid MultiLineString must contain at least one valid linestring"

        errors = []
        for linestring in value:
@ -631,19 +653,19 @@ class GeoJsonBaseField(BaseField):

        if errors:
            if top_level:
                return 'Invalid MultiLineString:\n%s' % ', '.join(errors)
                return "Invalid MultiLineString:\n%s" % ", ".join(errors)
            else:
                return '%s' % ', '.join(errors)
                return "%s" % ", ".join(errors)

    def _validate_multipolygon(self, value):
        if not isinstance(value, (list, tuple)):
            return 'MultiPolygon must be a list of Polygon'
            return "MultiPolygon must be a list of Polygon"

        # Quick and dirty validator
        try:
            value[0][0][0][0]
        except (TypeError, IndexError):
            return 'Invalid MultiPolygon must contain at least one valid Polygon'
            return "Invalid MultiPolygon must contain at least one valid Polygon"

        errors = []
        for polygon in value:
@ -652,9 +674,9 @@ class GeoJsonBaseField(BaseField):
            errors.append(error)

        if errors:
            return 'Invalid MultiPolygon:\n%s' % ', '.join(errors)
            return "Invalid MultiPolygon:\n%s" % ", ".join(errors)

    def to_mongo(self, value):
        if isinstance(value, dict):
            return value
        return SON([('type', self._type), ('coordinates', value)])
        return SON([("type", self._type), ("coordinates", value)])
@ -1,18 +1,19 @@
import itertools
import warnings

import six
from six import iteritems, itervalues

from mongoengine.base.common import _document_registry
from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField
from mongoengine.common import _import_class
from mongoengine.errors import InvalidDocumentError
from mongoengine.queryset import (DO_NOTHING, DoesNotExist,
                                  MultipleObjectsReturned,
                                  QuerySetManager)
from mongoengine.queryset import (
    DO_NOTHING,
    DoesNotExist,
    MultipleObjectsReturned,
    QuerySetManager,
)


__all__ = ('DocumentMetaclass', 'TopLevelDocumentMetaclass')
__all__ = ("DocumentMetaclass", "TopLevelDocumentMetaclass")


class DocumentMetaclass(type):
@ -21,49 +22,51 @@ class DocumentMetaclass(type):
    # TODO lower complexity of this method
    def __new__(mcs, name, bases, attrs):
        flattened_bases = mcs._get_bases(bases)
        super_new = super(DocumentMetaclass, mcs).__new__
        super_new = super().__new__

        # If a base class just call super
        metaclass = attrs.get('my_metaclass')
        metaclass = attrs.get("my_metaclass")
        if metaclass and issubclass(metaclass, DocumentMetaclass):
            return super_new(mcs, name, bases, attrs)

        attrs['_is_document'] = attrs.get('_is_document', False)
        attrs['_cached_reference_fields'] = []
        attrs["_is_document"] = attrs.get("_is_document", False)
        attrs["_cached_reference_fields"] = []

        # EmbeddedDocuments could have meta data for inheritance
        if 'meta' in attrs:
            attrs['_meta'] = attrs.pop('meta')
        if "meta" in attrs:
            attrs["_meta"] = attrs.pop("meta")

        # EmbeddedDocuments should inherit meta data
        if '_meta' not in attrs:
        if "_meta" not in attrs:
            meta = MetaDict()
            for base in flattened_bases[::-1]:
                # Add any mixin metadata from plain objects
                if hasattr(base, 'meta'):
                if hasattr(base, "meta"):
                    meta.merge(base.meta)
                elif hasattr(base, '_meta'):
                elif hasattr(base, "_meta"):
                    meta.merge(base._meta)
            attrs['_meta'] = meta
            attrs['_meta']['abstract'] = False  # 789: EmbeddedDocument shouldn't inherit abstract
            attrs["_meta"] = meta
            attrs["_meta"][
                "abstract"
            ] = False  # 789: EmbeddedDocument shouldn't inherit abstract

        # If allow_inheritance is True, add a "_cls" string field to the attrs
        if attrs['_meta'].get('allow_inheritance'):
            StringField = _import_class('StringField')
            attrs['_cls'] = StringField()
        if attrs["_meta"].get("allow_inheritance"):
            StringField = _import_class("StringField")
            attrs["_cls"] = StringField()

        # Handle document Fields

        # Merge all fields from subclasses
        doc_fields = {}
        for base in flattened_bases[::-1]:
            if hasattr(base, '_fields'):
            if hasattr(base, "_fields"):
                doc_fields.update(base._fields)

            # Standard object mixin - merge in any Fields
            if not hasattr(base, '_meta'):
            if not hasattr(base, "_meta"):
                base_fields = {}
                for attr_name, attr_value in iteritems(base.__dict__):
                for attr_name, attr_value in base.__dict__.items():
                    if not isinstance(attr_value, BaseField):
                        continue
                    attr_value.name = attr_name
@ -75,7 +78,7 @@ class DocumentMetaclass(type):

        # Discover any document fields
        field_names = {}
        for attr_name, attr_value in iteritems(attrs):
        for attr_name, attr_value in attrs.items():
            if not isinstance(attr_value, BaseField):
                continue
            attr_value.name = attr_name
@ -84,27 +87,29 @@ class DocumentMetaclass(type):
            doc_fields[attr_name] = attr_value

            # Count names to ensure no db_field redefinitions
            field_names[attr_value.db_field] = field_names.get(
                attr_value.db_field, 0) + 1
            field_names[attr_value.db_field] = (
                field_names.get(attr_value.db_field, 0) + 1
            )

        # Ensure no duplicate db_fields
        duplicate_db_fields = [k for k, v in field_names.items() if v > 1]
        if duplicate_db_fields:
            msg = ('Multiple db_fields defined for: %s ' %
                   ', '.join(duplicate_db_fields))
            msg = "Multiple db_fields defined for: %s " % ", ".join(duplicate_db_fields)
            raise InvalidDocumentError(msg)

        # Set _fields and db_field maps
        attrs['_fields'] = doc_fields
        attrs['_db_field_map'] = {k: getattr(v, 'db_field', k)
                                  for k, v in doc_fields.items()}
        attrs['_reverse_db_field_map'] = {
            v: k for k, v in attrs['_db_field_map'].items()
        attrs["_fields"] = doc_fields
        attrs["_db_field_map"] = {
            k: getattr(v, "db_field", k) for k, v in doc_fields.items()
        }
        attrs["_reverse_db_field_map"] = {
            v: k for k, v in attrs["_db_field_map"].items()
        }

        attrs['_fields_ordered'] = tuple(i[1] for i in sorted(
            (v.creation_counter, v.name)
            for v in itervalues(doc_fields)))
        attrs["_fields_ordered"] = tuple(
            i[1]
            for i in sorted((v.creation_counter, v.name) for v in doc_fields.values())
        )

        #
        # Set document hierarchy
@ -112,32 +117,34 @@ class DocumentMetaclass(type):
        superclasses = ()
        class_name = [name]
        for base in flattened_bases:
            if (not getattr(base, '_is_base_cls', True) and
                    not getattr(base, '_meta', {}).get('abstract', True)):
            if not getattr(base, "_is_base_cls", True) and not getattr(
                base, "_meta", {}
            ).get("abstract", True):
                # Collate hierarchy for _cls and _subclasses
                class_name.append(base.__name__)

            if hasattr(base, '_meta'):
            if hasattr(base, "_meta"):
                # Warn if allow_inheritance isn't set and prevent
                # inheritance of classes where inheritance is set to False
                allow_inheritance = base._meta.get('allow_inheritance')
                if not allow_inheritance and not base._meta.get('abstract'):
                    raise ValueError('Document %s may not be subclassed. '
                                     'To enable inheritance, use the "allow_inheritance" meta attribute.' %
                                     base.__name__)
                allow_inheritance = base._meta.get("allow_inheritance")
                if not allow_inheritance and not base._meta.get("abstract"):
                    raise ValueError(
                        "Document %s may not be subclassed. "
                        'To enable inheritance, use the "allow_inheritance" meta attribute.'
                        % base.__name__
                    )

        # Get superclasses from last base superclass
        document_bases = [b for b in flattened_bases
                          if hasattr(b, '_class_name')]
        document_bases = [b for b in flattened_bases if hasattr(b, "_class_name")]
        if document_bases:
            superclasses = document_bases[0]._superclasses
            superclasses += (document_bases[0]._class_name, )
            superclasses += (document_bases[0]._class_name,)

        _cls = '.'.join(reversed(class_name))
        attrs['_class_name'] = _cls
        attrs['_superclasses'] = superclasses
        attrs['_subclasses'] = (_cls, )
        attrs['_types'] = attrs['_subclasses']  # TODO depreciate _types
        _cls = ".".join(reversed(class_name))
        attrs["_class_name"] = _cls
        attrs["_superclasses"] = superclasses
        attrs["_subclasses"] = (_cls,)
        attrs["_types"] = attrs["_subclasses"]  # TODO depreciate _types

        # Create the new_class
        new_class = super_new(mcs, name, bases, attrs)
@ -148,8 +155,12 @@ class DocumentMetaclass(type):
            base._subclasses += (_cls,)
            base._types = base._subclasses  # TODO depreciate _types

        (Document, EmbeddedDocument, DictField,
         CachedReferenceField) = mcs._import_classes()
        (
            Document,
            EmbeddedDocument,
            DictField,
            CachedReferenceField,
        ) = mcs._import_classes()

        if issubclass(new_class, Document):
            new_class._collection = None
@ -157,63 +168,50 @@ class DocumentMetaclass(type):
        # Add class to the _document_registry
        _document_registry[new_class._class_name] = new_class

        # In Python 2, User-defined methods objects have special read-only
        # attributes 'im_func' and 'im_self' which contain the function obj
        # and class instance object respectively. With Python 3 these special
        # attributes have been replaced by __func__ and __self__. The Blinker
        # module continues to use im_func and im_self, so the code below
        # copies __func__ into im_func and __self__ into im_self for
        # classmethod objects in Document derived classes.
        if six.PY3:
            for val in new_class.__dict__.values():
                if isinstance(val, classmethod):
                    f = val.__get__(new_class)
                    if hasattr(f, '__func__') and not hasattr(f, 'im_func'):
                        f.__dict__.update({'im_func': getattr(f, '__func__')})
                    if hasattr(f, '__self__') and not hasattr(f, 'im_self'):
                        f.__dict__.update({'im_self': getattr(f, '__self__')})

        # Handle delete rules
        for field in itervalues(new_class._fields):
        for field in new_class._fields.values():
            f = field
            if f.owner_document is None:
                f.owner_document = new_class
            delete_rule = getattr(f, 'reverse_delete_rule', DO_NOTHING)
            delete_rule = getattr(f, "reverse_delete_rule", DO_NOTHING)
            if isinstance(f, CachedReferenceField):

                if issubclass(new_class, EmbeddedDocument):
                    raise InvalidDocumentError('CachedReferenceFields is not '
                                               'allowed in EmbeddedDocuments')
                    raise InvalidDocumentError(
                        "CachedReferenceFields is not allowed in EmbeddedDocuments"
                    )

                if f.auto_sync:
                    f.start_listener()

                f.document_type._cached_reference_fields.append(f)

            if isinstance(f, ComplexBaseField) and hasattr(f, 'field'):
                delete_rule = getattr(f.field,
                                      'reverse_delete_rule',
                                      DO_NOTHING)
            if isinstance(f, ComplexBaseField) and hasattr(f, "field"):
                delete_rule = getattr(f.field, "reverse_delete_rule", DO_NOTHING)
                if isinstance(f, DictField) and delete_rule != DO_NOTHING:
                    msg = ('Reverse delete rules are not supported '
                           'for %s (field: %s)' %
                           (field.__class__.__name__, field.name))
                    msg = (
                        "Reverse delete rules are not supported "
                        "for %s (field: %s)" % (field.__class__.__name__, field.name)
                    )
                    raise InvalidDocumentError(msg)

                f = field.field

            if delete_rule != DO_NOTHING:
                if issubclass(new_class, EmbeddedDocument):
                    msg = ('Reverse delete rules are not supported for '
                           'EmbeddedDocuments (field: %s)' % field.name)
                    msg = (
                        "Reverse delete rules are not supported for "
                        "EmbeddedDocuments (field: %s)" % field.name
                    )
                    raise InvalidDocumentError(msg)
                f.document_type.register_delete_rule(new_class,
                                                     field.name, delete_rule)
                f.document_type.register_delete_rule(new_class, field.name, delete_rule)

            if (field.name and hasattr(Document, field.name) and
                    EmbeddedDocument not in new_class.mro()):
                msg = ('%s is a document method and not a valid '
                       'field name' % field.name)
            if (
                field.name
                and hasattr(Document, field.name)
                and EmbeddedDocument not in new_class.mro()
            ):
                msg = "%s is a document method and not a valid field name" % field.name
                raise InvalidDocumentError(msg)

        return new_class
@ -233,15 +231,14 @@ class DocumentMetaclass(type):
            if base is object:
                continue
            yield base
            for child_base in mcs.__get_bases(base.__bases__):
                yield child_base
            yield from mcs.__get_bases(base.__bases__)

    @classmethod
    def _import_classes(mcs):
        Document = _import_class('Document')
        EmbeddedDocument = _import_class('EmbeddedDocument')
        DictField = _import_class('DictField')
        CachedReferenceField = _import_class('CachedReferenceField')
        Document = _import_class("Document")
        EmbeddedDocument = _import_class("EmbeddedDocument")
        DictField = _import_class("DictField")
        CachedReferenceField = _import_class("CachedReferenceField")
        return Document, EmbeddedDocument, DictField, CachedReferenceField


@ -252,68 +249,69 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):

    def __new__(mcs, name, bases, attrs):
        flattened_bases = mcs._get_bases(bases)
        super_new = super(TopLevelDocumentMetaclass, mcs).__new__
        super_new = super().__new__

        # Set default _meta data if base class, otherwise get user defined meta
        if attrs.get('my_metaclass') == TopLevelDocumentMetaclass:
        if attrs.get("my_metaclass") == TopLevelDocumentMetaclass:
            # defaults
            attrs['_meta'] = {
                'abstract': True,
                'max_documents': None,
                'max_size': None,
                'ordering': [],  # default ordering applied at runtime
                'indexes': [],  # indexes to be ensured at runtime
                'id_field': None,
                'index_background': False,
                'index_drop_dups': False,
                'index_opts': None,
                'delete_rules': None,

            attrs["_meta"] = {
                "abstract": True,
                "max_documents": None,
                "max_size": None,
                "ordering": [],  # default ordering applied at runtime
                "indexes": [],  # indexes to be ensured at runtime
                "id_field": None,
                "index_background": False,
                "index_opts": None,
                "delete_rules": None,
                # allow_inheritance can be True, False, and None. True means
                # "allow inheritance", False means "don't allow inheritance",
                # None means "do whatever your parent does, or don't allow
                # inheritance if you're a top-level class".
                'allow_inheritance': None,
                "allow_inheritance": None,
            }
            attrs['_is_base_cls'] = True
            attrs['_meta'].update(attrs.get('meta', {}))
            attrs["_is_base_cls"] = True
            attrs["_meta"].update(attrs.get("meta", {}))
        else:
            attrs['_meta'] = attrs.get('meta', {})
            attrs["_meta"] = attrs.get("meta", {})
            # Explicitly set abstract to false unless set
            attrs['_meta']['abstract'] = attrs['_meta'].get('abstract', False)
            attrs['_is_base_cls'] = False
            attrs["_meta"]["abstract"] = attrs["_meta"].get("abstract", False)
            attrs["_is_base_cls"] = False

        # Set flag marking as document class - as opposed to an object mixin
        attrs['_is_document'] = True
        attrs["_is_document"] = True

        # Ensure queryset_class is inherited
        if 'objects' in attrs:
            manager = attrs['objects']
            if hasattr(manager, 'queryset_class'):
                attrs['_meta']['queryset_class'] = manager.queryset_class
        if "objects" in attrs:
            manager = attrs["objects"]
            if hasattr(manager, "queryset_class"):
                attrs["_meta"]["queryset_class"] = manager.queryset_class

        # Clean up top level meta
        if 'meta' in attrs:
            del attrs['meta']
        if "meta" in attrs:
            del attrs["meta"]

        # Find the parent document class
        parent_doc_cls = [b for b in flattened_bases
                          if b.__class__ == TopLevelDocumentMetaclass]
        parent_doc_cls = [
            b for b in flattened_bases if b.__class__ == TopLevelDocumentMetaclass
        ]
        parent_doc_cls = None if not parent_doc_cls else parent_doc_cls[0]

        # Prevent classes setting collection different to their parents
        # If parent wasn't an abstract class
        if (parent_doc_cls and 'collection' in attrs.get('_meta', {}) and
                not parent_doc_cls._meta.get('abstract', True)):
            msg = 'Trying to set a collection on a subclass (%s)' % name
        if (
            parent_doc_cls
            and "collection" in attrs.get("_meta", {})
            and not parent_doc_cls._meta.get("abstract", True)
        ):
            msg = "Trying to set a collection on a subclass (%s)" % name
            warnings.warn(msg, SyntaxWarning)
            del attrs['_meta']['collection']
            del attrs["_meta"]["collection"]

        # Ensure abstract documents have abstract bases
        if attrs.get('_is_base_cls') or attrs['_meta'].get('abstract'):
            if (parent_doc_cls and
                    not parent_doc_cls._meta.get('abstract', False)):
                msg = 'Abstract document cannot have non-abstract base'
        if attrs.get("_is_base_cls") or attrs["_meta"].get("abstract"):
            if parent_doc_cls and not parent_doc_cls._meta.get("abstract", False):
                msg = "Abstract document cannot have non-abstract base"
                raise ValueError(msg)
            return super_new(mcs, name, bases, attrs)

@ -322,38 +320,43 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
        meta = MetaDict()
        for base in flattened_bases[::-1]:
            # Add any mixin metadata from plain objects
            if hasattr(base, 'meta'):
            if hasattr(base, "meta"):
                meta.merge(base.meta)
            elif hasattr(base, '_meta'):
            elif hasattr(base, "_meta"):
                meta.merge(base._meta)

            # Set collection in the meta if its callable
            if (getattr(base, '_is_document', False) and
                    not base._meta.get('abstract')):
                collection = meta.get('collection', None)
            if getattr(base, "_is_document", False) and not base._meta.get("abstract"):
                collection = meta.get("collection", None)
                if callable(collection):
                    meta['collection'] = collection(base)
                    meta["collection"] = collection(base)

        meta.merge(attrs.get('_meta', {}))  # Top level meta
        meta.merge(attrs.get("_meta", {}))  # Top level meta

        # Only simple classes (i.e. direct subclasses of Document) may set
        # allow_inheritance to False. If the base Document allows inheritance,
        # none of its subclasses can override allow_inheritance to False.
        simple_class = all([b._meta.get('abstract')
                            for b in flattened_bases if hasattr(b, '_meta')])
        simple_class = all(
            [b._meta.get("abstract") for b in flattened_bases if hasattr(b, "_meta")]
        )
        if (
            not simple_class and
            meta['allow_inheritance'] is False and
            not meta['abstract']
            not simple_class
            and meta["allow_inheritance"] is False
            and not meta["abstract"]
        ):
            raise ValueError('Only direct subclasses of Document may set '
                             '"allow_inheritance" to False')
            raise ValueError(
                "Only direct subclasses of Document may set "
                '"allow_inheritance" to False'
            )

        # Set default collection name
        if 'collection' not in meta:
            meta['collection'] = ''.join('_%s' % c if c.isupper() else c
                                         for c in name).strip('_').lower()
        attrs['_meta'] = meta
        if "collection" not in meta:
            meta["collection"] = (
                "".join("_%s" % c if c.isupper() else c for c in name)
                .strip("_")
                .lower()
            )
        attrs["_meta"] = meta
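The reflowed expression above still implements the same default naming rule: CamelCase class names become snake_case collection names. Reproduced in isolation:

def default_collection(name):
    # Prefix each uppercase letter with "_", then strip and lowercase.
    return "".join("_%s" % c if c.isupper() else c for c in name).strip("_").lower()

assert default_collection("BlogPost") == "blog_post"
assert default_collection("User") == "user"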
        # Call super and get the new class
        new_class = super_new(mcs, name, bases, attrs)
@ -361,82 +364,96 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
        meta = new_class._meta

        # Set index specifications
        meta['index_specs'] = new_class._build_index_specs(meta['indexes'])
        meta["index_specs"] = new_class._build_index_specs(meta["indexes"])

        # If collection is a callable - call it and set the value
        collection = meta.get('collection')
        collection = meta.get("collection")
        if callable(collection):
            new_class._meta['collection'] = collection(new_class)
            new_class._meta["collection"] = collection(new_class)

        # Provide a default queryset unless exists or one has been set
        if 'objects' not in dir(new_class):
        if "objects" not in dir(new_class):
            new_class.objects = QuerySetManager()

        # Validate the fields and set primary key if needed
        for field_name, field in iteritems(new_class._fields):
        for field_name, field in new_class._fields.items():
            if field.primary_key:
                # Ensure only one primary key is set
                current_pk = new_class._meta.get('id_field')
                current_pk = new_class._meta.get("id_field")
                if current_pk and current_pk != field_name:
                    raise ValueError('Cannot override primary key field')
                    raise ValueError("Cannot override primary key field")

                # Set primary key
                if not current_pk:
                    new_class._meta['id_field'] = field_name
                    new_class._meta["id_field"] = field_name
                    new_class.id = field

        # Set primary key if not defined by the document
        new_class._auto_id_field = getattr(parent_doc_cls,
                                           '_auto_id_field', False)
        if not new_class._meta.get('id_field'):
            # After 0.10, find not existing names, instead of overwriting
        # If the document doesn't explicitly define a primary key field, create
        # one. Make it an ObjectIdField and give it a non-clashing name ("id"
        # by default, but can be different if that one's taken).
        if not new_class._meta.get("id_field"):
            id_name, id_db_name = mcs.get_auto_id_names(new_class)
            new_class._auto_id_field = True
            new_class._meta['id_field'] = id_name
            new_class._meta["id_field"] = id_name
            new_class._fields[id_name] = ObjectIdField(db_field=id_db_name)
            new_class._fields[id_name].name = id_name
            new_class.id = new_class._fields[id_name]
            new_class._db_field_map[id_name] = id_db_name
            new_class._reverse_db_field_map[id_db_name] = id_name
            # Prepend id field to _fields_ordered
            new_class._fields_ordered = (id_name, ) + new_class._fields_ordered

        # Merge in exceptions with parent hierarchy
            # Prepend the ID field to _fields_ordered (so that it's *always*
            # the first field).
            new_class._fields_ordered = (id_name,) + new_class._fields_ordered

        # Merge in exceptions with parent hierarchy.
        exceptions_to_merge = (DoesNotExist, MultipleObjectsReturned)
        module = attrs.get('__module__')
        module = attrs.get("__module__")
        for exc in exceptions_to_merge:
            name = exc.__name__
            parents = tuple(getattr(base, name) for base in flattened_bases
                            if hasattr(base, name)) or (exc,)
            # Create new exception and set to new_class
            exception = type(name, parents, {'__module__': module})
            parents = tuple(
                getattr(base, name) for base in flattened_bases if hasattr(base, name)
            ) or (exc,)

            # Create a new exception and set it as an attribute on the new
            # class.
            exception = type(name, parents, {"__module__": module})
            setattr(new_class, name, exception)

        return new_class

    @classmethod
    def get_auto_id_names(mcs, new_class):
        id_name, id_db_name = ('id', '_id')
        if id_name not in new_class._fields and \
                id_db_name not in (v.db_field for v in new_class._fields.values()):
        """Find a name for the automatic ID field for the given new class.

        Return a two-element tuple where the first item is the field name (i.e.
        the attribute name on the object) and the second element is the DB
        field name (i.e. the name of the key stored in MongoDB).

        Defaults to ('id', '_id'), or generates a non-clashing name in the form
        of ('auto_id_X', '_auto_id_X') if the default name is already taken.
        """
        id_name, id_db_name = ("id", "_id")
        existing_fields = {field_name for field_name in new_class._fields}
        existing_db_fields = {v.db_field for v in new_class._fields.values()}
        if id_name not in existing_fields and id_db_name not in existing_db_fields:
            return id_name, id_db_name
        id_basename, id_db_basename, i = 'auto_id', '_auto_id', 0
        while id_name in new_class._fields or \
                id_db_name in (v.db_field for v in new_class._fields.values()):
            id_name = '{0}_{1}'.format(id_basename, i)
            id_db_name = '{0}_{1}'.format(id_db_basename, i)
            i += 1
        return id_name, id_db_name

        id_basename, id_db_basename, i = ("auto_id", "_auto_id", 0)
        for i in itertools.count():
            id_name = "{}_{}".format(id_basename, i)
            id_db_name = "{}_{}".format(id_db_basename, i)
            if id_name not in existing_fields and id_db_name not in existing_db_fields:
                return id_name, id_db_name
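The itertools.count() rewrite above keeps the documented naming scheme: default to ("id", "_id"), otherwise probe auto_id_0, auto_id_1, and so on. A self-contained sketch (pick_auto_id is an illustrative helper):

import itertools

def pick_auto_id(existing_fields, existing_db_fields):
    if "id" not in existing_fields and "_id" not in existing_db_fields:
        return "id", "_id"
    for i in itertools.count():
        name, db_name = "auto_id_%d" % i, "_auto_id_%d" % i
        if name not in existing_fields and db_name not in existing_db_fields:
            return name, db_name

assert pick_auto_id({"title"}, {"title"}) == ("id", "_id")
assert pick_auto_id({"id"}, {"_id"}) == ("auto_id_0", "_auto_id_0")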
||||
|
||||
class MetaDict(dict):
|
||||
"""Custom dictionary for meta classes.
|
||||
Handles the merging of set indexes
|
||||
"""
|
||||
_merge_options = ('indexes',)
|
||||
|
||||
_merge_options = ("indexes",)
|
||||
|
||||
def merge(self, new_options):
|
||||
for k, v in iteritems(new_options):
|
||||
for k, v in new_options.items():
|
||||
if k in self._merge_options:
|
||||
self[k] = self.get(k, []) + v
|
||||
else:
|
||||
@ -445,4 +462,5 @@ class MetaDict(dict):
|
||||
|
||||
class BasesTuple(tuple):
|
||||
"""Special class to handle introspection of bases tuple in __new__"""
|
||||
|
||||
pass
|
||||
|
@ -1,7 +1,7 @@
import re


class LazyRegexCompiler(object):
class LazyRegexCompiler:
    """Descriptor to allow lazy compilation of regex"""

    def __init__(self, pattern, flags=0):
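The class above is a descriptor: the pattern string is stored at class-definition time and only compiled on first attribute access. A self-contained sketch of that pattern, on the assumption that it mirrors the real implementation (LazyPattern and EmailValidator are hypothetical names for illustration):

    import re

    class LazyPattern:
        """Compile the regex on first access instead of at import time."""

        def __init__(self, pattern, flags=0):
            self._pattern = pattern
            self._flags = flags
            self._compiled = None

        def __get__(self, instance, owner):
            # Compile once, then serve the cached compiled pattern afterwards.
            if self._compiled is None:
                self._compiled = re.compile(self._pattern, self._flags)
            return self._compiled

    class EmailValidator:
        EMAIL_REGEX = LazyPattern(r"[^@\s]+@[^@\s]+")

    print(EmailValidator().EMAIL_REGEX.match("user@example.com") is not None)  # True

Deferring re.compile keeps importing the module cheap when a pattern is never actually used.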
@ -19,34 +19,44 @@ def _import_class(cls_name):
    if cls_name in _class_registry_cache:
        return _class_registry_cache.get(cls_name)

    doc_classes = ('Document', 'DynamicEmbeddedDocument', 'EmbeddedDocument',
                   'MapReduceDocument')
    doc_classes = (
        "Document",
        "DynamicEmbeddedDocument",
        "EmbeddedDocument",
        "MapReduceDocument",
    )

    # Field Classes
    if not _field_list_cache:
        from mongoengine.fields import __all__ as fields

        _field_list_cache.extend(fields)
        from mongoengine.base.fields import __all__ as fields

        _field_list_cache.extend(fields)

    field_classes = _field_list_cache

    deref_classes = ('DeReference',)
    deref_classes = ("DeReference",)

    if cls_name == 'BaseDocument':
    if cls_name == "BaseDocument":
        from mongoengine.base import document as module
        import_classes = ['BaseDocument']

        import_classes = ["BaseDocument"]
    elif cls_name in doc_classes:
        from mongoengine import document as module

        import_classes = doc_classes
    elif cls_name in field_classes:
        from mongoengine import fields as module

        import_classes = field_classes
    elif cls_name in deref_classes:
        from mongoengine import dereference as module

        import_classes = deref_classes
    else:
        raise ValueError('No import set for: %s' % cls_name)
        raise ValueError("No import set for: %s" % cls_name)

    for cls in import_classes:
        _class_registry_cache[cls] = getattr(module, cls)
@ -1,23 +1,22 @@
from pymongo import MongoClient, ReadPreference, uri_parser
from pymongo.database import _check_name
import six

__all__ = [
    'DEFAULT_CONNECTION_NAME',
    'DEFAULT_DATABASE_NAME',
    'MongoEngineConnectionError',
    'connect',
    'disconnect',
    'disconnect_all',
    'get_connection',
    'get_db',
    'register_connection',
    "DEFAULT_CONNECTION_NAME",
    "DEFAULT_DATABASE_NAME",
    "ConnectionFailure",
    "connect",
    "disconnect",
    "disconnect_all",
    "get_connection",
    "get_db",
    "register_connection",
]


DEFAULT_CONNECTION_NAME = 'default'
DEFAULT_DATABASE_NAME = 'test'
DEFAULT_HOST = 'localhost'
DEFAULT_CONNECTION_NAME = "default"
DEFAULT_DATABASE_NAME = "test"
DEFAULT_HOST = "localhost"
DEFAULT_PORT = 27017

_connection_settings = {}
@ -27,10 +26,11 @@ _dbs = {}
READ_PREFERENCE = ReadPreference.PRIMARY


class MongoEngineConnectionError(Exception):
class ConnectionFailure(Exception):
    """Error raised when the database connection can't be established or
    when a connection with a requested alias can't be retrieved.
    """

    pass

@ -38,19 +38,24 @@ def _check_db_name(name):
    """Check if a database name is valid.
    This functionality is copied from the pymongo Database class constructor.
    """
    if not isinstance(name, six.string_types):
        raise TypeError('name must be an instance of %s' % six.string_types)
    elif name != '$external':
    if not isinstance(name, str):
        raise TypeError("name must be an instance of %s" % str)
    elif name != "$external":
        _check_name(name)


def _get_connection_settings(
    db=None, name=None, host=None, port=None,
    read_preference=READ_PREFERENCE,
    username=None, password=None,
    authentication_source=None,
    authentication_mechanism=None,
    **kwargs):
    db=None,
    name=None,
    host=None,
    port=None,
    read_preference=READ_PREFERENCE,
    username=None,
    password=None,
    authentication_source=None,
    authentication_mechanism=None,
    **kwargs
):
    """Get the connection settings as a dict

    :param db: the name of the database to use, for compatibility with connect
@ -73,53 +78,61 @@ def _get_connection_settings(
    .. versionchanged:: 0.10.6 - added mongomock support
    """
    conn_settings = {
        'name': name or db or DEFAULT_DATABASE_NAME,
        'host': host or DEFAULT_HOST,
        'port': port or DEFAULT_PORT,
        'read_preference': read_preference,
        'username': username,
        'password': password,
        'authentication_source': authentication_source,
        'authentication_mechanism': authentication_mechanism
        "name": name or db or DEFAULT_DATABASE_NAME,
        "host": host or DEFAULT_HOST,
        "port": port or DEFAULT_PORT,
        "read_preference": read_preference,
        "username": username,
        "password": password,
        "authentication_source": authentication_source,
        "authentication_mechanism": authentication_mechanism,
    }

    _check_db_name(conn_settings['name'])
    conn_host = conn_settings['host']
    _check_db_name(conn_settings["name"])
    conn_host = conn_settings["host"]

    # Host can be a list or a string, so if string, force to a list.
    if isinstance(conn_host, six.string_types):
    if isinstance(conn_host, str):
        conn_host = [conn_host]

    resolved_hosts = []
    for entity in conn_host:

        # Handle Mongomock
        if entity.startswith('mongomock://'):
            conn_settings['is_mock'] = True
        if entity.startswith("mongomock://"):
            conn_settings["is_mock"] = True
            # `mongomock://` is not a valid url prefix and must be replaced by `mongodb://`
            resolved_hosts.append(entity.replace('mongomock://', 'mongodb://', 1))
            new_entity = entity.replace("mongomock://", "mongodb://", 1)
            resolved_hosts.append(new_entity)

            uri_dict = uri_parser.parse_uri(new_entity)

            database = uri_dict.get("database")
            if database:
                conn_settings["name"] = database

        # Handle URI style connections, only updating connection params which
        # were explicitly specified in the URI.
        elif '://' in entity:
        elif "://" in entity:
            uri_dict = uri_parser.parse_uri(entity)
            resolved_hosts.append(entity)

            if uri_dict.get('database'):
                conn_settings['name'] = uri_dict.get('database')
            database = uri_dict.get("database")
            if database:
                conn_settings["name"] = database

            for param in ('read_preference', 'username', 'password'):
            for param in ("read_preference", "username", "password"):
                if uri_dict.get(param):
                    conn_settings[param] = uri_dict[param]

            uri_options = uri_dict['options']
            if 'replicaset' in uri_options:
                conn_settings['replicaSet'] = uri_options['replicaset']
            if 'authsource' in uri_options:
                conn_settings['authentication_source'] = uri_options['authsource']
            if 'authmechanism' in uri_options:
                conn_settings['authentication_mechanism'] = uri_options['authmechanism']
            if 'readpreference' in uri_options:
            uri_options = uri_dict["options"]
            if "replicaset" in uri_options:
                conn_settings["replicaSet"] = uri_options["replicaset"]
            if "authsource" in uri_options:
                conn_settings["authentication_source"] = uri_options["authsource"]
            if "authmechanism" in uri_options:
                conn_settings["authentication_mechanism"] = uri_options["authmechanism"]
            if "readpreference" in uri_options:
                read_preferences = (
                    ReadPreference.NEAREST,
                    ReadPreference.PRIMARY,
@ -133,40 +146,47 @@ def _get_connection_settings(
                # int (e.g. 3).
                # TODO simplify the code below once we drop support for
                # PyMongo v3.4.
                read_pf_mode = uri_options['readpreference']
                if isinstance(read_pf_mode, six.string_types):
                read_pf_mode = uri_options["readpreference"]
                if isinstance(read_pf_mode, str):
                    read_pf_mode = read_pf_mode.lower()
                for preference in read_preferences:
                    if (
                        preference.name.lower() == read_pf_mode or
                        preference.mode == read_pf_mode
                        preference.name.lower() == read_pf_mode
                        or preference.mode == read_pf_mode
                    ):
                        conn_settings['read_preference'] = preference
                        conn_settings["read_preference"] = preference
                        break
        else:
            resolved_hosts.append(entity)
    conn_settings['host'] = resolved_hosts
    conn_settings["host"] = resolved_hosts

    # Deprecated parameters that should not be passed on
    kwargs.pop('slaves', None)
    kwargs.pop('is_slave', None)
    kwargs.pop("slaves", None)
    kwargs.pop("is_slave", None)

    conn_settings.update(kwargs)
    return conn_settings


def register_connection(alias, db=None, name=None, host=None, port=None,
                        read_preference=READ_PREFERENCE,
                        username=None, password=None,
                        authentication_source=None,
                        authentication_mechanism=None,
                        **kwargs):
def register_connection(
    alias,
    db=None,
    name=None,
    host=None,
    port=None,
    read_preference=READ_PREFERENCE,
    username=None,
    password=None,
    authentication_source=None,
    authentication_mechanism=None,
    **kwargs
):
    """Register the connection settings.

    :param alias: the name that will be used to refer to this connection
        throughout MongoEngine
    :param name: the name of the specific database to use
    :param db: the name of the database to use, for compatibility with connect
    :param name: the name of the specific database to use
    :param host: the host name of the :program:`mongod` instance to connect to
    :param port: the port that the :program:`mongod` instance is running on
    :param read_preference: The read preference for the collection
@ -185,12 +205,17 @@ def register_connection(alias, db=None, name=None, host=None, port=None,
    .. versionchanged:: 0.10.6 - added mongomock support
    """
    conn_settings = _get_connection_settings(
        db=db, name=name, host=host, port=port,
        db=db,
        name=name,
        host=host,
        port=port,
        read_preference=read_preference,
        username=username, password=password,
        username=username,
        password=password,
        authentication_source=authentication_source,
        authentication_mechanism=authentication_mechanism,
        **kwargs)
        **kwargs
    )
    _connection_settings[alias] = conn_settings

@ -206,7 +231,7 @@ def disconnect(alias=DEFAULT_CONNECTION_NAME):
    if alias in _dbs:
        # Detach all cached collections in Documents
        for doc_cls in _get_documents_by_db(alias, DEFAULT_CONNECTION_NAME):
            if issubclass(doc_cls, Document):  # Skip EmbeddedDocument
                doc_cls._disconnect()

        del _dbs[alias]
@ -234,22 +259,24 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
        return _connections[alias]

    # Validate that the requested alias exists in the _connection_settings.
    # Raise MongoEngineConnectionError if it doesn't.
    # Raise ConnectionFailure if it doesn't.
    if alias not in _connection_settings:
        if alias == DEFAULT_CONNECTION_NAME:
            msg = 'You have not defined a default connection'
            msg = "You have not defined a default connection"
        else:
            msg = 'Connection with alias "%s" has not been defined' % alias
        raise MongoEngineConnectionError(msg)
        raise ConnectionFailure(msg)

    def _clean_settings(settings_dict):
        irrelevant_fields_set = {
            'name', 'username', 'password',
            'authentication_source', 'authentication_mechanism'
            "name",
            "username",
            "password",
            "authentication_source",
            "authentication_mechanism",
        }
        return {
            k: v for k, v in settings_dict.items()
            if k not in irrelevant_fields_set
            k: v for k, v in settings_dict.items() if k not in irrelevant_fields_set
        }

    raw_conn_settings = _connection_settings[alias].copy()
@ -260,13 +287,12 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
    conn_settings = _clean_settings(raw_conn_settings)

    # Determine if we should use PyMongo's or mongomock's MongoClient.
    is_mock = conn_settings.pop('is_mock', False)
    is_mock = conn_settings.pop("is_mock", False)
    if is_mock:
        try:
            import mongomock
        except ImportError:
            raise RuntimeError('You need mongomock installed to mock '
                               'MongoEngine.')
            raise RuntimeError("You need mongomock installed to mock MongoEngine.")
        connection_class = mongomock.MongoClient
    else:
        connection_class = MongoClient
@ -277,9 +303,7 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
        connection = existing_connection
    else:
        connection = _create_connection(
            alias=alias,
            connection_class=connection_class,
            **conn_settings
            alias=alias, connection_class=connection_class, **conn_settings
        )
        _connections[alias] = connection
    return _connections[alias]
@ -288,13 +312,12 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
def _create_connection(alias, connection_class, **connection_settings):
    """
    Create the new connection for this alias. Raise
    MongoEngineConnectionError if it can't be established.
    ConnectionFailure if it can't be established.
    """
    try:
        return connection_class(**connection_settings)
    except Exception as e:
        raise MongoEngineConnectionError(
            'Cannot connect to database %s :\n%s' % (alias, e))
        raise ConnectionFailure("Cannot connect to database {} :\n{}".format(alias, e))


def _find_existing_connection(connection_settings):
@ -316,7 +339,7 @@ def _find_existing_connection(connection_settings):
        # Only remove the name but it's important to
        # keep the username/password/authentication_source/authentication_mechanism
        # to identify if the connection could be shared (cfr https://github.com/MongoEngine/mongoengine/issues/2047)
        return {k: v for k, v in settings_dict.items() if k != 'name'}
        return {k: v for k, v in settings_dict.items() if k != "name"}

    cleaned_conn_settings = _clean_settings(connection_settings)
    for db_alias, connection_settings in connection_settings_bis:
@ -332,14 +355,18 @@ def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
    if alias not in _dbs:
        conn = get_connection(alias)
        conn_settings = _connection_settings[alias]
        db = conn[conn_settings['name']]
        auth_kwargs = {'source': conn_settings['authentication_source']}
        if conn_settings['authentication_mechanism'] is not None:
            auth_kwargs['mechanism'] = conn_settings['authentication_mechanism']
        db = conn[conn_settings["name"]]
        auth_kwargs = {"source": conn_settings["authentication_source"]}
        if conn_settings["authentication_mechanism"] is not None:
            auth_kwargs["mechanism"] = conn_settings["authentication_mechanism"]
        # Authenticate if necessary
        if conn_settings['username'] and (conn_settings['password'] or
                conn_settings['authentication_mechanism'] == 'MONGODB-X509'):
            db.authenticate(conn_settings['username'], conn_settings['password'], **auth_kwargs)
        if conn_settings["username"] and (
            conn_settings["password"]
            or conn_settings["authentication_mechanism"] == "MONGODB-X509"
        ):
            db.authenticate(
                conn_settings["username"], conn_settings["password"], **auth_kwargs
            )
        _dbs[alias] = db
    return _dbs[alias]

@ -368,10 +395,10 @@ def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs):

    if new_conn_settings != prev_conn_setting:
        err_msg = (
            u'A different connection with alias `{}` was already '
            u'registered. Use disconnect() first'
            "A different connection with alias `{}` was already "
            "registered. Use disconnect() first"
        ).format(alias)
        raise MongoEngineConnectionError(err_msg)
        raise ConnectionFailure(err_msg)
    else:
        register_connection(alias, db, **kwargs)

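Because MongoEngineConnectionError is renamed to ConnectionFailure throughout this commit, calling code must catch the new name. A hedged usage sketch (assumes a reachable local mongod; the database and alias names are arbitrary examples):

    from mongoengine import connect, disconnect
    from mongoengine.connection import ConnectionFailure

    connect(db="mydb", alias="default")
    try:
        # Re-using an alias with different settings is rejected.
        connect(db="otherdb", alias="default")
    except ConnectionFailure:
        disconnect(alias="default")            # release the alias first
        connect(db="otherdb", alias="default")

The mongomock branch above also means tests can pass host="mongomock://localhost" to connect() and get an in-memory client, provided mongomock is installed.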
@ -1,17 +1,24 @@
from contextlib import contextmanager

from pymongo.read_concern import ReadConcern
from pymongo.write_concern import WriteConcern
from six import iteritems

from mongoengine.common import _import_class
from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
from mongoengine.pymongo_support import count_documents

__all__ = ('switch_db', 'switch_collection', 'no_dereference',
           'no_sub_classes', 'query_counter', 'set_write_concern')
__all__ = (
    "switch_db",
    "switch_collection",
    "no_dereference",
    "no_sub_classes",
    "query_counter",
    "set_write_concern",
    "set_read_write_concern",
)


class switch_db(object):
class switch_db:
    """switch_db alias context manager.

    Example ::
@ -38,21 +45,21 @@ class switch_db(object):
        self.cls = cls
        self.collection = cls._get_collection()
        self.db_alias = db_alias
        self.ori_db_alias = cls._meta.get('db_alias', DEFAULT_CONNECTION_NAME)
        self.ori_db_alias = cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME)

    def __enter__(self):
        """Change the db_alias and clear the cached collection."""
        self.cls._meta['db_alias'] = self.db_alias
        self.cls._meta["db_alias"] = self.db_alias
        self.cls._collection = None
        return self.cls

    def __exit__(self, t, value, traceback):
        """Reset the db_alias and collection."""
        self.cls._meta['db_alias'] = self.ori_db_alias
        self.cls._meta["db_alias"] = self.ori_db_alias
        self.cls._collection = self.collection


class switch_collection(object):
class switch_collection:
    """switch_collection alias context manager.

    Example ::
@ -94,7 +101,7 @@ class switch_collection(object):
        self.cls._get_collection_name = self.ori_get_collection_name


class no_dereference(object):
class no_dereference:
    """no_dereference context manager.

    Turns off all dereferencing in Documents for the duration of the context
@ -111,14 +118,15 @@ class no_dereference(object):
        """
        self.cls = cls

        ReferenceField = _import_class('ReferenceField')
        GenericReferenceField = _import_class('GenericReferenceField')
        ComplexBaseField = _import_class('ComplexBaseField')
        ReferenceField = _import_class("ReferenceField")
        GenericReferenceField = _import_class("GenericReferenceField")
        ComplexBaseField = _import_class("ComplexBaseField")

        self.deref_fields = [k for k, v in iteritems(self.cls._fields)
                             if isinstance(v, (ReferenceField,
                                               GenericReferenceField,
                                               ComplexBaseField))]
        self.deref_fields = [
            k
            for k, v in self.cls._fields.items()
            if isinstance(v, (ReferenceField, GenericReferenceField, ComplexBaseField))
        ]

    def __enter__(self):
        """Change the objects default and _auto_dereference values."""
@ -133,7 +141,7 @@ class no_dereference(object):
        return self.cls


class no_sub_classes(object):
class no_sub_classes:
    """no_sub_classes context manager.

    Only returns instances of this class and no sub (inherited) classes::
@ -161,10 +169,10 @@ class no_sub_classes(object):
        self.cls._subclasses = self.cls_initial_subclasses


class query_counter(object):
class query_counter:
    """Query_counter context manager to get the number of queries.
    This works by updating the `profiling_level` of the database so that all queries get logged,
    resetting the db.system.profile collection at the beginnig of the context and counting the new entries.
    resetting the db.system.profile collection at the beginning of the context and counting the new entries.

    This was designed for debugging purposes. In fact it is a global counter, so queries issued by other
    threads/processes can interfere with it
@ -175,20 +183,17 @@ class query_counter(object):
    - Some queries are ignored by default by the counter (killcursors, db.system.indexes)
    """

    def __init__(self):
    def __init__(self, alias=DEFAULT_CONNECTION_NAME):
        """Construct the query_counter
        """
        self.db = get_db()
        self.db = get_db(alias=alias)
        self.initial_profiling_level = None
        self._ctx_query_counter = 0  # number of queries issued by the context

        self._ignored_query = {
            'ns':
                {'$ne': '%s.system.indexes' % self.db.name},
            'op':  # MONGODB < 3.2
                {'$ne': 'killcursors'},
            'command.killCursors':  # MONGODB >= 3.2
                {'$exists': False}
            "ns": {"$ne": "%s.system.indexes" % self.db.name},
            "op": {"$ne": "killcursors"},  # MONGODB < 3.2
            "command.killCursors": {"$exists": False},  # MONGODB >= 3.2
        }

    def _turn_on_profiling(self):
@ -231,15 +236,20 @@ class query_counter(object):

    def __repr__(self):
        """repr query_counter as the number of queries."""
        return u"%s" % self._get_count()
        return "%s" % self._get_count()

    def _get_count(self):
        """Get the number of queries by counting the current number of entries in db.system.profile
        and subtracting the queries issued by this context. In fact, every time this is called, one
        query is issued, so we need to balance that.
        """
        count = count_documents(self.db.system.profile, self._ignored_query) - self._ctx_query_counter
        self._ctx_query_counter += 1  # Account for the query we just issued to gather the information
        count = (
            count_documents(self.db.system.profile, self._ignored_query)
            - self._ctx_query_counter
        )
        self._ctx_query_counter += (
            1  # Account for the query we just issued to gather the information
        )
        return count


@ -248,3 +258,21 @@ def set_write_concern(collection, write_concerns):
    combined_concerns = dict(collection.write_concern.document.items())
    combined_concerns.update(write_concerns)
    yield collection.with_options(write_concern=WriteConcern(**combined_concerns))


@contextmanager
def set_read_write_concern(collection, write_concerns, read_concerns):
    combined_write_concerns = dict(collection.write_concern.document.items())

    if write_concerns is not None:
        combined_write_concerns.update(write_concerns)

    combined_read_concerns = dict(collection.read_concern.document.items())

    if read_concerns is not None:
        combined_read_concerns.update(read_concerns)

    yield collection.with_options(
        write_concern=WriteConcern(**combined_write_concerns),
        read_concern=ReadConcern(**combined_read_concerns),
    )
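The new set_read_write_concern context manager layers caller-supplied concerns on top of whatever the collection already uses, without mutating the original collection object. A usage sketch under the assumption of an already-registered connection and a Document subclass named Page (both hypothetical here):

    from mongoengine.context_managers import set_read_write_concern

    coll = Page._get_collection()
    with set_read_write_concern(
        coll,
        write_concerns={"w": "majority"},
        read_concerns={"level": "majority"},
    ) as scoped:
        # `scoped` is a pymongo collection clone carrying the merged concerns;
        # `coll` itself keeps its original read/write concern.
        scoped.insert_one({"title": "hello"})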
@ -1,9 +1,12 @@
from bson import DBRef, SON
import six
from six import iteritems

from mongoengine.base import (BaseDict, BaseList, EmbeddedDocumentList,
                              TopLevelDocumentMetaclass, get_document)
from mongoengine.base import (
    BaseDict,
    BaseList,
    EmbeddedDocumentList,
    TopLevelDocumentMetaclass,
    get_document,
)
from mongoengine.base.datastructures import LazyReference
from mongoengine.connection import get_db
from mongoengine.document import Document, EmbeddedDocument
@ -11,7 +14,7 @@ from mongoengine.fields import DictField, ListField, MapField, ReferenceField
from mongoengine.queryset import QuerySet


class DeReference(object):
class DeReference:
    def __call__(self, items, max_depth=1, instance=None, name=None):
        """
        Cheaply dereferences the items to a set depth.
@ -25,7 +28,7 @@ class DeReference(object):
            :class:`~mongoengine.base.ComplexBaseField`
        :param get: A boolean determining if being called by __get__
        """
        if items is None or isinstance(items, six.string_types):
        if items is None or isinstance(items, str):
            return items

        # cheapest way to convert a queryset to a list
@ -36,21 +39,23 @@ class DeReference(object):
        self.max_depth = max_depth
        doc_type = None

        if instance and isinstance(instance, (Document, EmbeddedDocument,
                                              TopLevelDocumentMetaclass)):
        if instance and isinstance(
            instance, (Document, EmbeddedDocument, TopLevelDocumentMetaclass)
        ):
            doc_type = instance._fields.get(name)
            while hasattr(doc_type, 'field'):
            while hasattr(doc_type, "field"):
                doc_type = doc_type.field

            if isinstance(doc_type, ReferenceField):
                field = doc_type
                doc_type = doc_type.document_type
                is_list = not hasattr(items, 'items')
                is_list = not hasattr(items, "items")

                if is_list and all([i.__class__ == doc_type for i in items]):
                    return items
                elif not is_list and all(
                        [i.__class__ == doc_type for i in items.values()]):
                    [i.__class__ == doc_type for i in items.values()]
                ):
                    return items
                elif not field.dbref:
                    # We must turn the ObjectIds into DBRefs
@ -72,7 +77,7 @@ class DeReference(object):

        def _get_items_from_dict(items):
            new_items = {}
            for k, v in iteritems(items):
            for k, v in items.items():
                value = v
                if isinstance(v, list):
                    value = _get_items_from_list(v)
@ -83,7 +88,7 @@ class DeReference(object):
                new_items[k] = value
            return new_items

        if not hasattr(items, 'items'):
        if not hasattr(items, "items"):
            items = _get_items_from_list(items)
        else:
            items = _get_items_from_dict(items)
@ -113,20 +118,26 @@ class DeReference(object):
            depth += 1
            for item in iterator:
                if isinstance(item, (Document, EmbeddedDocument)):
                    for field_name, field in iteritems(item._fields):
                    for field_name, field in item._fields.items():
                        v = item._data.get(field_name, None)
                        if isinstance(v, LazyReference):
                            # LazyReference inherits DBRef but should not be dereferenced here !
                            continue
                        elif isinstance(v, DBRef):
                            reference_map.setdefault(field.document_type, set()).add(v.id)
                        elif isinstance(v, (dict, SON)) and '_ref' in v:
                            reference_map.setdefault(get_document(v['_cls']), set()).add(v['_ref'].id)
                        elif isinstance(v, (dict, SON)) and "_ref" in v:
                            reference_map.setdefault(get_document(v["_cls"]), set()).add(
                                v["_ref"].id
                            )
                        elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
                            field_cls = getattr(getattr(field, 'field', None), 'document_type', None)
                            field_cls = getattr(
                                getattr(field, "field", None), "document_type", None
                            )
                            references = self._find_references(v, depth)
                            for key, refs in iteritems(references):
                                if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)):
                            for key, refs in references.items():
                                if isinstance(
                                    field_cls, (Document, TopLevelDocumentMetaclass)
                                ):
                                    key = field_cls
                                reference_map.setdefault(key, set()).update(refs)
                elif isinstance(item, LazyReference):
@ -134,11 +145,13 @@ class DeReference(object):
                    continue
                elif isinstance(item, DBRef):
                    reference_map.setdefault(item.collection, set()).add(item.id)
                elif isinstance(item, (dict, SON)) and '_ref' in item:
                    reference_map.setdefault(get_document(item['_cls']), set()).add(item['_ref'].id)
                elif isinstance(item, (dict, SON)) and "_ref" in item:
                    reference_map.setdefault(get_document(item["_cls"]), set()).add(
                        item["_ref"].id
                    )
                elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth:
                    references = self._find_references(item, depth - 1)
                    for key, refs in iteritems(references):
                    for key, refs in references.items():
                        reference_map.setdefault(key, set()).update(refs)

        return reference_map
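To make the reference-map bookkeeping above concrete, a simplified standalone model of the structure _find_references builds (collect_refs and the sample data are illustrative, not part of this diff; requires the bson package shipped with pymongo):

    from bson import DBRef, ObjectId

    def collect_refs(values):
        # Group referenced ids by collection, mirroring reference_map's
        # shape: {collection_or_document_class: {id, id, ...}}
        reference_map = {}
        for v in values:
            if isinstance(v, DBRef):
                reference_map.setdefault(v.collection, set()).add(v.id)
        return reference_map

    refs = [DBRef("user", ObjectId()), DBRef("user", ObjectId()), DBRef("page", ObjectId())]
    print({coll: len(ids) for coll, ids in collect_refs(refs).items()})  # {'user': 2, 'page': 1}

Grouping ids per collection first is what lets the fetch step below resolve each collection with a single in_bulk / $in query instead of one round trip per reference.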
@ -147,40 +160,44 @@ class DeReference(object):
        """Fetch all references and convert to their document objects
        """
        object_map = {}
        for collection, dbrefs in iteritems(self.reference_map):
        for collection, dbrefs in self.reference_map.items():

            # we use getattr instead of hasattr because hasattr swallows any exception under python2
            # so it could hide nasty things without raising exceptions (cfr bug #1688))
            ref_document_cls_exists = (getattr(collection, 'objects', None) is not None)
            ref_document_cls_exists = getattr(collection, "objects", None) is not None

            if ref_document_cls_exists:
                col_name = collection._get_collection_name()
                refs = [dbref for dbref in dbrefs
                        if (col_name, dbref) not in object_map]
                refs = [
                    dbref for dbref in dbrefs if (col_name, dbref) not in object_map
                ]
                references = collection.objects.in_bulk(refs)
                for key, doc in iteritems(references):
                for key, doc in references.items():
                    object_map[(col_name, key)] = doc
            else:  # Generic reference: use the refs data to convert to document
                if isinstance(doc_type, (ListField, DictField, MapField)):
                    continue

                refs = [dbref for dbref in dbrefs
                        if (collection, dbref) not in object_map]
                refs = [
                    dbref for dbref in dbrefs if (collection, dbref) not in object_map
                ]

                if doc_type:
                    references = doc_type._get_db()[collection].find({'_id': {'$in': refs}})
                    references = doc_type._get_db()[collection].find(
                        {"_id": {"$in": refs}}
                    )
                    for ref in references:
                        doc = doc_type._from_son(ref)
                        object_map[(collection, doc.id)] = doc
                else:
                    references = get_db()[collection].find({'_id': {'$in': refs}})
                    references = get_db()[collection].find({"_id": {"$in": refs}})
                    for ref in references:
                        if '_cls' in ref:
                            doc = get_document(ref['_cls'])._from_son(ref)
                        if "_cls" in ref:
                            doc = get_document(ref["_cls"])._from_son(ref)
                        elif doc_type is None:
                            doc = get_document(
                                ''.join(x.capitalize()
                                        for x in collection.split('_')))._from_son(ref)
                                "".join(x.capitalize() for x in collection.split("_"))
                            )._from_son(ref)
                        else:
                            doc = doc_type._from_son(ref)
                        object_map[(collection, doc.id)] = doc
@ -208,19 +225,20 @@ class DeReference(object):
            return BaseList(items, instance, name)

        if isinstance(items, (dict, SON)):
            if '_ref' in items:
            if "_ref" in items:
                return self.object_map.get(
                    (items['_ref'].collection, items['_ref'].id), items)
            elif '_cls' in items:
                doc = get_document(items['_cls'])._from_son(items)
                _cls = doc._data.pop('_cls', None)
                del items['_cls']
                    (items["_ref"].collection, items["_ref"].id), items
                )
            elif "_cls" in items:
                doc = get_document(items["_cls"])._from_son(items)
                _cls = doc._data.pop("_cls", None)
                del items["_cls"]
                doc._data = self._attach_objects(doc._data, depth, doc, None)
                if _cls is not None:
                    doc._data['_cls'] = _cls
                    doc._data["_cls"] = _cls
                return doc

        if not hasattr(items, 'items'):
        if not hasattr(items, "items"):
            is_list = True
            list_type = BaseList
            if isinstance(items, EmbeddedDocumentList):
@ -230,7 +248,7 @@ class DeReference(object):
            data = []
        else:
            is_list = False
            iterator = iteritems(items)
            iterator = items.items()
            data = {}

        depth += 1
@ -247,17 +265,23 @@ class DeReference(object):
                        v = data[k]._data.get(field_name, None)
                        if isinstance(v, DBRef):
                            data[k]._data[field_name] = self.object_map.get(
                                (v.collection, v.id), v)
                        elif isinstance(v, (dict, SON)) and '_ref' in v:
                                (v.collection, v.id), v
                            )
                        elif isinstance(v, (dict, SON)) and "_ref" in v:
                            data[k]._data[field_name] = self.object_map.get(
                                (v['_ref'].collection, v['_ref'].id), v)
                                (v["_ref"].collection, v["_ref"].id), v
                            )
                        elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
                            item_name = six.text_type('{0}.{1}.{2}').format(name, k, field_name)
                            data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=item_name)
                            item_name = "{}.{}.{}".format(name, k, field_name)
                            data[k]._data[field_name] = self._attach_objects(
                                v, depth, instance=instance, name=item_name
                            )
                elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
                    item_name = '%s.%s' % (name, k) if name else name
                    data[k] = self._attach_objects(v, depth - 1, instance=instance, name=item_name)
                elif isinstance(v, DBRef) and hasattr(v, 'id'):
                    item_name = "{}.{}".format(name, k) if name else name
                    data[k] = self._attach_objects(
                        v, depth - 1, instance=instance, name=item_name
                    )
                elif isinstance(v, DBRef) and hasattr(v, "id"):
                    data[k] = self.object_map.get((v.collection, v.id), v)

        if instance and name:
@ -4,46 +4,57 @@ import warnings

from bson.dbref import DBRef
import pymongo
from pymongo.read_preferences import ReadPreference
import six
from six import iteritems

from mongoengine import signals
from mongoengine.base import (BaseDict, BaseDocument, BaseList,
                              DocumentMetaclass, EmbeddedDocumentList,
                              TopLevelDocumentMetaclass, get_document)
from mongoengine.base import (
    BaseDict,
    BaseDocument,
    BaseList,
    DocumentMetaclass,
    EmbeddedDocumentList,
    TopLevelDocumentMetaclass,
    get_document,
)
from mongoengine.common import _import_class
from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
from mongoengine.context_managers import (set_write_concern,
                                          switch_collection,
                                          switch_db)
from mongoengine.errors import (InvalidDocumentError, InvalidQueryError,
                                SaveConditionError)
from mongoengine.context_managers import set_write_concern, switch_collection, switch_db
from mongoengine.errors import (
    InvalidDocumentError,
    InvalidQueryError,
    SaveConditionError,
)
from mongoengine.pymongo_support import list_collection_names
from mongoengine.queryset import (NotUniqueError, OperationError,
                                  QuerySet, transform)
from mongoengine.queryset import NotUniqueError, OperationError, QuerySet, transform

__all__ = ('Document', 'EmbeddedDocument', 'DynamicDocument',
           'DynamicEmbeddedDocument', 'OperationError',
           'InvalidCollectionError', 'NotUniqueError', 'MapReduceDocument')
__all__ = (
    "Document",
    "EmbeddedDocument",
    "DynamicDocument",
    "DynamicEmbeddedDocument",
    "OperationError",
    "InvalidCollectionError",
    "NotUniqueError",
    "MapReduceDocument",
)


def includes_cls(fields):
    """Helper function used for ensuring and comparing indexes."""
    first_field = None
    if len(fields):
        if isinstance(fields[0], six.string_types):
        if isinstance(fields[0], str):
            first_field = fields[0]
        elif isinstance(fields[0], (list, tuple)) and len(fields[0]):
            first_field = fields[0][0]
    return first_field == '_cls'
    return first_field == "_cls"


class InvalidCollectionError(Exception):
    pass


class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)):
    """A :class:`~mongoengine.Document` that isn't stored in its own
class EmbeddedDocument(BaseDocument, metaclass=DocumentMetaclass):
    r"""A :class:`~mongoengine.Document` that isn't stored in its own
    collection. :class:`~mongoengine.EmbeddedDocument`\ s should be used as
    fields on :class:`~mongoengine.Document`\ s through the
    :class:`~mongoengine.EmbeddedDocumentField` field type.
@ -56,9 +67,8 @@ class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)):
    :attr:`meta` dictionary.
    """

    __slots__ = ('_instance', )
    __slots__ = ("_instance",)

    # The __metaclass__ attribute is removed by 2to3 when running with Python3
    # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
    my_metaclass = DocumentMetaclass

@ -69,7 +79,7 @@ class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)):
    __hash__ = None

    def __init__(self, *args, **kwargs):
        super(EmbeddedDocument, self).__init__(*args, **kwargs)
        super().__init__(*args, **kwargs)
        self._instance = None
        self._changed_fields = []

@ -82,16 +92,16 @@ class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)):
        return not self.__eq__(other)

    def to_mongo(self, *args, **kwargs):
        data = super(EmbeddedDocument, self).to_mongo(*args, **kwargs)
        data = super().to_mongo(*args, **kwargs)

        # remove _id from the SON if it's in it and it's None
        if '_id' in data and data['_id'] is None:
            del data['_id']
        if "_id" in data and data["_id"] is None:
            del data["_id"]

        return data


class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
    """The base class used for defining the structure and properties of
    collections of documents stored in MongoDB. Inherit from this class, and
    add fields as class attributes to define a document's structure.
@ -143,23 +153,22 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
    in the :attr:`meta` dictionary.
    """

    # The __metaclass__ attribute is removed by 2to3 when running with Python3
    # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
    my_metaclass = TopLevelDocumentMetaclass

    __slots__ = ('__objects',)
    __slots__ = ("__objects",)

    @property
    def pk(self):
        """Get the primary key."""
        if 'id_field' not in self._meta:
        if "id_field" not in self._meta:
            return None
        return getattr(self, self._meta['id_field'])
        return getattr(self, self._meta["id_field"])

    @pk.setter
    def pk(self, value):
        """Set the primary key."""
        return setattr(self, self._meta['id_field'], value)
        return setattr(self, self._meta["id_field"], value)

    def __hash__(self):
        """Return the hash based on the PK of this document. If it's new
@ -173,7 +182,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
    @classmethod
    def _get_db(cls):
        """Some Model using other db_alias"""
        return get_db(cls._meta.get('db_alias', DEFAULT_CONNECTION_NAME))
        return get_db(cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME))

    @classmethod
    def _disconnect(cls):
@ -190,9 +199,9 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
        2. Creates indexes defined in this document's :attr:`meta` dictionary.
        This happens only if `auto_create_index` is True.
        """
        if not hasattr(cls, '_collection') or cls._collection is None:
        if not hasattr(cls, "_collection") or cls._collection is None:
            # Get the collection, either capped or regular.
            if cls._meta.get('max_size') or cls._meta.get('max_documents'):
            if cls._meta.get("max_size") or cls._meta.get("max_documents"):
                cls._collection = cls._get_capped_collection()
            else:
                db = cls._get_db()
@ -203,8 +212,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
            # set to False.
            # Also there is no need to ensure indexes on slave.
            db = cls._get_db()
            if cls._meta.get('auto_create_index', True) and\
                    db.client.is_primary:
            if cls._meta.get("auto_create_index", True) and db.client.is_primary:
                cls.ensure_indexes()

        return cls._collection
@ -216,8 +224,8 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
        collection_name = cls._get_collection_name()

        # Get max document limit and max byte size from meta.
        max_size = cls._meta.get('max_size') or 10 * 2 ** 20  # 10MB default
        max_documents = cls._meta.get('max_documents')
        max_size = cls._meta.get("max_size") or 10 * 2 ** 20  # 10MB default
        max_documents = cls._meta.get("max_documents")

        # MongoDB will automatically raise the size to make it a multiple of
        # 256 bytes. We raise it here ourselves to be able to reliably compare
@ -227,37 +235,36 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):

        # If the collection already exists and has different options
        # (i.e. isn't capped or has different max/size), raise an error.
        if collection_name in list_collection_names(db, include_system_collections=True):
        if collection_name in list_collection_names(
            db, include_system_collections=True
        ):
            collection = db[collection_name]
            options = collection.options()
            if (
                options.get('max') != max_documents or
                options.get('size') != max_size
            ):
            if options.get("max") != max_documents or options.get("size") != max_size:
                raise InvalidCollectionError(
                    'Cannot create collection "{}" as a capped '
                    'collection as it already exists'.format(cls._collection)
                    "collection as it already exists".format(cls._collection)
                )

            return collection

        # Create a new capped collection.
        opts = {'capped': True, 'size': max_size}
        opts = {"capped": True, "size": max_size}
        if max_documents:
            opts['max'] = max_documents
            opts["max"] = max_documents

        return db.create_collection(collection_name, **opts)

    def to_mongo(self, *args, **kwargs):
        data = super(Document, self).to_mongo(*args, **kwargs)
        data = super().to_mongo(*args, **kwargs)

        # If '_id' is None, try and set it from self._data. If that
        # doesn't exist either, remove '_id' from the SON completely.
        if data['_id'] is None:
            if self._data.get('id') is None:
                del data['_id']
        if data["_id"] is None:
            if self._data.get("id") is None:
                del data["_id"]
            else:
                data['_id'] = self._data['id']
                data["_id"] = self._data["id"]

        return data

@ -279,15 +286,17 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
        query = {}

        if self.pk is None:
            raise InvalidDocumentError('The document does not have a primary key.')
            raise InvalidDocumentError("The document does not have a primary key.")

        id_field = self._meta['id_field']
        id_field = self._meta["id_field"]
        query = query.copy() if isinstance(query, dict) else query.to_query(self)

        if id_field not in query:
            query[id_field] = self.pk
        elif query[id_field] != self.pk:
            raise InvalidQueryError('Invalid document modify query: it must modify only this document.')
            raise InvalidQueryError(
                "Invalid document modify query: it must modify only this document."
            )

        # Need to add shard key to query, or you get an error
        query.update(self._object_key)
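The validation above backs Document.modify(), which applies an atomic update scoped to exactly this document. A hedged sketch (assumes modify's `query` keyword and a hypothetical Person document, for illustration only):

    from mongoengine import Document, IntField, StringField

    class Person(Document):
        name = StringField()
        age = IntField()

    p = Person(name="Ada", age=36).save()
    # The query may further constrain the update, but it must still point
    # at this document's pk; anything else raises InvalidQueryError.
    changed = p.modify(query={"name": "Ada"}, inc__age=1)
    print(changed, p.age)  # True 37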
@ -304,12 +313,22 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):

        return True

    def save(self, force_insert=False, validate=True, clean=True,
             write_concern=None, cascade=None, cascade_kwargs=None,
             _refs=None, save_condition=None, signal_kwargs=None, **kwargs):
    def save(
        self,
        force_insert=False,
        validate=True,
        clean=True,
        write_concern=None,
        cascade=None,
        cascade_kwargs=None,
        _refs=None,
        save_condition=None,
        signal_kwargs=None,
        **kwargs
    ):
        """Save the :class:`~mongoengine.Document` to the database. If the
        document already exists, it will be updated, otherwise it will be
        created.
        created. Returns the saved object instance.

        :param force_insert: only try to create a new document, don't allow
            updates of existing documents.
@ -360,8 +379,8 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
        """
        signal_kwargs = signal_kwargs or {}

        if self._meta.get('abstract'):
            raise InvalidDocumentError('Cannot save an abstract document.')
        if self._meta.get("abstract"):
            raise InvalidDocumentError("Cannot save an abstract document.")

        signals.pre_save.send(self.__class__, document=self, **signal_kwargs)

@ -371,15 +390,16 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
        if write_concern is None:
            write_concern = {}

        doc_id = self.to_mongo(fields=[self._meta['id_field']])
        created = ('_id' not in doc_id or self._created or force_insert)
        doc_id = self.to_mongo(fields=[self._meta["id_field"]])
        created = "_id" not in doc_id or self._created or force_insert

        signals.pre_save_post_validation.send(self.__class__, document=self,
                                              created=created, **signal_kwargs)
        signals.pre_save_post_validation.send(
            self.__class__, document=self, created=created, **signal_kwargs
        )
        # it might be refreshed by the pre_save_post_validation hook, e.g., for etag generation
        doc = self.to_mongo()

        if self._meta.get('auto_create_index', True):
        if self._meta.get("auto_create_index", True):
            self.ensure_indexes()

        try:
@ -387,44 +407,45 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
            if created:
                object_id = self._save_create(doc, force_insert, write_concern)
            else:
                object_id, created = self._save_update(doc, save_condition,
                                                       write_concern)
                object_id, created = self._save_update(
                    doc, save_condition, write_concern
                )

            if cascade is None:
                cascade = (self._meta.get('cascade', False) or
                           cascade_kwargs is not None)
                cascade = self._meta.get("cascade", False) or cascade_kwargs is not None

            if cascade:
                kwargs = {
                    'force_insert': force_insert,
                    'validate': validate,
                    'write_concern': write_concern,
                    'cascade': cascade
                    "force_insert": force_insert,
                    "validate": validate,
                    "write_concern": write_concern,
                    "cascade": cascade,
                }
                if cascade_kwargs:  # Allow granular control over cascades
                    kwargs.update(cascade_kwargs)
                kwargs['_refs'] = _refs
                kwargs["_refs"] = _refs
                self.cascade_save(**kwargs)

        except pymongo.errors.DuplicateKeyError as err:
            message = u'Tried to save duplicate unique keys (%s)'
            raise NotUniqueError(message % six.text_type(err))
            message = "Tried to save duplicate unique keys (%s)"
            raise NotUniqueError(message % err)
        except pymongo.errors.OperationFailure as err:
            message = 'Could not save document (%s)'
            if re.match('^E1100[01] duplicate key', six.text_type(err)):
            message = "Could not save document (%s)"
            if re.match("^E1100[01] duplicate key", str(err)):
                # E11000 - duplicate key error index
                # E11001 - duplicate key on update
                message = u'Tried to save duplicate unique keys (%s)'
                raise NotUniqueError(message % six.text_type(err))
            raise OperationError(message % six.text_type(err))
                message = "Tried to save duplicate unique keys (%s)"
                raise NotUniqueError(message % err)
            raise OperationError(message % err)

        # Make sure we store the PK on this document now that it's saved
        id_field = self._meta['id_field']
        if created or id_field not in self._meta.get('shard_key', []):
        id_field = self._meta["id_field"]
        if created or id_field not in self._meta.get("shard_key", []):
            self[id_field] = self._fields[id_field].to_python(object_id)

        signals.post_save.send(self.__class__, document=self,
                               created=created, **signal_kwargs)
        signals.post_save.send(
            self.__class__, document=self, created=created, **signal_kwargs
        )

        self._clear_changed_fields()
        self._created = False
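A sketch of the save_condition path exercised by save() above, which effectively turns the update into a compare-and-swap and raises SaveConditionError when the condition no longer matches (the Counter document is hypothetical, for illustration):

    from mongoengine import Document, IntField
    from mongoengine.errors import SaveConditionError

    class Counter(Document):
        version = IntField(default=0)

    c = Counter().save()
    seen = c.version
    c.version = seen + 1
    try:
        # Only write if the stored version is still the one we read.
        c.save(save_condition={"version": seen})
    except SaveConditionError:
        # Another writer updated the document between our read and write.
        pass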
@ -442,11 +463,12 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
                return wc_collection.insert_one(doc).inserted_id
            # insert_one will raise a DuplicateKeyError where save() does not;
            # therefore we need to catch it and call replace_one instead.
            if '_id' in doc:
            if "_id" in doc:
                raw_object = wc_collection.find_one_and_replace(
                    {'_id': doc['_id']}, doc)
                    {"_id": doc["_id"]}, doc
                )
                if raw_object:
                    return doc['_id']
                    return doc["_id"]

            object_id = wc_collection.insert_one(doc).inserted_id

@ -461,9 +483,9 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):

        update_doc = {}
        if updates:
            update_doc['$set'] = updates
            update_doc["$set"] = updates
        if removals:
            update_doc['$unset'] = removals
            update_doc["$unset"] = removals

        return update_doc

@ -473,39 +495,38 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
        Helper method, should only be used inside save().
        """
        collection = self._get_collection()
        object_id = doc['_id']
        object_id = doc["_id"]
        created = False

        select_dict = {}
        if save_condition is not None:
            select_dict = transform.query(self.__class__, **save_condition)

        select_dict['_id'] = object_id
        select_dict["_id"] = object_id

        # Need to add shard key to query, or you get an error
        shard_key = self._meta.get('shard_key', tuple())
        shard_key = self._meta.get("shard_key", tuple())
        for k in shard_key:
            path = self._lookup_field(k.split('.'))
            path = self._lookup_field(k.split("."))
            actual_key = [p.db_field for p in path]
            val = doc
            for ak in actual_key:
                val = val[ak]
            select_dict['.'.join(actual_key)] = val
            select_dict[".".join(actual_key)] = val

        update_doc = self._get_update_doc()
        if update_doc:
            upsert = save_condition is None
            with set_write_concern(collection, write_concern) as wc_collection:
                last_error = wc_collection.update_one(
                    select_dict,
                    update_doc,
                    upsert=upsert
                    select_dict, update_doc, upsert=upsert
                ).raw_result
            if not upsert and last_error['n'] == 0:
                raise SaveConditionError('Race condition preventing'
                                         ' document update detected')
            if not upsert and last_error["n"] == 0:
                raise SaveConditionError(
                    "Race condition preventing document update detected"
                )
            if last_error is not None:
                updated_existing = last_error.get('updatedExisting')
                updated_existing = last_error.get("updatedExisting")
                if updated_existing is False:
                    created = True
                    # !!! This is bad, means we accidentally created a new,
@ -518,24 +539,23 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
        """Recursively save any references and generic references on the
        document.
        """
        _refs = kwargs.get('_refs') or []
        _refs = kwargs.get("_refs") or []

        ReferenceField = _import_class('ReferenceField')
        GenericReferenceField = _import_class('GenericReferenceField')
        ReferenceField = _import_class("ReferenceField")
        GenericReferenceField = _import_class("GenericReferenceField")

        for name, cls in self._fields.items():
            if not isinstance(cls, (ReferenceField,
                                    GenericReferenceField)):
            if not isinstance(cls, (ReferenceField, GenericReferenceField)):
                continue

            ref = self._data.get(name)
            if not ref or isinstance(ref, DBRef):
                continue

            if not getattr(ref, '_changed_fields', True):
            if not getattr(ref, "_changed_fields", True):
                continue

            ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data))
            ref_id = "{},{}".format(ref.__class__.__name__, str(ref._data))
            if ref and ref_id not in _refs:
                _refs.append(ref_id)
                kwargs["_refs"] = _refs
@ -544,27 +564,31 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
|
||||
|
||||
@property
|
||||
def _qs(self):
|
||||
"""Return the queryset to use for updating / reloading / deletions."""
|
||||
if not hasattr(self, '__objects'):
|
||||
"""Return the default queryset corresponding to this document."""
|
||||
if not hasattr(self, "__objects"):
|
||||
self.__objects = QuerySet(self, self._get_collection())
|
||||
return self.__objects
|
||||
|
||||
@property
|
||||
def _object_key(self):
|
||||
"""Get the query dict that can be used to fetch this object from
|
||||
the database. Most of the time it's a simple PK lookup, but in
|
||||
case of a sharded collection with a compound shard key, it can
|
||||
contain a more complex query.
|
||||
"""Return a query dict that can be used to fetch this document.
|
||||
|
||||
Most of the time the dict is a simple PK lookup, but in case of
|
||||
a sharded collection with a compound shard key, it can contain a more
|
||||
complex query.
|
||||
|
||||
Note that the dict returned by this method uses MongoEngine field
|
||||
names instead of PyMongo field names (e.g. "pk" instead of "_id",
|
||||
"some__nested__field" instead of "some.nested.field", etc.).
|
||||
"""
|
||||
select_dict = {'pk': self.pk}
|
||||
shard_key = self.__class__._meta.get('shard_key', tuple())
|
||||
select_dict = {"pk": self.pk}
|
||||
shard_key = self.__class__._meta.get("shard_key", tuple())
|
||||
for k in shard_key:
|
||||
path = self._lookup_field(k.split('.'))
|
||||
actual_key = [p.db_field for p in path]
|
||||
val = self
|
||||
for ak in actual_key:
|
||||
val = getattr(val, ak)
|
||||
select_dict['__'.join(actual_key)] = val
|
||||
field_parts = k.split(".")
|
||||
for part in field_parts:
|
||||
val = getattr(val, part)
|
||||
select_dict["__".join(field_parts)] = val
|
||||
return select_dict
|
||||
|
||||
def update(self, **kwargs):
@@ -575,14 +599,13 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
been saved.
"""
if self.pk is None:
if kwargs.get('upsert', False):
if kwargs.get("upsert", False):
query = self.to_mongo()
if '_cls' in query:
del query['_cls']
if "_cls" in query:
del query["_cls"]
return self._qs.filter(**query).update_one(**kwargs)
else:
raise OperationError(
'attempt to update a document not yet saved')
raise OperationError("attempt to update a document not yet saved")

# Need to add shard key to query, or you get an error
return self._qs.filter(**self._object_key).update_one(**kwargs)
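A note on the upsert branch above: an unsaved document can still be persisted through update(). A minimal sketch, assuming an active connect() and this hypothetical class:

    from mongoengine import Document, IntField, StringField

    class Page(Document):
        title = StringField()
        views = IntField()

    page = Page(title="Home")                 # pk is still None
    page.update(upsert=True, set__views=1)    # filters on to_mongo() and upserts
    Page(title="About").update(set__views=2)  # raises OperationError: not yet saved
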
@@ -606,16 +629,17 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
signals.pre_delete.send(self.__class__, document=self, **signal_kwargs)

# Delete FileFields separately
FileField = _import_class('FileField')
for name, field in iteritems(self._fields):
FileField = _import_class("FileField")
for name, field in self._fields.items():
if isinstance(field, FileField):
getattr(self, name).delete()

try:
self._qs.filter(
**self._object_key).delete(write_concern=write_concern, _from_doc_delete=True)
self._qs.filter(**self._object_key).delete(
write_concern=write_concern, _from_doc_delete=True
)
except pymongo.errors.OperationFailure as err:
message = u'Could not delete document (%s)' % err.message
message = "Could not delete document (%s)" % err.args
raise OperationError(message)
signals.post_delete.send(self.__class__, document=self, **signal_kwargs)

@@ -684,7 +708,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):

.. versionadded:: 0.5
"""
DeReference = _import_class('DeReference')
DeReference = _import_class("DeReference")
DeReference()([self], max_depth + 1)
return self

@@ -702,20 +726,24 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
if fields and isinstance(fields[0], int):
max_depth = fields[0]
fields = fields[1:]
elif 'max_depth' in kwargs:
max_depth = kwargs['max_depth']
elif "max_depth" in kwargs:
max_depth = kwargs["max_depth"]

if self.pk is None:
raise self.DoesNotExist('Document does not exist')
raise self.DoesNotExist("Document does not exist")

obj = self._qs.read_preference(ReadPreference.PRIMARY).filter(
**self._object_key).only(*fields).limit(
1).select_related(max_depth=max_depth)
obj = (
self._qs.read_preference(ReadPreference.PRIMARY)
.filter(**self._object_key)
.only(*fields)
.limit(1)
.select_related(max_depth=max_depth)
)

if obj:
obj = obj[0]
else:
raise self.DoesNotExist('Document does not exist')
raise self.DoesNotExist("Document does not exist")
for field in obj._data:
if not fields or field in fields:
try:
@@ -731,9 +759,11 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
# i.e. obj.update(unset__field=1) followed by obj.reload()
delattr(self, field)

self._changed_fields = list(
set(self._changed_fields) - set(fields)
) if fields else obj._changed_fields
self._changed_fields = (
list(set(self._changed_fields) - set(fields))
if fields
else obj._changed_fields
)
self._created = False
return self

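Typical use of the reworked reload() chain, reusing the hypothetical Page class from the previous sketch:

    page = Page.objects.first()
    Page.objects(id=page.id).update_one(inc__views=1)
    page.reload()         # re-reads from the primary (ReadPreference.PRIMARY)
    page.reload("views")  # restricts the refresh to the listed fields
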
@@ -759,7 +789,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
"""Returns an instance of :class:`~bson.dbref.DBRef` useful in
`__raw__` queries."""
if self.pk is None:
msg = 'Only saved documents can have a valid dbref'
msg = "Only saved documents can have a valid dbref"
raise OperationError(msg)
return DBRef(self.__class__._get_collection_name(), self.pk)

@@ -768,18 +798,22 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
"""This method registers the delete rules to apply when removing this
object.
"""
classes = [get_document(class_name)
for class_name in cls._subclasses
if class_name != cls.__name__] + [cls]
documents = [get_document(class_name)
for class_name in document_cls._subclasses
if class_name != document_cls.__name__] + [document_cls]
classes = [
get_document(class_name)
for class_name in cls._subclasses
if class_name != cls.__name__
] + [cls]
documents = [
get_document(class_name)
for class_name in document_cls._subclasses
if class_name != document_cls.__name__
] + [document_cls]

for klass in classes:
for document_cls in documents:
delete_rules = klass._meta.get('delete_rules') or {}
delete_rules = klass._meta.get("delete_rules") or {}
delete_rules[(document_cls, field_name)] = rule
klass._meta['delete_rules'] = delete_rules
klass._meta["delete_rules"] = delete_rules

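register_delete_rule() is usually invoked indirectly, via the reverse_delete_rule argument of a reference field; the classes below are illustrative:

    from mongoengine import CASCADE, Document, ReferenceField, StringField

    class Author(Document):
        name = StringField()

    class Book(Document):
        author = ReferenceField(Author, reverse_delete_rule=CASCADE)

    # Declaring reverse_delete_rule registers (Book, "author") -> CASCADE in
    # Author._meta["delete_rules"], so deleting an Author deletes their Books.
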
@classmethod
def drop_collection(cls):
@@ -794,8 +828,9 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
"""
coll_name = cls._get_collection_name()
if not coll_name:
raise OperationError('Document %s has no collection defined '
'(is it abstract ?)' % cls)
raise OperationError(
"Document %s has no collection defined (is it abstract ?)" % cls
)
cls._collection = None
db = cls._get_db()
db.drop_collection(coll_name)
@@ -811,19 +846,14 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
"""
index_spec = cls._build_index_spec(keys)
index_spec = index_spec.copy()
fields = index_spec.pop('fields')
drop_dups = kwargs.get('drop_dups', False)
if drop_dups:
msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.'
warnings.warn(msg, DeprecationWarning)
index_spec['background'] = background
fields = index_spec.pop("fields")
index_spec["background"] = background
index_spec.update(kwargs)

return cls._get_collection().create_index(fields, **index_spec)

@classmethod
def ensure_index(cls, key_or_list, drop_dups=False, background=False,
**kwargs):
def ensure_index(cls, key_or_list, background=False, **kwargs):
"""Ensure that the given indexes are in place. Deprecated in favour
of create_index.

@@ -831,12 +861,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
construct a multi-field index); keys may be prefixed with a **+**
or a **-** to determine the index ordering
:param background: Allows index creation in the background
:param drop_dups: Was removed/ignored with MongoDB >2.7.5. The value
will be removed if PyMongo3+ is used
"""
if drop_dups:
msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.'
warnings.warn(msg, DeprecationWarning)
return cls.create_index(key_or_list, background=background, **kwargs)

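For context, create_index() and the now-deprecated ensure_index() are normally driven either directly or through meta["indexes"]; a brief sketch with a hypothetical Sale class:

    from mongoengine import Document, IntField, StringField

    class Sale(Document):
        sku = StringField()
        qty = IntField()
        meta = {"indexes": ["sku", ("-qty", "sku")]}  # fed to _build_index_spec

    Sale.create_index(["+sku", "-qty"], background=True)  # preferred API
    Sale.ensure_index("sku")  # deprecated alias; forwards to create_index()
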
@classmethod
@@ -848,13 +873,9 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
.. note:: You can disable automatic index creation by setting
`auto_create_index` to False in the documents meta data
"""
background = cls._meta.get('index_background', False)
drop_dups = cls._meta.get('index_drop_dups', False)
index_opts = cls._meta.get('index_opts') or {}
index_cls = cls._meta.get('index_cls', True)
if drop_dups:
msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.'
warnings.warn(msg, DeprecationWarning)
background = cls._meta.get("index_background", False)
index_opts = cls._meta.get("index_opts") or {}
index_cls = cls._meta.get("index_cls", True)

collection = cls._get_collection()
# 746: when connection is via mongos, the read preference is not necessarily an indication that
@@ -869,40 +890,39 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
cls_indexed = False

# Ensure document-defined indexes are created
if cls._meta['index_specs']:
index_spec = cls._meta['index_specs']
if cls._meta["index_specs"]:
index_spec = cls._meta["index_specs"]
for spec in index_spec:
spec = spec.copy()
fields = spec.pop('fields')
fields = spec.pop("fields")
cls_indexed = cls_indexed or includes_cls(fields)
opts = index_opts.copy()
opts.update(spec)

# we shouldn't pass 'cls' to the collection.ensureIndex options
# because of https://jira.mongodb.org/browse/SERVER-769
if 'cls' in opts:
del opts['cls']
if "cls" in opts:
del opts["cls"]

collection.create_index(fields, background=background, **opts)

# If _cls is being used (for polymorphism), it needs an index,
# only if another index doesn't begin with _cls
if index_cls and not cls_indexed and cls._meta.get('allow_inheritance'):
if index_cls and not cls_indexed and cls._meta.get("allow_inheritance"):

# we shouldn't pass 'cls' to the collection.ensureIndex options
# because of https://jira.mongodb.org/browse/SERVER-769
if 'cls' in index_opts:
del index_opts['cls']
if "cls" in index_opts:
del index_opts["cls"]

collection.create_index('_cls', background=background,
**index_opts)
collection.create_index("_cls", background=background, **index_opts)

@classmethod
def list_indexes(cls):
""" Lists all of the indexes that should be created for given
collection. It includes all the indexes from super- and sub-classes.
"""
if cls._meta.get('abstract'):
if cls._meta.get("abstract"):
return []

# get all the base classes, subclasses and siblings
@@ -910,22 +930,27 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):

def get_classes(cls):

if (cls not in classes and
isinstance(cls, TopLevelDocumentMetaclass)):
if cls not in classes and isinstance(cls, TopLevelDocumentMetaclass):
classes.append(cls)

for base_cls in cls.__bases__:
if (isinstance(base_cls, TopLevelDocumentMetaclass) and
base_cls != Document and
not base_cls._meta.get('abstract') and
base_cls._get_collection().full_name == cls._get_collection().full_name and
base_cls not in classes):
if (
isinstance(base_cls, TopLevelDocumentMetaclass)
and base_cls != Document
and not base_cls._meta.get("abstract")
and base_cls._get_collection().full_name
== cls._get_collection().full_name
and base_cls not in classes
):
classes.append(base_cls)
get_classes(base_cls)
for subclass in cls.__subclasses__():
if (isinstance(base_cls, TopLevelDocumentMetaclass) and
subclass._get_collection().full_name == cls._get_collection().full_name and
subclass not in classes):
if (
isinstance(base_cls, TopLevelDocumentMetaclass)
and subclass._get_collection().full_name
== cls._get_collection().full_name
and subclass not in classes
):
classes.append(subclass)
get_classes(subclass)

@@ -935,11 +960,11 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
def get_indexes_spec(cls):
indexes = []

if cls._meta['index_specs']:
index_spec = cls._meta['index_specs']
if cls._meta["index_specs"]:
index_spec = cls._meta["index_specs"]
for spec in index_spec:
spec = spec.copy()
fields = spec.pop('fields')
fields = spec.pop("fields")
indexes.append(fields)
return indexes

@@ -950,10 +975,10 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
indexes.append(index)

# finish up by appending { '_id': 1 } and { '_cls': 1 }, if needed
if [(u'_id', 1)] not in indexes:
indexes.append([(u'_id', 1)])
if cls._meta.get('index_cls', True) and cls._meta.get('allow_inheritance'):
indexes.append([(u'_cls', 1)])
if [("_id", 1)] not in indexes:
indexes.append([("_id", 1)])
if cls._meta.get("index_cls", True) and cls._meta.get("allow_inheritance"):
indexes.append([("_cls", 1)])

return indexes

@@ -967,30 +992,29 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):

existing = []
for info in cls._get_collection().index_information().values():
if '_fts' in info['key'][0]:
index_type = info['key'][0][1]
text_index_fields = info.get('weights').keys()
existing.append(
[(key, index_type) for key in text_index_fields])
if "_fts" in info["key"][0]:
index_type = info["key"][0][1]
text_index_fields = info.get("weights").keys()
existing.append([(key, index_type) for key in text_index_fields])
else:
existing.append(info['key'])
existing.append(info["key"])
missing = [index for index in required if index not in existing]
extra = [index for index in existing if index not in required]

# if { _cls: 1 } is missing, make sure it's *really* necessary
if [(u'_cls', 1)] in missing:
if [("_cls", 1)] in missing:
cls_obsolete = False
for index in existing:
if includes_cls(index) and index not in extra:
cls_obsolete = True
break
if cls_obsolete:
missing.remove([(u'_cls', 1)])
missing.remove([("_cls", 1)])

return {'missing': missing, 'extra': extra}
return {"missing": missing, "extra": extra}


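list_indexes() and compare_indexes() pair naturally for index auditing; a hedged sketch reusing the hypothetical Sale class from the earlier example:

    Sale.list_indexes()      # every index spec the class hierarchy defines
    Sale.compare_indexes()   # e.g. {"missing": [...], "extra": [...]}, diffing
                             # the declared specs against index_information()
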
class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)):
class DynamicDocument(Document, metaclass=TopLevelDocumentMetaclass):
"""A Dynamic Document class allowing flexible, expandable and uncontrolled
schemas. As a :class:`~mongoengine.Document` subclass, acts in the same
way as an ordinary document but has expanded style properties. Any data
@@ -1004,7 +1028,6 @@ class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)):
There is one caveat on Dynamic Documents: undeclared fields cannot start with `_`
"""

# The __metaclass__ attribute is removed by 2to3 when running with Python3
# my_metaclass is defined so that metaclass can be queried in Python 2 & 3
my_metaclass = TopLevelDocumentMetaclass

@@ -1019,16 +1042,15 @@ class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)):
setattr(self, field_name, None)
self._dynamic_fields[field_name].null = False
else:
super(DynamicDocument, self).__delattr__(*args, **kwargs)
super().__delattr__(*args, **kwargs)


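The dynamic-field behavior, including the leading-underscore caveat and the __delattr__ override above, in a small example:

    from mongoengine import DynamicDocument, StringField

    class Profile(DynamicDocument):
        name = StringField()

    p = Profile(name="Ada")
    p.nickname = "ada99"    # undeclared field, persisted anyway
    del p.nickname          # routed through the __delattr__ override
    # p._secret = "x"       # not allowed: dynamic names cannot start with "_"
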
class DynamicEmbeddedDocument(six.with_metaclass(DocumentMetaclass, EmbeddedDocument)):
class DynamicEmbeddedDocument(EmbeddedDocument, metaclass=DocumentMetaclass):
"""A Dynamic Embedded Document class allowing flexible, expandable and
uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more
information about dynamic documents.
"""

# The __metaclass__ attribute is removed by 2to3 when running with Python3
# my_metaclass is defined so that metaclass can be queried in Python 2 & 3
my_metaclass = DocumentMetaclass

@@ -1048,7 +1070,7 @@ class DynamicEmbeddedDocument(six.with_metaclass(DocumentMetaclass, EmbeddedDocu
setattr(self, field_name, None)


class MapReduceDocument(object):
class MapReduceDocument:
"""A document returned from a map/reduce query.

:param collection: An instance of :class:`~pymongo.Collection`
@@ -1072,17 +1094,16 @@ class MapReduceDocument(object):
"""Lazy-load the object referenced by ``self.key``. ``self.key``
should be the ``primary_key``.
"""
id_field = self._document()._meta['id_field']
id_field = self._document()._meta["id_field"]
id_field_type = type(id_field)

if not isinstance(self.key, id_field_type):
try:
self.key = id_field_type(self.key)
except Exception:
raise Exception('Could not cast key as %s' %
id_field_type.__name__)
raise Exception("Could not cast key as %s" % id_field_type.__name__)

if not hasattr(self, '_key_object'):
if not hasattr(self, "_key_object"):
self._key_object = self._document.objects.with_id(self.key)
return self._key_object
return self._key_object
@@ -1,12 +1,21 @@
from collections import defaultdict

import six
from six import iteritems

__all__ = ('NotRegistered', 'InvalidDocumentError', 'LookUpError',
'DoesNotExist', 'MultipleObjectsReturned', 'InvalidQueryError',
'OperationError', 'NotUniqueError', 'FieldDoesNotExist',
'ValidationError', 'SaveConditionError', 'DeprecatedError')
__all__ = (
"NotRegistered",
"InvalidDocumentError",
"LookUpError",
"DoesNotExist",
"MultipleObjectsReturned",
"InvalidQueryError",
"OperationError",
"NotUniqueError",
"BulkWriteError",
"FieldDoesNotExist",
"ValidationError",
"SaveConditionError",
"DeprecatedError",
)


class NotRegistered(Exception):
@@ -41,6 +50,10 @@ class NotUniqueError(OperationError):
pass


class BulkWriteError(OperationError):
pass


class SaveConditionError(OperationError):
pass

@@ -71,25 +84,25 @@ class ValidationError(AssertionError):
field_name = None
_message = None

def __init__(self, message='', **kwargs):
super(ValidationError, self).__init__(message)
self.errors = kwargs.get('errors', {})
self.field_name = kwargs.get('field_name')
def __init__(self, message="", **kwargs):
super().__init__(message)
self.errors = kwargs.get("errors", {})
self.field_name = kwargs.get("field_name")
self.message = message

def __str__(self):
return six.text_type(self.message)
return str(self.message)

def __repr__(self):
return '%s(%s,)' % (self.__class__.__name__, self.message)
return "{}({},)".format(self.__class__.__name__, self.message)

def __getattribute__(self, name):
message = super(ValidationError, self).__getattribute__(name)
if name == 'message':
message = super().__getattribute__(name)
if name == "message":
if self.field_name:
message = '%s' % message
message = "%s" % message
if self.errors:
message = '%s(%s)' % (message, self._format_errors())
message = "{}({})".format(message, self._format_errors())
return message

def _get_message(self):
@@ -111,12 +124,12 @@ class ValidationError(AssertionError):
def build_dict(source):
errors_dict = {}
if isinstance(source, dict):
for field_name, error in iteritems(source):
for field_name, error in source.items():
errors_dict[field_name] = build_dict(error)
elif isinstance(source, ValidationError) and source.errors:
return build_dict(source.errors)
else:
return six.text_type(source)
return str(source)

return errors_dict

@@ -128,22 +141,22 @@ class ValidationError(AssertionError):
def _format_errors(self):
"""Returns a string listing all errors within a document"""

def generate_key(value, prefix=''):
def generate_key(value, prefix=""):
if isinstance(value, list):
value = ' '.join([generate_key(k) for k in value])
value = " ".join([generate_key(k) for k in value])
elif isinstance(value, dict):
value = ' '.join(
[generate_key(v, k) for k, v in iteritems(value)])
value = " ".join([generate_key(v, k) for k, v in value.items()])

results = '%s.%s' % (prefix, value) if prefix else value
results = "{}.{}".format(prefix, value) if prefix else value
return results

error_dict = defaultdict(list)
for k, v in iteritems(self.to_dict()):
for k, v in self.to_dict().items():
error_dict[generate_key(v)].append(k)
return ' '.join(['%s: %s' % (k, v) for k, v in iteritems(error_dict)])
return " ".join(["{}: {}".format(k, v) for k, v in error_dict.items()])


class DeprecatedError(Exception):
"""Raise when a user uses a feature that has been Deprecated"""

pass
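How the nested ValidationError pieces fit together; the output shown in the comments is approximate:

    from mongoengine.errors import ValidationError

    inner = ValidationError("Invalid value", field_name="age")
    outer = ValidationError("ValidationError", errors={"age": inner})
    outer.to_dict()   # {'age': 'Invalid value'}
    str(outer)        # roughly "ValidationError(Invalid value: ['age'])",
                      # produced via _format_errors() above
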
File diff suppressed because it is too large
@@ -11,9 +11,9 @@ MONGODB_36 = (3, 6)


def get_mongodb_version():
"""Return the version of the connected mongoDB (first 2 digits)
"""Return the version of the default connected mongoDB (first 2 digits)

:return: tuple(int, int)
"""
version_list = get_connection().server_info()['versionArray'][:2]  # e.g. (3, 2)
version_list = get_connection().server_info()["versionArray"][:2]  # e.g: (3, 2)
return tuple(version_list)
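A hedged usage sketch; the module path is assumed from this file's layout and may differ:

    from mongoengine.mongodb_support import MONGODB_36, get_mongodb_version

    if get_mongodb_version() >= MONGODB_36:
        print("connected server is MongoDB 3.6 or newer")
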
@@ -2,6 +2,7 @@
Helper functions, constants, and types to aid with PyMongo v2.7 - v3.x support.
"""
import pymongo
from pymongo.errors import OperationFailure

_PYMONGO_37 = (3, 7)

@@ -10,13 +11,41 @@ PYMONGO_VERSION = tuple(pymongo.version_tuple[:2])
IS_PYMONGO_GTE_37 = PYMONGO_VERSION >= _PYMONGO_37


def count_documents(collection, filter):
"""Pymongo>3.7 deprecates count in favour of count_documents"""
def count_documents(
collection, filter, skip=None, limit=None, hint=None, collation=None
):
"""Pymongo>3.7 deprecates count in favour of count_documents
"""
if limit == 0:
return 0  # Pymongo raises an OperationFailure if called with limit=0

kwargs = {}
if skip is not None:
kwargs["skip"] = skip
if limit is not None:
kwargs["limit"] = limit
if hint not in (-1, None):
kwargs["hint"] = hint
if collation is not None:
kwargs["collation"] = collation

# count_documents appeared in pymongo 3.7
if IS_PYMONGO_GTE_37:
return collection.count_documents(filter)
else:
count = collection.find(filter).count()
return count
try:
return collection.count_documents(filter=filter, **kwargs)
except OperationFailure:
# OperationFailure - accounts for some operators that used to work
# with .count but are no longer working with count_documents (i.e $geoNear, $near, and $nearSphere)
# fallback to deprecated Cursor.count
# Keeping this should be reevaluated the day pymongo removes .count entirely
pass

cursor = collection.find(filter)
for option, option_value in kwargs.items():
cursor_method = getattr(cursor, option)
cursor = cursor_method(option_value)
with_limit_and_skip = "skip" in kwargs or "limit" in kwargs
return cursor.count(with_limit_and_skip=with_limit_and_skip)


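Usage sketch for the shim above; the collection handle is illustrative:

    from pymongo import MongoClient

    coll = MongoClient().my_db.items
    n = count_documents(coll, {"qty": {"$gte": 10}}, skip=5, limit=100)
    # PyMongo >= 3.7 maps this to coll.count_documents(); for operators that
    # count_documents rejects (e.g. $near) it falls back to Cursor.count().
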
def list_collection_names(db, include_system_collections=False):
@@ -27,6 +56,6 @@ def list_collection_names(db, include_system_collections=False):
collections = db.collection_names()

if not include_system_collections:
collections = [c for c in collections if not c.startswith('system.')]
collections = [c for c in collections if not c.startswith("system.")]

return collections
@@ -1,23 +0,0 @@
"""
Helper functions, constants, and types to aid with Python v2.7 - v3.x support
"""
import six

# six.BytesIO resolves to StringIO.StringIO in Py2 and io.BytesIO in Py3.
StringIO = six.BytesIO

# Additionally for Py2, try to use the faster cStringIO, if available
if not six.PY3:
try:
import cStringIO
except ImportError:
pass
else:
StringIO = cStringIO.StringIO


if six.PY3:
from collections.abc import Hashable
else:
# raises DeprecationWarnings in Python >=3.7
from collections import Hashable
@@ -7,11 +7,22 @@ from mongoengine.queryset.visitor import *

# Expose just the public subset of all imported objects and constants.
__all__ = (
'QuerySet', 'QuerySetNoCache', 'Q', 'queryset_manager', 'QuerySetManager',
'QueryFieldList', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL',

"QuerySet",
"QuerySetNoCache",
"Q",
"queryset_manager",
"QuerySetManager",
"QueryFieldList",
"DO_NOTHING",
"NULLIFY",
"CASCADE",
"DENY",
"PULL",
# Errors that might be related to a queryset, mostly here for backward
# compatibility
'DoesNotExist', 'InvalidQueryError', 'MultipleObjectsReturned',
'NotUniqueError', 'OperationError',
"DoesNotExist",
"InvalidQueryError",
"MultipleObjectsReturned",
"NotUniqueError",
"OperationError",
)
File diff suppressed because it is too large
@@ -1,12 +1,15 @@
__all__ = ('QueryFieldList',)
__all__ = ("QueryFieldList",)


class QueryFieldList(object):
class QueryFieldList:
"""Object that handles combinations of .only() and .exclude() calls"""

ONLY = 1
EXCLUDE = 0

def __init__(self, fields=None, value=ONLY, always_include=None, _only_called=False):
def __init__(
self, fields=None, value=ONLY, always_include=None, _only_called=False
):
"""The QueryFieldList builder

:param fields: A list of fields used in `.only()` or `.exclude()`
@@ -49,7 +52,7 @@ class QueryFieldList(object):
self.fields = f.fields - self.fields
self._clean_slice()

if '_id' in f.fields:
if "_id" in f.fields:
self._id = f.value

if self.always_include:
@@ -59,25 +62,23 @@ class QueryFieldList(object):
else:
self.fields -= self.always_include

if getattr(f, '_only_called', False):
if getattr(f, "_only_called", False):
self._only_called = True
return self

def __bool__(self):
return bool(self.fields)

__nonzero__ = __bool__  # For Py2 support

def as_dict(self):
field_list = {field: self.value for field in self.fields}
if self.slice:
field_list.update(self.slice)
if self._id is not None:
field_list['_id'] = self._id
field_list["_id"] = self._id
return field_list

def reset(self):
self.fields = set([])
self.fields = set()
self.slice = {}
self.value = self.ONLY

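QueryFieldList is what ultimately backs .only() / .exclude() projections; a rough sketch, assuming a hypothetical Post document class with these fields declared:

    Post.objects.only("title")        # projection {"title": 1}
    Post.objects.exclude("comments")  # projection {"comments": 0}
    # Chaining only() and exclude() merges their field sets through the
    # __add__ logic shown above (ONLY=1 / EXCLUDE=0 values).
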
@@ -1,10 +1,10 @@
from functools import partial
from mongoengine.queryset.queryset import QuerySet

__all__ = ('queryset_manager', 'QuerySetManager')
__all__ = ("queryset_manager", "QuerySetManager")


class QuerySetManager(object):
class QuerySetManager:
"""
The default QuerySet Manager.

@@ -33,7 +33,7 @@ class QuerySetManager(object):
return self

# owner is the document that contains the QuerySetManager
queryset_class = owner._meta.get('queryset_class', self.default)
queryset_class = owner._meta.get("queryset_class", self.default)
queryset = queryset_class(owner, owner._get_collection())
if self.get_queryset:
arg_count = self.get_queryset.__code__.co_argcount
@@ -1,11 +1,22 @@
import six

from mongoengine.errors import OperationError
from mongoengine.queryset.base import (BaseQuerySet, CASCADE, DENY, DO_NOTHING,
NULLIFY, PULL)
from mongoengine.queryset.base import (
BaseQuerySet,
CASCADE,
DENY,
DO_NOTHING,
NULLIFY,
PULL,
)

__all__ = ('QuerySet', 'QuerySetNoCache', 'DO_NOTHING', 'NULLIFY', 'CASCADE',
'DENY', 'PULL')
__all__ = (
"QuerySet",
"QuerySetNoCache",
"DO_NOTHING",
"NULLIFY",
"CASCADE",
"DENY",
"PULL",
)

# The maximum number of items to display in a QuerySet.__repr__
REPR_OUTPUT_SIZE = 20
@@ -57,12 +68,12 @@ class QuerySet(BaseQuerySet):
def __repr__(self):
"""Provide a string representation of the QuerySet"""
if self._iter:
return '.. queryset mid-iteration ..'
return ".. queryset mid-iteration .."

self._populate_cache()
data = self._result_cache[:REPR_OUTPUT_SIZE + 1]
data = self._result_cache[: REPR_OUTPUT_SIZE + 1]
if len(data) > REPR_OUTPUT_SIZE:
data[-1] = '...(remaining elements truncated)...'
data[-1] = "...(remaining elements truncated)..."
return repr(data)

def _iter_results(self):
@@ -114,8 +125,8 @@ class QuerySet(BaseQuerySet):
# Pull in ITER_CHUNK_SIZE docs from the database and store them in
# the result cache.
try:
for _ in six.moves.range(ITER_CHUNK_SIZE):
self._result_cache.append(six.next(self))
for _ in range(ITER_CHUNK_SIZE):
self._result_cache.append(next(self))
except StopIteration:
# Getting this exception means there are no more docs in the
# db cursor. Set _has_more to False so that we can use that
@@ -130,10 +141,11 @@ class QuerySet(BaseQuerySet):
getting the count
"""
if with_limit_and_skip is False:
return super(QuerySet, self).count(with_limit_and_skip)
return super().count(with_limit_and_skip)

if self._len is None:
self._len = super(QuerySet, self).count(with_limit_and_skip)
# cache the length
self._len = super().count(with_limit_and_skip)

return self._len

@@ -143,10 +155,9 @@ class QuerySet(BaseQuerySet):
.. versionadded:: 0.8.3 Convert to non caching queryset
"""
if self._result_cache is not None:
raise OperationError('QuerySet already cached')
raise OperationError("QuerySet already cached")

return self._clone_into(QuerySetNoCache(self._document,
self._collection))
return self._clone_into(QuerySetNoCache(self._document, self._collection))


class QuerySetNoCache(BaseQuerySet):
@@ -165,17 +176,17 @@ class QuerySetNoCache(BaseQuerySet):
.. versionchanged:: 0.6.13 Now doesnt modify the cursor
"""
if self._iter:
return '.. queryset mid-iteration ..'
return ".. queryset mid-iteration .."

data = []
for _ in six.moves.range(REPR_OUTPUT_SIZE + 1):
for _ in range(REPR_OUTPUT_SIZE + 1):
try:
data.append(six.next(self))
data.append(next(self))
except StopIteration:
break

if len(data) > REPR_OUTPUT_SIZE:
data[-1] = '...(remaining elements truncated)...'
data[-1] = "...(remaining elements truncated)..."

self.rewind()
return repr(data)
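The caching-vs-non-caching split in practice; a minimal sketch with a hypothetical Post class and an active connect():

    from mongoengine import BooleanField, Document, StringField

    class Post(Document):
        title = StringField()
        published = BooleanField()

    qs = Post.objects(published=True)
    len(qs)        # fills _result_cache in ITER_CHUNK_SIZE batches
    qs.count()     # second call reuses the cached _len
    for post in Post.objects.no_cache():  # QuerySetNoCache: nothing is cached
        print(post.title)
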
@@ -3,28 +3,59 @@ from collections import defaultdict
from bson import ObjectId, SON
from bson.dbref import DBRef
import pymongo
import six
from six import iteritems

from mongoengine.base import UPDATE_OPERATORS
from mongoengine.common import _import_class
from mongoengine.errors import InvalidQueryError

__all__ = ('query', 'update')
__all__ = ("query", "update", "STRING_OPERATORS")

COMPARISON_OPERATORS = ('ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod',
'all', 'size', 'exists', 'not', 'elemMatch', 'type')
GEO_OPERATORS = ('within_distance', 'within_spherical_distance',
'within_box', 'within_polygon', 'near', 'near_sphere',
'max_distance', 'min_distance', 'geo_within', 'geo_within_box',
'geo_within_polygon', 'geo_within_center',
'geo_within_sphere', 'geo_intersects')
STRING_OPERATORS = ('contains', 'icontains', 'startswith',
'istartswith', 'endswith', 'iendswith',
'exact', 'iexact')
CUSTOM_OPERATORS = ('match',)
MATCH_OPERATORS = (COMPARISON_OPERATORS + GEO_OPERATORS +
STRING_OPERATORS + CUSTOM_OPERATORS)
COMPARISON_OPERATORS = (
"ne",
"gt",
"gte",
"lt",
"lte",
"in",
"nin",
"mod",
"all",
"size",
"exists",
"not",
"elemMatch",
"type",
)
GEO_OPERATORS = (
"within_distance",
"within_spherical_distance",
"within_box",
"within_polygon",
"near",
"near_sphere",
"max_distance",
"min_distance",
"geo_within",
"geo_within_box",
"geo_within_polygon",
"geo_within_center",
"geo_within_sphere",
"geo_intersects",
)
STRING_OPERATORS = (
"contains",
"icontains",
"startswith",
"istartswith",
"endswith",
"iendswith",
"exact",
"iexact",
)
CUSTOM_OPERATORS = ("match",)
MATCH_OPERATORS = (
COMPARISON_OPERATORS + GEO_OPERATORS + STRING_OPERATORS + CUSTOM_OPERATORS
)


# TODO make this less complex
@@ -33,11 +64,11 @@ def query(_doc_cls=None, **kwargs):
mongo_query = {}
merge_query = defaultdict(list)
for key, value in sorted(kwargs.items()):
if key == '__raw__':
if key == "__raw__":
mongo_query.update(value)
continue

parts = key.rsplit('__')
parts = key.rsplit("__")
indices = [(i, p) for i, p in enumerate(parts) if p.isdigit()]
parts = [part for part in parts if not part.isdigit()]
# Check for an operator and transform to mongo-style if there is
@@ -46,11 +77,11 @@ def query(_doc_cls=None, **kwargs):
op = parts.pop()

# Allow to escape operator-like field name by __
if len(parts) > 1 and parts[-1] == '':
if len(parts) > 1 and parts[-1] == "":
parts.pop()

negate = False
if len(parts) > 1 and parts[-1] == 'not':
if len(parts) > 1 and parts[-1] == "not":
parts.pop()
negate = True

@@ -62,18 +93,18 @@ def query(_doc_cls=None, **kwargs):
raise InvalidQueryError(e)
parts = []

CachedReferenceField = _import_class('CachedReferenceField')
GenericReferenceField = _import_class('GenericReferenceField')
CachedReferenceField = _import_class("CachedReferenceField")
GenericReferenceField = _import_class("GenericReferenceField")

cleaned_fields = []
for field in fields:
append_field = True
if isinstance(field, six.string_types):
if isinstance(field, str):
parts.append(field)
append_field = False
# is last and CachedReferenceField
elif isinstance(field, CachedReferenceField) and fields[-1] == field:
parts.append('%s._id' % field.db_field)
parts.append("%s._id" % field.db_field)
else:
parts.append(field.db_field)

@@ -83,15 +114,15 @@ def query(_doc_cls=None, **kwargs):
# Convert value to proper value
field = cleaned_fields[-1]

singular_ops = [None, 'ne', 'gt', 'gte', 'lt', 'lte', 'not']
singular_ops = [None, "ne", "gt", "gte", "lt", "lte", "not"]
singular_ops += STRING_OPERATORS
if op in singular_ops:
value = field.prepare_query_value(op, value)

if isinstance(field, CachedReferenceField) and value:
value = value['_id']
value = value["_id"]

elif op in ('in', 'nin', 'all', 'near') and not isinstance(value, dict):
elif op in ("in", "nin", "all", "near") and not isinstance(value, dict):
# Raise an error if the in/nin/all/near param is not iterable.
value = _prepare_query_for_iterable(field, op, value)

@@ -101,71 +132,77 @@ def query(_doc_cls=None, **kwargs):
# * If the value is an ObjectId, the key should be "field_name._ref.$id".
if isinstance(field, GenericReferenceField):
if isinstance(value, DBRef):
parts[-1] += '._ref'
parts[-1] += "._ref"
elif isinstance(value, ObjectId):
parts[-1] += '._ref.$id'
parts[-1] += "._ref.$id"

# if op and op not in COMPARISON_OPERATORS:
if op:
if op in GEO_OPERATORS:
value = _geo_operator(field, op, value)
elif op in ('match', 'elemMatch'):
ListField = _import_class('ListField')
EmbeddedDocumentField = _import_class('EmbeddedDocumentField')
elif op in ("match", "elemMatch"):
ListField = _import_class("ListField")
EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
if (
isinstance(value, dict) and
isinstance(field, ListField) and
isinstance(field.field, EmbeddedDocumentField)
isinstance(value, dict)
and isinstance(field, ListField)
and isinstance(field.field, EmbeddedDocumentField)
):
value = query(field.field.document_type, **value)
else:
value = field.prepare_query_value(op, value)
value = {'$elemMatch': value}
value = {"$elemMatch": value}
elif op in CUSTOM_OPERATORS:
NotImplementedError('Custom method "%s" has not '
'been implemented' % op)
NotImplementedError(
'Custom method "%s" has not ' "been implemented" % op
)
elif op not in STRING_OPERATORS:
value = {'$' + op: value}
value = {"$" + op: value}

if negate:
value = {'$not': value}
value = {"$not": value}

for i, part in indices:
parts.insert(i, part)

key = '.'.join(parts)
key = ".".join(parts)

if op is None or key not in mongo_query:
if key not in mongo_query:
mongo_query[key] = value
elif key in mongo_query:
else:
if isinstance(mongo_query[key], dict) and isinstance(value, dict):
mongo_query[key].update(value)
# $max/minDistance needs to come last - convert to SON
value_dict = mongo_query[key]
if ('$maxDistance' in value_dict or '$minDistance' in value_dict) and \
('$near' in value_dict or '$nearSphere' in value_dict):
if ("$maxDistance" in value_dict or "$minDistance" in value_dict) and (
"$near" in value_dict or "$nearSphere" in value_dict
):
value_son = SON()
for k, v in iteritems(value_dict):
if k == '$maxDistance' or k == '$minDistance':
for k, v in value_dict.items():
if k == "$maxDistance" or k == "$minDistance":
continue
value_son[k] = v
# Required for MongoDB >= 2.6, may fail when combining
# PyMongo 3+ and MongoDB < 2.6
near_embedded = False
for near_op in ('$near', '$nearSphere'):
for near_op in ("$near", "$nearSphere"):
if isinstance(value_dict.get(near_op), dict):
value_son[near_op] = SON(value_son[near_op])
if '$maxDistance' in value_dict:
value_son[near_op]['$maxDistance'] = value_dict['$maxDistance']
if '$minDistance' in value_dict:
value_son[near_op]['$minDistance'] = value_dict['$minDistance']
if "$maxDistance" in value_dict:
value_son[near_op]["$maxDistance"] = value_dict[
"$maxDistance"
]
if "$minDistance" in value_dict:
value_son[near_op]["$minDistance"] = value_dict[
"$minDistance"
]
near_embedded = True

if not near_embedded:
if '$maxDistance' in value_dict:
value_son['$maxDistance'] = value_dict['$maxDistance']
if '$minDistance' in value_dict:
value_son['$minDistance'] = value_dict['$minDistance']
if "$maxDistance" in value_dict:
value_son["$maxDistance"] = value_dict["$maxDistance"]
if "$minDistance" in value_dict:
value_son["$minDistance"] = value_dict["$minDistance"]
mongo_query[key] = value_son
else:
# Store for manually merging later
@@ -177,10 +214,10 @@ def query(_doc_cls=None, **kwargs):
del mongo_query[k]
if isinstance(v, list):
value = [{k: val} for val in v]
if '$and' in mongo_query.keys():
mongo_query['$and'].extend(value)
if "$and" in mongo_query.keys():
mongo_query["$and"].extend(value)
else:
mongo_query['$and'] = value
mongo_query["$and"] = value

return mongo_query

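What the query() transformer emits for a couple of common lookups; the Post class here is hypothetical:

    from mongoengine import Document, IntField, StringField
    from mongoengine.queryset import transform

    class Post(Document):
        title = StringField()
        rating = IntField()

    transform.query(Post, title__in=["a", "b"], rating__not__gte=7)
    # -> {"title": {"$in": ["a", "b"]}, "rating": {"$not": {"$gte": 7}}}
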
@@ -192,15 +229,15 @@ def update(_doc_cls=None, **update):
mongo_update = {}

for key, value in update.items():
if key == '__raw__':
if key == "__raw__":
mongo_update.update(value)
continue

parts = key.split('__')
parts = key.split("__")

# if there is no operator, default to 'set'
if len(parts) < 3 and parts[0] not in UPDATE_OPERATORS:
parts.insert(0, 'set')
parts.insert(0, "set")

# Check for an operator and transform to mongo-style if there is
op = None
@@ -208,13 +245,13 @@ def update(_doc_cls=None, **update):
op = parts.pop(0)
# Convert Pythonic names to Mongo equivalents
operator_map = {
'push_all': 'pushAll',
'pull_all': 'pullAll',
'dec': 'inc',
'add_to_set': 'addToSet',
'set_on_insert': 'setOnInsert'
"push_all": "pushAll",
"pull_all": "pullAll",
"dec": "inc",
"add_to_set": "addToSet",
"set_on_insert": "setOnInsert",
}
if op == 'dec':
if op == "dec":
# Support decrement by flipping a positive value's sign
# and using 'inc'
value = -value
@@ -227,7 +264,7 @@ def update(_doc_cls=None, **update):
match = parts.pop()

# Allow to escape operator-like field name by __
if len(parts) > 1 and parts[-1] == '':
if len(parts) > 1 and parts[-1] == "":
parts.pop()

if _doc_cls:
@@ -242,10 +279,10 @@ def update(_doc_cls=None, **update):
appended_sub_field = False
for field in fields:
append_field = True
if isinstance(field, six.string_types):
if isinstance(field, str):
# Convert the S operator to $
if field == 'S':
field = '$'
if field == "S":
field = "$"
parts.append(field)
append_field = False
else:
@@ -253,7 +290,7 @@ def update(_doc_cls=None, **update):
if append_field:
appended_sub_field = False
cleaned_fields.append(field)
if hasattr(field, 'field'):
if hasattr(field, "field"):
cleaned_fields.append(field.field)
appended_sub_field = True

@@ -263,52 +300,53 @@ def update(_doc_cls=None, **update):
else:
field = cleaned_fields[-1]

GeoJsonBaseField = _import_class('GeoJsonBaseField')
GeoJsonBaseField = _import_class("GeoJsonBaseField")
if isinstance(field, GeoJsonBaseField):
value = field.to_mongo(value)

if op == 'pull':
if op == "pull":
if field.required or value is not None:
if match in ('in', 'nin') and not isinstance(value, dict):
if match in ("in", "nin") and not isinstance(value, dict):
value = _prepare_query_for_iterable(field, op, value)
else:
value = field.prepare_query_value(op, value)
elif op == 'push' and isinstance(value, (list, tuple, set)):
elif op == "push" and isinstance(value, (list, tuple, set)):
value = [field.prepare_query_value(op, v) for v in value]
elif op in (None, 'set', 'push'):
elif op in (None, "set", "push"):
if field.required or value is not None:
value = field.prepare_query_value(op, value)
elif op in ('pushAll', 'pullAll'):
elif op in ("pushAll", "pullAll"):
value = [field.prepare_query_value(op, v) for v in value]
elif op in ('addToSet', 'setOnInsert'):
elif op in ("addToSet", "setOnInsert"):
if isinstance(value, (list, tuple, set)):
value = [field.prepare_query_value(op, v) for v in value]
elif field.required or value is not None:
value = field.prepare_query_value(op, value)
elif op == 'unset':
elif op == "unset":
value = 1
elif op == 'inc':
elif op == "inc":
value = field.prepare_query_value(op, value)

if match:
match = '$' + match
match = "$" + match
value = {match: value}

key = '.'.join(parts)
key = ".".join(parts)

if 'pull' in op and '.' in key:
if "pull" in op and "." in key:
# Dot operators don't work on pull operations
# unless they point to a list field
# Otherwise it uses nested dict syntax
if op == 'pullAll':
raise InvalidQueryError('pullAll operations only support '
'a single field depth')
if op == "pullAll":
raise InvalidQueryError(
"pullAll operations only support a single field depth"
)

# Look for the last list field and use dot notation until there
field_classes = [c.__class__ for c in cleaned_fields]
field_classes.reverse()
ListField = _import_class('ListField')
EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField')
ListField = _import_class("ListField")
EmbeddedDocumentListField = _import_class("EmbeddedDocumentListField")
if ListField in field_classes or EmbeddedDocumentListField in field_classes:
# Join all fields via dot notation to the last ListField or EmbeddedDocumentListField
# Then process as normal
@@ -317,37 +355,36 @@ def update(_doc_cls=None, **update):
else:
_check_field = EmbeddedDocumentListField

last_listField = len(
cleaned_fields) - field_classes.index(_check_field)
key = '.'.join(parts[:last_listField])
last_listField = len(cleaned_fields) - field_classes.index(_check_field)
key = ".".join(parts[:last_listField])
parts = parts[last_listField:]
parts.insert(0, key)

parts.reverse()
for key in parts:
value = {key: value}
elif op == 'addToSet' and isinstance(value, list):
value = {key: {'$each': value}}
elif op in ('push', 'pushAll'):
elif op == "addToSet" and isinstance(value, list):
value = {key: {"$each": value}}
elif op in ("push", "pushAll"):
if parts[-1].isdigit():
key = '.'.join(parts[0:-1])
key = ".".join(parts[0:-1])
position = int(parts[-1])
# $position expects an iterable. If pushing a single value,
# wrap it in a list.
if not isinstance(value, (set, tuple, list)):
value = [value]
value = {key: {'$each': value, '$position': position}}
value = {key: {"$each": value, "$position": position}}
else:
if op == 'pushAll':
op = 'push'  # convert to non-deprecated keyword
if op == "pushAll":
op = "push"  # convert to non-deprecated keyword
if not isinstance(value, (set, tuple, list)):
value = [value]
value = {key: {'$each': value}}
value = {key: {"$each": value}}
else:
value = {key: value}
else:
value = {key: value}
key = '$' + op
key = "$" + op
if key not in mongo_update:
mongo_update[key] = value
elif key in mongo_update and isinstance(mongo_update[key], dict):
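Reusing the hypothetical Post class from the previous sketch (extended with tags = ListField(StringField())), update() applies the operator_map renames and the dec -> negated $inc trick:

    transform.update(Post, dec__rating=1, push_all__tags=["a", "b"])
    # -> {"$inc": {"rating": -1}, "$push": {"tags": {"$each": ["a", "b"]}}}
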
@@ -358,45 +395,47 @@ def update(_doc_cls=None, **update):

def _geo_operator(field, op, value):
"""Helper to return the query for a given geo query."""
if op == 'max_distance':
value = {'$maxDistance': value}
elif op == 'min_distance':
value = {'$minDistance': value}
if op == "max_distance":
value = {"$maxDistance": value}
elif op == "min_distance":
value = {"$minDistance": value}
elif field._geo_index == pymongo.GEO2D:
if op == 'within_distance':
value = {'$within': {'$center': value}}
elif op == 'within_spherical_distance':
value = {'$within': {'$centerSphere': value}}
elif op == 'within_polygon':
value = {'$within': {'$polygon': value}}
elif op == 'near':
value = {'$near': value}
elif op == 'near_sphere':
value = {'$nearSphere': value}
elif op == 'within_box':
value = {'$within': {'$box': value}}
else:
raise NotImplementedError('Geo method "%s" has not been '
'implemented for a GeoPointField' % op)
else:
if op == 'geo_within':
value = {'$geoWithin': _infer_geometry(value)}
elif op == 'geo_within_box':
value = {'$geoWithin': {'$box': value}}
elif op == 'geo_within_polygon':
value = {'$geoWithin': {'$polygon': value}}
elif op == 'geo_within_center':
value = {'$geoWithin': {'$center': value}}
elif op == 'geo_within_sphere':
value = {'$geoWithin': {'$centerSphere': value}}
elif op == 'geo_intersects':
value = {'$geoIntersects': _infer_geometry(value)}
elif op == 'near':
value = {'$near': _infer_geometry(value)}
if op == "within_distance":
value = {"$within": {"$center": value}}
elif op == "within_spherical_distance":
value = {"$within": {"$centerSphere": value}}
elif op == "within_polygon":
value = {"$within": {"$polygon": value}}
elif op == "near":
value = {"$near": value}
elif op == "near_sphere":
value = {"$nearSphere": value}
elif op == "within_box":
value = {"$within": {"$box": value}}
else:
raise NotImplementedError(
'Geo method "%s" has not been implemented for a %s '
% (op, field._name)
'Geo method "%s" has not been ' "implemented for a GeoPointField" % op
)
else:
if op == "geo_within":
value = {"$geoWithin": _infer_geometry(value)}
elif op == "geo_within_box":
value = {"$geoWithin": {"$box": value}}
elif op == "geo_within_polygon":
value = {"$geoWithin": {"$polygon": value}}
elif op == "geo_within_center":
value = {"$geoWithin": {"$center": value}}
elif op == "geo_within_sphere":
value = {"$geoWithin": {"$centerSphere": value}}
elif op == "geo_intersects":
value = {"$geoIntersects": _infer_geometry(value)}
elif op == "near":
value = {"$near": _infer_geometry(value)}
else:
raise NotImplementedError(
'Geo method "{}" has not been implemented for a {} '.format(
op, field._name
)
)
return value

@@ -406,51 +445,58 @@ def _infer_geometry(value):
given value.
"""
if isinstance(value, dict):
if '$geometry' in value:
if "$geometry" in value:
return value
elif 'coordinates' in value and 'type' in value:
return {'$geometry': value}
raise InvalidQueryError('Invalid $geometry dictionary should have '
'type and coordinates keys')
elif "coordinates" in value and "type" in value:
return {"$geometry": value}
raise InvalidQueryError(
"Invalid $geometry dictionary should have type and coordinates keys"
)
elif isinstance(value, (list, set)):
# TODO: shouldn't we test value[0][0][0][0] to see if it is MultiPolygon?

try:
value[0][0][0]
return {'$geometry': {'type': 'Polygon', 'coordinates': value}}
return {"$geometry": {"type": "Polygon", "coordinates": value}}
except (TypeError, IndexError):
pass

try:
value[0][0]
return {'$geometry': {'type': 'LineString', 'coordinates': value}}
return {"$geometry": {"type": "LineString", "coordinates": value}}
except (TypeError, IndexError):
pass

try:
value[0]
return {'$geometry': {'type': 'Point', 'coordinates': value}}
return {"$geometry": {"type": "Point", "coordinates": value}}
except (TypeError, IndexError):
pass

raise InvalidQueryError('Invalid $geometry data. Can be either a '
'dictionary or (nested) lists of coordinate(s)')
raise InvalidQueryError(
"Invalid $geometry data. Can be either a "
"dictionary or (nested) lists of coordinate(s)"
)


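_infer_geometry() picks the GeoJSON type purely from nesting depth, as the try/except ladder above shows:

    _infer_geometry([40, 5])                         # -> {"$geometry": {"type": "Point", ...}}
    _infer_geometry([[40, 5], [41, 6]])              # -> LineString
    _infer_geometry([[[40, 5], [41, 6], [40, 5]]])   # -> Polygon
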
def _prepare_query_for_iterable(field, op, value):
# We need a special check for BaseDocument, because - although it's iterable - using
# it as such in the context of this method is most definitely a mistake.
BaseDocument = _import_class('BaseDocument')
BaseDocument = _import_class("BaseDocument")

if isinstance(value, BaseDocument):
raise TypeError("When using the `in`, `nin`, `all`, or "
"`near`-operators you can\'t use a "
"`Document`, you must wrap your object "
"in a list (object -> [object]).")
raise TypeError(
"When using the `in`, `nin`, `all`, or "
"`near`-operators you can't use a "
"`Document`, you must wrap your object "
"in a list (object -> [object])."
)

if not hasattr(value, '__iter__'):
raise TypeError("The `in`, `nin`, `all`, or "
"`near`-operators must be applied to an "
"iterable (e.g. a list).")
if not hasattr(value, "__iter__"):
raise TypeError(
"The `in`, `nin`, `all`, or "
"`near`-operators must be applied to an "
"iterable (e.g. a list)."
)

return [field.prepare_query_value(op, v) for v in value]
@ -1,12 +1,18 @@
import copy
import warnings

from mongoengine.errors import InvalidQueryError
from mongoengine.queryset import transform

__all__ = ('Q', 'QNode')
__all__ = ("Q", "QNode")


class QNodeVisitor(object):
def warn_empty_is_deprecated():
    msg = "'empty' property is deprecated in favour of using 'not bool(filter)'"
    warnings.warn(msg, DeprecationWarning, stacklevel=2)


class QNodeVisitor:
    """Base visitor class for visiting Q-object nodes in a query tree.
    """

@ -69,16 +75,16 @@ class QueryCompilerVisitor(QNodeVisitor):
        self.document = document

    def visit_combination(self, combination):
        operator = '$and'
        operator = "$and"
        if combination.operation == combination.OR:
            operator = '$or'
            operator = "$or"
        return {operator: combination.children}

    def visit_query(self, query):
        return transform.query(self.document, **query.query)


class QNode(object):
class QNode:
    """Base class for nodes in query trees."""

    AND = 0
@ -96,16 +102,19 @@ class QNode(object):
        """Combine this node with another node into a QCombination
        object.
        """
        if getattr(other, 'empty', True):
        # If the other Q() is empty, ignore it and just use `self`.
        if not bool(other):
            return self

        if self.empty:
        # Or if this Q is empty, ignore it and just use `other`.
        if not bool(self):
            return other

        return QCombination(operation, [self, other])

    @property
    def empty(self):
        warn_empty_is_deprecated()
        return False

    def __or__(self, other):
@ -132,8 +141,11 @@ class QCombination(QNode):
        self.children.append(node)

    def __repr__(self):
        op = ' & ' if self.operation is self.AND else ' | '
        return '(%s)' % op.join([repr(node) for node in self.children])
        op = " & " if self.operation is self.AND else " | "
        return "(%s)" % op.join([repr(node) for node in self.children])

    def __bool__(self):
        return bool(self.children)

    def accept(self, visitor):
        for i in range(len(self.children)):
@ -144,8 +156,16 @@ class QCombination(QNode):

    @property
    def empty(self):
        warn_empty_is_deprecated()
        return not bool(self.children)

    def __eq__(self, other):
        return (
            self.__class__ == other.__class__
            and self.operation == other.operation
            and self.children == other.children
        )


class Q(QNode):
    """A simple query object, used in a query tree to build up more complex
@ -156,11 +176,18 @@ class Q(QNode):
        self.query = query

    def __repr__(self):
        return 'Q(**%s)' % repr(self.query)
        return "Q(**%s)" % repr(self.query)

    def __bool__(self):
        return bool(self.query)

    def __eq__(self, other):
        return self.__class__ == other.__class__ and self.query == other.query

    def accept(self, visitor):
        return visitor.visit_query(self)

    @property
    def empty(self):
        warn_empty_is_deprecated()
        return not bool(self.query)
@ -1,5 +1,12 @@
__all__ = ('pre_init', 'post_init', 'pre_save', 'pre_save_post_validation',
           'post_save', 'pre_delete', 'post_delete')
__all__ = (
    "pre_init",
    "post_init",
    "pre_save",
    "pre_save_post_validation",
    "post_save",
    "pre_delete",
    "post_delete",
)

signals_available = False
try:
@ -7,11 +14,12 @@ try:

    signals_available = True
except ImportError:
    class Namespace(object):

    class Namespace:
        def signal(self, name, doc=None):
            return _FakeSignal(name, doc)

    class _FakeSignal(object):
    class _FakeSignal:
        """If blinker is unavailable, create a fake class with the same
        interface that allows sending of signals but will fail with an
        error on anything else. Instead of doing anything on send, it
@ -23,13 +31,16 @@ except ImportError:
            self.__doc__ = doc

        def _fail(self, *args, **kwargs):
            raise RuntimeError('signalling support is unavailable '
                               'because the blinker library is '
                               'not installed.')
            raise RuntimeError(
                "signalling support is unavailable "
                "because the blinker library is "
                "not installed."
            )

        send = lambda *a, **kw: None  # noqa
        connect = disconnect = has_receivers_for = receivers_for = \
            temporarily_connected_to = _fail
        connect = (
            disconnect
        ) = has_receivers_for = receivers_for = temporarily_connected_to = _fail
        del _fail


@ -37,12 +48,12 @@ except ImportError:
# not put signals in here. Create your own namespace instead.
_signals = Namespace()

pre_init = _signals.signal('pre_init')
post_init = _signals.signal('post_init')
pre_save = _signals.signal('pre_save')
pre_save_post_validation = _signals.signal('pre_save_post_validation')
post_save = _signals.signal('post_save')
pre_delete = _signals.signal('pre_delete')
post_delete = _signals.signal('post_delete')
pre_bulk_insert = _signals.signal('pre_bulk_insert')
post_bulk_insert = _signals.signal('post_bulk_insert')
pre_init = _signals.signal("pre_init")
post_init = _signals.signal("post_init")
pre_save = _signals.signal("pre_save")
pre_save_post_validation = _signals.signal("pre_save_post_validation")
post_save = _signals.signal("post_save")
pre_delete = _signals.signal("pre_delete")
post_delete = _signals.signal("post_delete")
pre_bulk_insert = _signals.signal("pre_bulk_insert")
post_bulk_insert = _signals.signal("post_bulk_insert")
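For reference, these module-level signals are consumed by connecting a handler; real dispatch requires blinker to be installed, otherwise the _FakeSignal fallback above raises on connect. A minimal sketch with a hypothetical Author document:

    from mongoengine import Document, StringField, signals

    class Author(Document):
        name = StringField()

    def log_save(sender, document, **kwargs):
        print("saved", document.name)

    signals.post_save.connect(log_save, sender=Author)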
@ -51,4 +51,4 @@ rm -rf $RPM_BUILD_ROOT
# %{python_sitearch}/*

%changelog
* See: http://docs.mongoengine.org/en/latest/changelog.html
8
requirements-dev.txt
Normal file
@ -0,0 +1,8 @@
black
flake8
flake8-import-order
pre-commit
pytest
ipdb
ipython
tox
@ -1,7 +1,3 @@
nose
pymongo>=3.4
six==1.10.0
flake8
flake8-import-order
Sphinx==1.5.5
sphinx-rtd-theme==0.2.4
13
setup.cfg
@ -1,11 +1,10 @@
[nosetests]
verbosity=2
detailed-errors=1
#tests=tests
cover-package=mongoengine

[flake8]
ignore=E501,F401,F403,F405,I201,I202,W504, W605
ignore=E501,F401,F403,F405,I201,I202,W504, W605, W503
exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests
max-complexity=47
application-import-names=mongoengine,tests

[tool:pytest]
# Limits the discovery to tests directory
# avoids that it runs for instance the benchmark
testpaths = tests
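The newly ignored W503 ("line break before binary operator") is the line-break style black produces, so flake8 must not flag it. For example, black formats a long boolean like this, which W503 would otherwise reject (the names are illustrative):

    is_valid = (
        value is not None
        and isinstance(value, dict)  # black breaks *before* the operator
    )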
135
setup.py
@ -1,6 +1,9 @@
import os
import sys

from pkg_resources import normalize_path
from setuptools import find_packages, setup
from setuptools.command.test import test as TestCommand

# Hack to silence atexit traceback in newer python versions
try:
@ -8,13 +11,10 @@ try:
except ImportError:
    pass

DESCRIPTION = (
    'MongoEngine is a Python Object-Document '
    'Mapper for working with MongoDB.'
)
DESCRIPTION = "MongoEngine is a Python Object-Document Mapper for working with MongoDB."

try:
    with open('README.rst') as fin:
    with open("README.rst") as fin:
        LONG_DESCRIPTION = fin.read()
except Exception:
    LONG_DESCRIPTION = None
@ -24,63 +24,124 @@ def get_version(version_tuple):
    """Return the version tuple as a string, e.g. for (0, 10, 7),
    return '0.10.7'.
    """
    return '.'.join(map(str, version_tuple))
    return ".".join(map(str, version_tuple))


class PyTest(TestCommand):
    """Will force pytest to search for tests inside the build directory
    for 2to3 converted code (used by tox), instead of the current directory.
    Required as long as we need 2to3

    Known Limitation: https://tox.readthedocs.io/en/latest/example/pytest.html#known-issues-and-limitations
    Source: https://www.hackzine.org/python-testing-with-pytest-and-2to3-plus-tox-and-travis-ci.html
    """

    # https://pytest.readthedocs.io/en/2.7.3/goodpractises.html#integration-with-setuptools-test-commands
    # Allows to provide pytest command argument through the test runner command `python setup.py test`
    # e.g: `python setup.py test -a "-k=test"`
    # This only works for 1 argument though
    user_options = [("pytest-args=", "a", "Arguments to pass to py.test")]

    def initialize_options(self):
        TestCommand.initialize_options(self)
        self.pytest_args = ""

    def finalize_options(self):
        TestCommand.finalize_options(self)
        self.test_args = ["tests"]
        self.test_suite = True

    def run_tests(self):
        # import here, cause outside the eggs aren't loaded
        from pkg_resources import _namespace_packages
        import pytest

        # Purge modules under test from sys.modules. The test loader will
        # re-import them from the build location. Required when 2to3 is used
        # with namespace packages.
        if sys.version_info >= (3,) and getattr(self.distribution, "use_2to3", False):
            module = self.test_args[-1].split(".")[0]
            if module in _namespace_packages:
                del_modules = []
                if module in sys.modules:
                    del_modules.append(module)
                module += "."
                for name in sys.modules:
                    if name.startswith(module):
                        del_modules.append(name)
                map(sys.modules.__delitem__, del_modules)

            # Run on the build directory for 2to3-built code
            # This will prevent the old 2.x code from being found
            # by py.test discovery mechanism, that apparently
            # ignores sys.path..
            ei_cmd = self.get_finalized_command("egg_info")
            self.test_args = [normalize_path(ei_cmd.egg_base)]

        cmd_args = self.test_args + ([self.pytest_args] if self.pytest_args else [])
        errno = pytest.main(cmd_args)
        sys.exit(errno)


# Dirty hack to get version number from monogengine/__init__.py - we can't
# import it as it depends on PyMongo and PyMongo isn't installed until this
# file is read
init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py')
version_line = list(filter(lambda l: l.startswith('VERSION'), open(init)))[0]
init = os.path.join(os.path.dirname(__file__), "mongoengine", "__init__.py")
version_line = list(filter(lambda l: l.startswith("VERSION"), open(init)))[0]

VERSION = get_version(eval(version_line.split('=')[-1]))
VERSION = get_version(eval(version_line.split("=")[-1]))

CLASSIFIERS = [
    'Development Status :: 4 - Beta',
    'Intended Audience :: Developers',
    'License :: OSI Approved :: MIT License',
    'Operating System :: OS Independent',
    'Programming Language :: Python',
    "Programming Language :: Python :: 2",
    "Programming Language :: Python :: 2.7",
    "Development Status :: 5 - Production/Stable",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent",
    "Programming Language :: Python",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.5",
    "Programming Language :: Python :: 3.6",
    "Programming Language :: Python :: 3.7",
    "Programming Language :: Python :: 3.8",
    "Programming Language :: Python :: Implementation :: CPython",
    "Programming Language :: Python :: Implementation :: PyPy",
    'Topic :: Database',
    'Topic :: Software Development :: Libraries :: Python Modules',
    "Topic :: Database",
    "Topic :: Software Development :: Libraries :: Python Modules",
]

extra_opts = {
    'packages': find_packages(exclude=['tests', 'tests.*']),
    'tests_require': ['nose', 'coverage==4.2', 'blinker', 'Pillow>=2.0.0']
    "packages": find_packages(exclude=["tests", "tests.*"]),
    "tests_require": [
        "pytest<5.0",
        "pytest-cov",
        "coverage<5.0",  # recent coverage switched to sqlite format for the .coverage file which isn't handled properly by coveralls
        "blinker",
        "Pillow>=2.0.0, <7.0.0",  # 7.0.0 dropped Python2 support
    ],
}
if sys.version_info[0] == 3:
    extra_opts['use_2to3'] = True
    if 'test' in sys.argv or 'nosetests' in sys.argv:
        extra_opts['packages'] = find_packages()
        extra_opts['package_data'] = {
            'tests': ['fields/mongoengine.png', 'fields/mongodb_leaf.png']}
else:
    extra_opts['tests_require'] += ['python-dateutil']

if "test" in sys.argv:
    extra_opts["packages"] = find_packages()
    extra_opts["package_data"] = {
        "tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]
    }

setup(
    name='mongoengine',
    name="mongoengine",
    version=VERSION,
    author='Harry Marr',
    author_email='harry.marr@gmail.com',
    author="Harry Marr",
    author_email="harry.marr@gmail.com",
    maintainer="Stefan Wojcik",
    maintainer_email="wojcikstefan@gmail.com",
    url='http://mongoengine.org/',
    download_url='https://github.com/MongoEngine/mongoengine/tarball/master',
    license='MIT',
    url="http://mongoengine.org/",
    download_url="https://github.com/MongoEngine/mongoengine/tarball/master",
    license="MIT",
    include_package_data=True,
    description=DESCRIPTION,
    long_description=LONG_DESCRIPTION,
    platforms=['any'],
    platforms=["any"],
    classifiers=CLASSIFIERS,
    install_requires=['pymongo>=3.4', 'six'],
    test_suite='nose.collector',
    python_requires=">=3.5",
    install_requires=["pymongo>=3.4, <4.0"],
    cmdclass={"test": PyTest},
    **extra_opts
)
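As a quick illustration of the version plumbing above, the VERSION line is parsed out of mongoengine/__init__.py with string splitting and eval rather than an import (the version tuple below is hypothetical):

    version_line = "VERSION = (0, 19, 1)\n"          # hypothetical file content
    version_tuple = eval(version_line.split("=")[-1])  # -> (0, 19, 1)
    print(".".join(map(str, version_tuple)))           # -> "0.19.1"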
@ -1,4 +0,0 @@
from .all_warnings import AllWarnings
from .document import *
from .queryset import *
from .fields import *
@ -1,42 +0,0 @@
"""
This test has been put into a module. This is because it tests warnings that
only get triggered on first hit. This way we can ensure its imported into the
top level and called first by the test suite.
"""
import unittest
import warnings

from mongoengine import *


__all__ = ('AllWarnings', )


class AllWarnings(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.warning_list = []
        self.showwarning_default = warnings.showwarning
        warnings.showwarning = self.append_to_warning_list

    def append_to_warning_list(self, message, category, *args):
        self.warning_list.append({"message": message,
                                  "category": category})

    def tearDown(self):
        # restore default handling of warnings
        warnings.showwarning = self.showwarning_default

    def test_document_collection_syntax_warning(self):

        class NonAbstractBase(Document):
            meta = {'allow_inheritance': True}

        class InheritedDocumentFailTest(NonAbstractBase):
            meta = {'collection': 'fail'}

        warning = self.warning_list[0]
        self.assertEqual(SyntaxWarning, warning["category"])
        self.assertEqual('non_abstract_base',
                         InheritedDocumentFailTest._get_collection_name())
35
tests/all_warnings/test_warnings.py
Normal file
@ -0,0 +1,35 @@
"""
This test has been put into a module. This is because it tests warnings that
only get triggered on first hit. This way we can ensure its imported into the
top level and called first by the test suite.
"""
import unittest
import warnings

from mongoengine import *


class TestAllWarnings(unittest.TestCase):
    def setUp(self):
        connect(db="mongoenginetest")
        self.warning_list = []
        self.showwarning_default = warnings.showwarning
        warnings.showwarning = self.append_to_warning_list

    def append_to_warning_list(self, message, category, *args):
        self.warning_list.append({"message": message, "category": category})

    def tearDown(self):
        # restore default handling of warnings
        warnings.showwarning = self.showwarning_default

    def test_document_collection_syntax_warning(self):
        class NonAbstractBase(Document):
            meta = {"allow_inheritance": True}

        class InheritedDocumentFailTest(NonAbstractBase):
            meta = {"collection": "fail"}

        warning = self.warning_list[0]
        assert SyntaxWarning == warning["category"]
        assert "non_abstract_base" == InheritedDocumentFailTest._get_collection_name()
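The setUp/tearDown dance that swaps warnings.showwarning predates context-manager capture; a sketch of the equivalent in modern test code, under the assumption the same SyntaxWarning is raised at class-definition time:

    import warnings

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        warnings.warn("collection clash", SyntaxWarning)  # stand-in for the real trigger
    assert caught[0].category is SyntaxWarning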
@ -1,13 +0,0 @@
import unittest

from .class_methods import *
from .delta import *
from .dynamic import *
from .indexes import *
from .inheritance import *
from .instance import *
from .json_serialisation import *
from .validation import *

if __name__ == '__main__':
    unittest.main()
@ -1,864 +0,0 @@
# -*- coding: utf-8 -*-
import unittest

from bson import SON
from mongoengine import *
from mongoengine.pymongo_support import list_collection_names
from tests.utils import MongoDBTestCase


class DeltaTest(MongoDBTestCase):

    def setUp(self):
        super(DeltaTest, self).setUp()

        class Person(Document):
            name = StringField()
            age = IntField()

            non_field = True

            meta = {"allow_inheritance": True}

        self.Person = Person

    def tearDown(self):
        for collection in list_collection_names(self.db):
            self.db.drop_collection(collection)

    def test_delta(self):
        self.delta(Document)
        self.delta(DynamicDocument)

    def delta(self, DocClass):

        class Doc(DocClass):
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        doc.string_field = 'hello'
        self.assertEqual(doc._get_changed_fields(), ['string_field'])
        self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {}))

        doc._changed_fields = []
        doc.int_field = 1
        self.assertEqual(doc._get_changed_fields(), ['int_field'])
        self.assertEqual(doc._delta(), ({'int_field': 1}, {}))

        doc._changed_fields = []
        dict_value = {'hello': 'world', 'ping': 'pong'}
        doc.dict_field = dict_value
        self.assertEqual(doc._get_changed_fields(), ['dict_field'])
        self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {}))

        doc._changed_fields = []
        list_value = ['1', 2, {'hello': 'world'}]
        doc.list_field = list_value
        self.assertEqual(doc._get_changed_fields(), ['list_field'])
        self.assertEqual(doc._delta(), ({'list_field': list_value}, {}))

        # Test unsetting
        doc._changed_fields = []
        doc.dict_field = {}
        self.assertEqual(doc._get_changed_fields(), ['dict_field'])
        self.assertEqual(doc._delta(), ({}, {'dict_field': 1}))

        doc._changed_fields = []
        doc.list_field = []
        self.assertEqual(doc._get_changed_fields(), ['list_field'])
        self.assertEqual(doc._delta(), ({}, {'list_field': 1}))

    def test_delta_recursive(self):
        self.delta_recursive(Document, EmbeddedDocument)
        self.delta_recursive(DynamicDocument, EmbeddedDocument)
        self.delta_recursive(Document, DynamicEmbeddedDocument)
        self.delta_recursive(DynamicDocument, DynamicEmbeddedDocument)

    def delta_recursive(self, DocClass, EmbeddedClass):

        class Embedded(EmbeddedClass):
            id = StringField()
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()

        class Doc(DocClass):
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()
            embedded_field = EmbeddedDocumentField(Embedded)

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        embedded_1 = Embedded()
        embedded_1.id = "010101"
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}
        embedded_1.list_field = ['1', 2, {'hello': 'world'}]
        doc.embedded_field = embedded_1

        self.assertEqual(doc._get_changed_fields(), ['embedded_field'])

        embedded_delta = {
            'id': "010101",
            'string_field': 'hello',
            'int_field': 1,
            'dict_field': {'hello': 'world'},
            'list_field': ['1', 2, {'hello': 'world'}]
        }
        self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {}))
        self.assertEqual(doc._delta(),
                         ({'embedded_field': embedded_delta}, {}))

        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.dict_field = {}
        self.assertEqual(doc._get_changed_fields(),
                         ['embedded_field.dict_field'])
        self.assertEqual(doc.embedded_field._delta(), ({}, {'dict_field': 1}))
        self.assertEqual(doc._delta(), ({}, {'embedded_field.dict_field': 1}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.dict_field, {})

        doc.embedded_field.list_field = []
        self.assertEqual(doc._get_changed_fields(),
                         ['embedded_field.list_field'])
        self.assertEqual(doc.embedded_field._delta(), ({}, {'list_field': 1}))
        self.assertEqual(doc._delta(), ({}, {'embedded_field.list_field': 1}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field, [])

        embedded_2 = Embedded()
        embedded_2.string_field = 'hello'
        embedded_2.int_field = 1
        embedded_2.dict_field = {'hello': 'world'}
        embedded_2.list_field = ['1', 2, {'hello': 'world'}]

        doc.embedded_field.list_field = ['1', 2, embedded_2]
        self.assertEqual(doc._get_changed_fields(),
                         ['embedded_field.list_field'])

        self.assertEqual(doc.embedded_field._delta(), ({
            'list_field': ['1', 2, {
                '_cls': 'Embedded',
                'string_field': 'hello',
                'dict_field': {'hello': 'world'},
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))

        self.assertEqual(doc._delta(), ({
            'embedded_field.list_field': ['1', 2, {
                '_cls': 'Embedded',
                'string_field': 'hello',
                'dict_field': {'hello': 'world'},
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))
        doc.save()
        doc = doc.reload(10)

        self.assertEqual(doc.embedded_field.list_field[0], '1')
        self.assertEqual(doc.embedded_field.list_field[1], 2)
        for k in doc.embedded_field.list_field[2]._fields:
            self.assertEqual(doc.embedded_field.list_field[2][k],
                             embedded_2[k])

        doc.embedded_field.list_field[2].string_field = 'world'
        self.assertEqual(doc._get_changed_fields(),
                         ['embedded_field.list_field.2.string_field'])
        self.assertEqual(doc.embedded_field._delta(),
                         ({'list_field.2.string_field': 'world'}, {}))
        self.assertEqual(doc._delta(),
                         ({'embedded_field.list_field.2.string_field': 'world'}, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].string_field,
                         'world')

        # Test multiple assignments
        doc.embedded_field.list_field[2].string_field = 'hello world'
        doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
        self.assertEqual(doc._get_changed_fields(),
                         ['embedded_field.list_field.2'])
        self.assertEqual(doc.embedded_field._delta(), ({'list_field.2': {
            '_cls': 'Embedded',
            'string_field': 'hello world',
            'int_field': 1,
            'list_field': ['1', 2, {'hello': 'world'}],
            'dict_field': {'hello': 'world'}}
        }, {}))
        self.assertEqual(doc._delta(), ({'embedded_field.list_field.2': {
            '_cls': 'Embedded',
            'string_field': 'hello world',
            'int_field': 1,
            'list_field': ['1', 2, {'hello': 'world'}],
            'dict_field': {'hello': 'world'}}
        }, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].string_field,
                         'hello world')

        # Test list native methods
        doc.embedded_field.list_field[2].list_field.pop(0)
        self.assertEqual(doc._delta(),
                         ({'embedded_field.list_field.2.list_field':
                          [2, {'hello': 'world'}]}, {}))
        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.list_field[2].list_field.append(1)
        self.assertEqual(doc._delta(),
                         ({'embedded_field.list_field.2.list_field':
                          [2, {'hello': 'world'}, 1]}, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].list_field,
                         [2, {'hello': 'world'}, 1])

        doc.embedded_field.list_field[2].list_field.sort(key=str)
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].list_field,
                         [1, 2, {'hello': 'world'}])

        del doc.embedded_field.list_field[2].list_field[2]['hello']
        self.assertEqual(doc._delta(),
                         ({}, {'embedded_field.list_field.2.list_field.2.hello': 1}))
        doc.save()
        doc = doc.reload(10)

        del doc.embedded_field.list_field[2].list_field
        self.assertEqual(doc._delta(),
                         ({}, {'embedded_field.list_field.2.list_field': 1}))

        doc.save()
        doc = doc.reload(10)

        doc.dict_field['Embedded'] = embedded_1
        doc.save()
        doc = doc.reload(10)

        doc.dict_field['Embedded'].string_field = 'Hello World'
        self.assertEqual(doc._get_changed_fields(),
                         ['dict_field.Embedded.string_field'])
        self.assertEqual(doc._delta(),
                         ({'dict_field.Embedded.string_field': 'Hello World'}, {}))

    def test_circular_reference_deltas(self):
        self.circular_reference_deltas(Document, Document)
        self.circular_reference_deltas(Document, DynamicDocument)
        self.circular_reference_deltas(DynamicDocument, Document)
        self.circular_reference_deltas(DynamicDocument, DynamicDocument)

    def circular_reference_deltas(self, DocClass1, DocClass2):

        class Person(DocClass1):
            name = StringField()
            owns = ListField(ReferenceField('Organization'))

        class Organization(DocClass2):
            name = StringField()
            owner = ReferenceField('Person')

        Person.drop_collection()
        Organization.drop_collection()

        person = Person(name="owner").save()
        organization = Organization(name="company").save()

        person.owns.append(organization)
        organization.owner = person

        person.save()
        organization.save()

        p = Person.objects[0].select_related()
        o = Organization.objects.first()
        self.assertEqual(p.owns[0], o)
        self.assertEqual(o.owner, p)

    def test_circular_reference_deltas_2(self):
        self.circular_reference_deltas_2(Document, Document)
        self.circular_reference_deltas_2(Document, DynamicDocument)
        self.circular_reference_deltas_2(DynamicDocument, Document)
        self.circular_reference_deltas_2(DynamicDocument, DynamicDocument)

    def circular_reference_deltas_2(self, DocClass1, DocClass2, dbref=True):

        class Person(DocClass1):
            name = StringField()
            owns = ListField(ReferenceField('Organization', dbref=dbref))
            employer = ReferenceField('Organization', dbref=dbref)

        class Organization(DocClass2):
            name = StringField()
            owner = ReferenceField('Person', dbref=dbref)
            employees = ListField(ReferenceField('Person', dbref=dbref))

        Person.drop_collection()
        Organization.drop_collection()

        person = Person(name="owner").save()
        employee = Person(name="employee").save()
        organization = Organization(name="company").save()

        person.owns.append(organization)
        organization.owner = person

        organization.employees.append(employee)
        employee.employer = organization

        person.save()
        organization.save()
        employee.save()

        p = Person.objects.get(name="owner")
        e = Person.objects.get(name="employee")
        o = Organization.objects.first()

        self.assertEqual(p.owns[0], o)
        self.assertEqual(o.owner, p)
        self.assertEqual(e.employer, o)

        return person, organization, employee

    def test_delta_db_field(self):
        self.delta_db_field(Document)
        self.delta_db_field(DynamicDocument)

    def delta_db_field(self, DocClass):

        class Doc(DocClass):
            string_field = StringField(db_field='db_string_field')
            int_field = IntField(db_field='db_int_field')
            dict_field = DictField(db_field='db_dict_field')
            list_field = ListField(db_field='db_list_field')

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        doc.string_field = 'hello'
        self.assertEqual(doc._get_changed_fields(), ['db_string_field'])
        self.assertEqual(doc._delta(), ({'db_string_field': 'hello'}, {}))

        doc._changed_fields = []
        doc.int_field = 1
        self.assertEqual(doc._get_changed_fields(), ['db_int_field'])
        self.assertEqual(doc._delta(), ({'db_int_field': 1}, {}))

        doc._changed_fields = []
        dict_value = {'hello': 'world', 'ping': 'pong'}
        doc.dict_field = dict_value
        self.assertEqual(doc._get_changed_fields(), ['db_dict_field'])
        self.assertEqual(doc._delta(), ({'db_dict_field': dict_value}, {}))

        doc._changed_fields = []
        list_value = ['1', 2, {'hello': 'world'}]
        doc.list_field = list_value
        self.assertEqual(doc._get_changed_fields(), ['db_list_field'])
        self.assertEqual(doc._delta(), ({'db_list_field': list_value}, {}))

        # Test unsetting
        doc._changed_fields = []
        doc.dict_field = {}
        self.assertEqual(doc._get_changed_fields(), ['db_dict_field'])
        self.assertEqual(doc._delta(), ({}, {'db_dict_field': 1}))

        doc._changed_fields = []
        doc.list_field = []
        self.assertEqual(doc._get_changed_fields(), ['db_list_field'])
        self.assertEqual(doc._delta(), ({}, {'db_list_field': 1}))

        # Test it saves that data
        doc = Doc()
        doc.save()

        doc.string_field = 'hello'
        doc.int_field = 1
        doc.dict_field = {'hello': 'world'}
        doc.list_field = ['1', 2, {'hello': 'world'}]
        doc.save()
        doc = doc.reload(10)

        self.assertEqual(doc.string_field, 'hello')
        self.assertEqual(doc.int_field, 1)
        self.assertEqual(doc.dict_field, {'hello': 'world'})
        self.assertEqual(doc.list_field, ['1', 2, {'hello': 'world'}])

    def test_delta_recursive_db_field(self):
        self.delta_recursive_db_field(Document, EmbeddedDocument)
        self.delta_recursive_db_field(Document, DynamicEmbeddedDocument)
        self.delta_recursive_db_field(DynamicDocument, EmbeddedDocument)
        self.delta_recursive_db_field(DynamicDocument, DynamicEmbeddedDocument)

    def delta_recursive_db_field(self, DocClass, EmbeddedClass):

        class Embedded(EmbeddedClass):
            string_field = StringField(db_field='db_string_field')
            int_field = IntField(db_field='db_int_field')
            dict_field = DictField(db_field='db_dict_field')
            list_field = ListField(db_field='db_list_field')

        class Doc(DocClass):
            string_field = StringField(db_field='db_string_field')
            int_field = IntField(db_field='db_int_field')
            dict_field = DictField(db_field='db_dict_field')
            list_field = ListField(db_field='db_list_field')
            embedded_field = EmbeddedDocumentField(Embedded,
                                                   db_field='db_embedded_field')

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}
        embedded_1.list_field = ['1', 2, {'hello': 'world'}]
        doc.embedded_field = embedded_1

        self.assertEqual(doc._get_changed_fields(), ['db_embedded_field'])

        embedded_delta = {
            'db_string_field': 'hello',
            'db_int_field': 1,
            'db_dict_field': {'hello': 'world'},
            'db_list_field': ['1', 2, {'hello': 'world'}]
        }
        self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {}))
        self.assertEqual(doc._delta(),
                         ({'db_embedded_field': embedded_delta}, {}))

        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.dict_field = {}
        self.assertEqual(doc._get_changed_fields(),
                         ['db_embedded_field.db_dict_field'])
        self.assertEqual(doc.embedded_field._delta(),
                         ({}, {'db_dict_field': 1}))
        self.assertEqual(doc._delta(),
                         ({}, {'db_embedded_field.db_dict_field': 1}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.dict_field, {})

        doc.embedded_field.list_field = []
        self.assertEqual(doc._get_changed_fields(),
                         ['db_embedded_field.db_list_field'])
        self.assertEqual(doc.embedded_field._delta(),
                         ({}, {'db_list_field': 1}))
        self.assertEqual(doc._delta(),
                         ({}, {'db_embedded_field.db_list_field': 1}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field, [])

        embedded_2 = Embedded()
        embedded_2.string_field = 'hello'
        embedded_2.int_field = 1
        embedded_2.dict_field = {'hello': 'world'}
        embedded_2.list_field = ['1', 2, {'hello': 'world'}]

        doc.embedded_field.list_field = ['1', 2, embedded_2]
        self.assertEqual(doc._get_changed_fields(),
                         ['db_embedded_field.db_list_field'])
        self.assertEqual(doc.embedded_field._delta(), ({
            'db_list_field': ['1', 2, {
                '_cls': 'Embedded',
                'db_string_field': 'hello',
                'db_dict_field': {'hello': 'world'},
                'db_int_field': 1,
                'db_list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))

        self.assertEqual(doc._delta(), ({
            'db_embedded_field.db_list_field': ['1', 2, {
                '_cls': 'Embedded',
                'db_string_field': 'hello',
                'db_dict_field': {'hello': 'world'},
                'db_int_field': 1,
                'db_list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))
        doc.save()
        doc = doc.reload(10)

        self.assertEqual(doc.embedded_field.list_field[0], '1')
        self.assertEqual(doc.embedded_field.list_field[1], 2)
        for k in doc.embedded_field.list_field[2]._fields:
            self.assertEqual(doc.embedded_field.list_field[2][k],
                             embedded_2[k])

        doc.embedded_field.list_field[2].string_field = 'world'
        self.assertEqual(doc._get_changed_fields(),
                         ['db_embedded_field.db_list_field.2.db_string_field'])
        self.assertEqual(doc.embedded_field._delta(),
                         ({'db_list_field.2.db_string_field': 'world'}, {}))
        self.assertEqual(doc._delta(),
                         ({'db_embedded_field.db_list_field.2.db_string_field': 'world'},
                          {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].string_field,
                         'world')

        # Test multiple assignments
        doc.embedded_field.list_field[2].string_field = 'hello world'
        doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
        self.assertEqual(doc._get_changed_fields(),
                         ['db_embedded_field.db_list_field.2'])
        self.assertEqual(doc.embedded_field._delta(), ({'db_list_field.2': {
            '_cls': 'Embedded',
            'db_string_field': 'hello world',
            'db_int_field': 1,
            'db_list_field': ['1', 2, {'hello': 'world'}],
            'db_dict_field': {'hello': 'world'}}}, {}))
        self.assertEqual(doc._delta(), ({
            'db_embedded_field.db_list_field.2': {
                '_cls': 'Embedded',
                'db_string_field': 'hello world',
                'db_int_field': 1,
                'db_list_field': ['1', 2, {'hello': 'world'}],
                'db_dict_field': {'hello': 'world'}}
        }, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].string_field,
                         'hello world')

        # Test list native methods
        doc.embedded_field.list_field[2].list_field.pop(0)
        self.assertEqual(doc._delta(),
                         ({'db_embedded_field.db_list_field.2.db_list_field':
                          [2, {'hello': 'world'}]}, {}))
        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.list_field[2].list_field.append(1)
        self.assertEqual(doc._delta(),
                         ({'db_embedded_field.db_list_field.2.db_list_field':
                          [2, {'hello': 'world'}, 1]}, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].list_field,
                         [2, {'hello': 'world'}, 1])

        doc.embedded_field.list_field[2].list_field.sort(key=str)
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].list_field,
                         [1, 2, {'hello': 'world'}])

        del doc.embedded_field.list_field[2].list_field[2]['hello']
        self.assertEqual(doc._delta(),
                         ({}, {'db_embedded_field.db_list_field.2.db_list_field.2.hello': 1}))
        doc.save()
        doc = doc.reload(10)

        del doc.embedded_field.list_field[2].list_field
        self.assertEqual(doc._delta(), ({},
                         {'db_embedded_field.db_list_field.2.db_list_field': 1}))

    def test_delta_for_dynamic_documents(self):
        class Person(DynamicDocument):
            name = StringField()
            meta = {'allow_inheritance': True}

        Person.drop_collection()

        p = Person(name="James", age=34)
        self.assertEqual(p._delta(), (
            SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))

        p.doc = 123
        del p.doc
        self.assertEqual(p._delta(), (
            SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))

        p = Person()
        p.name = "Dean"
        p.age = 22
        p.save()

        p.age = 24
        self.assertEqual(p.age, 24)
        self.assertEqual(p._get_changed_fields(), ['age'])
        self.assertEqual(p._delta(), ({'age': 24}, {}))

        p = Person.objects(age=22).get()
        p.age = 24
        self.assertEqual(p.age, 24)
        self.assertEqual(p._get_changed_fields(), ['age'])
        self.assertEqual(p._delta(), ({'age': 24}, {}))

        p.save()
        self.assertEqual(1, Person.objects(age=24).count())

    def test_dynamic_delta(self):

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        doc.string_field = 'hello'
        self.assertEqual(doc._get_changed_fields(), ['string_field'])
        self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {}))

        doc._changed_fields = []
        doc.int_field = 1
        self.assertEqual(doc._get_changed_fields(), ['int_field'])
        self.assertEqual(doc._delta(), ({'int_field': 1}, {}))

        doc._changed_fields = []
        dict_value = {'hello': 'world', 'ping': 'pong'}
        doc.dict_field = dict_value
        self.assertEqual(doc._get_changed_fields(), ['dict_field'])
        self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {}))

        doc._changed_fields = []
        list_value = ['1', 2, {'hello': 'world'}]
        doc.list_field = list_value
        self.assertEqual(doc._get_changed_fields(), ['list_field'])
        self.assertEqual(doc._delta(), ({'list_field': list_value}, {}))

        # Test unsetting
        doc._changed_fields = []
        doc.dict_field = {}
        self.assertEqual(doc._get_changed_fields(), ['dict_field'])
        self.assertEqual(doc._delta(), ({}, {'dict_field': 1}))

        doc._changed_fields = []
        doc.list_field = []
        self.assertEqual(doc._get_changed_fields(), ['list_field'])
        self.assertEqual(doc._delta(), ({}, {'list_field': 1}))

    def test_delta_with_dbref_true(self):
        person, organization, employee = self.circular_reference_deltas_2(Document, Document, True)
        employee.name = 'test'

        self.assertEqual(organization._get_changed_fields(), [])

        updates, removals = organization._delta()
        self.assertEqual({}, removals)
        self.assertEqual({}, updates)

        organization.employees.append(person)
        updates, removals = organization._delta()
        self.assertEqual({}, removals)
        self.assertIn('employees', updates)

    def test_delta_with_dbref_false(self):
        person, organization, employee = self.circular_reference_deltas_2(Document, Document, False)
        employee.name = 'test'

        self.assertEqual(organization._get_changed_fields(), [])

        updates, removals = organization._delta()
        self.assertEqual({}, removals)
        self.assertEqual({}, updates)

        organization.employees.append(person)
        updates, removals = organization._delta()
        self.assertEqual({}, removals)
        self.assertIn('employees', updates)

    def test_nested_nested_fields_mark_as_changed(self):
        class EmbeddedDoc(EmbeddedDocument):
            name = StringField()

        class MyDoc(Document):
            subs = MapField(MapField(EmbeddedDocumentField(EmbeddedDoc)))
            name = StringField()

        MyDoc.drop_collection()

        mydoc = MyDoc(name='testcase1', subs={'a': {'b': EmbeddedDoc(name='foo')}}).save()

        mydoc = MyDoc.objects.first()
        subdoc = mydoc.subs['a']['b']
        subdoc.name = 'bar'

        self.assertEqual(["name"], subdoc._get_changed_fields())
        self.assertEqual(["subs.a.b.name"], mydoc._get_changed_fields())

        mydoc._clear_changed_fields()
        self.assertEqual([], mydoc._get_changed_fields())

    def test_lower_level_mark_as_changed(self):
        class EmbeddedDoc(EmbeddedDocument):
            name = StringField()

        class MyDoc(Document):
            subs = MapField(EmbeddedDocumentField(EmbeddedDoc))

        MyDoc.drop_collection()

        MyDoc().save()

        mydoc = MyDoc.objects.first()
        mydoc.subs['a'] = EmbeddedDoc()
        self.assertEqual(["subs.a"], mydoc._get_changed_fields())

        subdoc = mydoc.subs['a']
        subdoc.name = 'bar'

        self.assertEqual(["name"], subdoc._get_changed_fields())
        self.assertEqual(["subs.a"], mydoc._get_changed_fields())
        mydoc.save()

        mydoc._clear_changed_fields()
        self.assertEqual([], mydoc._get_changed_fields())

    def test_upper_level_mark_as_changed(self):
        class EmbeddedDoc(EmbeddedDocument):
            name = StringField()

        class MyDoc(Document):
            subs = MapField(EmbeddedDocumentField(EmbeddedDoc))

        MyDoc.drop_collection()

        MyDoc(subs={'a': EmbeddedDoc(name='foo')}).save()

        mydoc = MyDoc.objects.first()
        subdoc = mydoc.subs['a']
        subdoc.name = 'bar'

        self.assertEqual(["name"], subdoc._get_changed_fields())
        self.assertEqual(["subs.a.name"], mydoc._get_changed_fields())

        mydoc.subs['a'] = EmbeddedDoc()
        self.assertEqual(["subs.a"], mydoc._get_changed_fields())
        mydoc.save()

        mydoc._clear_changed_fields()
        self.assertEqual([], mydoc._get_changed_fields())

    def test_referenced_object_changed_attributes(self):
        """Ensures that when you save a new reference to a field, the referenced object isn't altered"""

        class Organization(Document):
            name = StringField()

        class User(Document):
            name = StringField()
            org = ReferenceField('Organization', required=True)

        Organization.drop_collection()
        User.drop_collection()

        org1 = Organization(name='Org 1')
        org1.save()

        org2 = Organization(name='Org 2')
        org2.save()

        user = User(name='Fred', org=org1)
        user.save()

        org1.reload()
        org2.reload()
        user.reload()
        self.assertEqual(org1.name, 'Org 1')
        self.assertEqual(org2.name, 'Org 2')
        self.assertEqual(user.name, 'Fred')

        user.name = 'Harold'
        user.org = org2

        org2.name = 'New Org 2'
        self.assertEqual(org2.name, 'New Org 2')

        user.save()
        org2.save()

        self.assertEqual(org2.name, 'New Org 2')
        org2.reload()
        self.assertEqual(org2.name, 'New Org 2')

    def test_delta_for_nested_map_fields(self):
        class UInfoDocument(Document):
            phone = StringField()

        class EmbeddedRole(EmbeddedDocument):
            type = StringField()

        class EmbeddedUser(EmbeddedDocument):
            name = StringField()
            roles = MapField(field=EmbeddedDocumentField(EmbeddedRole))
            rolist = ListField(field=EmbeddedDocumentField(EmbeddedRole))
            info = ReferenceField(UInfoDocument)

        class Doc(Document):
            users = MapField(field=EmbeddedDocumentField(EmbeddedUser))
            num = IntField(default=-1)

        Doc.drop_collection()

        doc = Doc(num=1)
        doc.users["007"] = EmbeddedUser(name="Agent007")
        doc.save()

        uinfo = UInfoDocument(phone="79089269066")
        uinfo.save()

        d = Doc.objects(num=1).first()
        d.users["007"]["roles"]["666"] = EmbeddedRole(type="superadmin")
        d.users["007"]["rolist"].append(EmbeddedRole(type="oops"))
        d.users["007"]["info"] = uinfo
        delta = d._delta()
        self.assertEqual(True, "users.007.roles.666" in delta[0])
        self.assertEqual(True, "users.007.rolist" in delta[0])
        self.assertEqual(True, "users.007.info" in delta[0])
        self.assertEqual('superadmin', delta[0]["users.007.roles.666"]["type"])
        self.assertEqual('oops', delta[0]["users.007.rolist"][0]["type"])
        self.assertEqual(uinfo.id, delta[0]["users.007.info"])


if __name__ == '__main__':
    unittest.main()
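For orientation, the _delta() contract exercised throughout the deleted file above is a (sets, unsets) pair of dirty fields. A minimal sketch of one of those assertions in the plain-assert pytest style this suite is migrating to, assuming the same Doc class defined in the test:

    doc = Doc.objects.first()
    doc.string_field = "hello"
    sets, unsets = doc._delta()
    assert sets == {"string_field": "hello"}
    assert unsets == {}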
@ -1,564 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import unittest
|
||||
import warnings
|
||||
|
||||
from six import iteritems
|
||||
|
||||
from mongoengine import (BooleanField, Document, EmbeddedDocument,
|
||||
EmbeddedDocumentField, GenericReferenceField,
|
||||
IntField, ReferenceField, StringField)
|
||||
from mongoengine.pymongo_support import list_collection_names
|
||||
from tests.utils import MongoDBTestCase
|
||||
from tests.fixtures import Base
|
||||
|
||||
__all__ = ('InheritanceTest', )
|
||||
|
||||
|
||||
class InheritanceTest(MongoDBTestCase):
|
||||
|
||||
def tearDown(self):
|
||||
for collection in list_collection_names(self.db):
|
||||
self.db.drop_collection(collection)
|
||||
|
||||
def test_constructor_cls(self):
|
||||
# Ensures _cls is properly set during construction
|
||||
# and when object gets reloaded (prevent regression of #1950)
|
||||
class EmbedData(EmbeddedDocument):
|
||||
data = StringField()
|
||||
meta = {'allow_inheritance': True}
|
||||
|
||||
class DataDoc(Document):
|
||||
name = StringField()
|
||||
embed = EmbeddedDocumentField(EmbedData)
|
||||
meta = {'allow_inheritance': True}
|
||||
|
||||
test_doc = DataDoc(name='test', embed=EmbedData(data='data'))
|
||||
self.assertEqual(test_doc._cls, 'DataDoc')
|
||||
self.assertEqual(test_doc.embed._cls, 'EmbedData')
|
||||
test_doc.save()
|
||||
saved_doc = DataDoc.objects.with_id(test_doc.id)
|
||||
self.assertEqual(test_doc._cls, saved_doc._cls)
|
||||
self.assertEqual(test_doc.embed._cls, saved_doc.embed._cls)
|
||||
test_doc.delete()
|
||||
|
||||
def test_superclasses(self):
|
||||
"""Ensure that the correct list of superclasses is assembled.
|
||||
"""
|
||||
class Animal(Document):
|
||||
meta = {'allow_inheritance': True}
|
||||
class Fish(Animal): pass
|
||||
class Guppy(Fish): pass
|
||||
class Mammal(Animal): pass
|
||||
class Dog(Mammal): pass
|
||||
class Human(Mammal): pass
|
||||
|
||||
self.assertEqual(Animal._superclasses, ())
|
||||
self.assertEqual(Fish._superclasses, ('Animal',))
|
||||
self.assertEqual(Guppy._superclasses, ('Animal', 'Animal.Fish'))
|
||||
self.assertEqual(Mammal._superclasses, ('Animal',))
|
||||
self.assertEqual(Dog._superclasses, ('Animal', 'Animal.Mammal'))
|
||||
self.assertEqual(Human._superclasses, ('Animal', 'Animal.Mammal'))
|
||||
|
||||
def test_external_superclasses(self):
|
||||
"""Ensure that the correct list of super classes is assembled when
|
||||
importing part of the model.
|
||||
"""
|
||||
class Animal(Base): pass
|
||||
class Fish(Animal): pass
|
||||
class Guppy(Fish): pass
|
||||
class Mammal(Animal): pass
|
||||
class Dog(Mammal): pass
|
||||
class Human(Mammal): pass
|
||||
|
||||
self.assertEqual(Animal._superclasses, ('Base', ))
|
||||
self.assertEqual(Fish._superclasses, ('Base', 'Base.Animal',))
|
||||
self.assertEqual(Guppy._superclasses, ('Base', 'Base.Animal',
|
||||
'Base.Animal.Fish'))
|
||||
self.assertEqual(Mammal._superclasses, ('Base', 'Base.Animal',))
|
||||
self.assertEqual(Dog._superclasses, ('Base', 'Base.Animal',
|
||||
'Base.Animal.Mammal'))
|
||||
self.assertEqual(Human._superclasses, ('Base', 'Base.Animal',
|
||||
'Base.Animal.Mammal'))
|
||||
|
||||
def test_subclasses(self):
|
||||
"""Ensure that the correct list of _subclasses (subclasses) is
|
||||
assembled.
|
||||
"""
|
||||
class Animal(Document):
|
||||
meta = {'allow_inheritance': True}
|
||||
class Fish(Animal): pass
|
||||
class Guppy(Fish): pass
|
||||
class Mammal(Animal): pass
|
||||
class Dog(Mammal): pass
|
||||
class Human(Mammal): pass
|
||||
|
||||
self.assertEqual(Animal._subclasses, ('Animal',
|
||||
'Animal.Fish',
|
||||
'Animal.Fish.Guppy',
|
||||
'Animal.Mammal',
|
||||
'Animal.Mammal.Dog',
|
||||
'Animal.Mammal.Human'))
|
||||
self.assertEqual(Fish._subclasses, ('Animal.Fish',
|
||||
'Animal.Fish.Guppy',))
|
||||
self.assertEqual(Guppy._subclasses, ('Animal.Fish.Guppy',))
|
||||
self.assertEqual(Mammal._subclasses, ('Animal.Mammal',
|
||||
'Animal.Mammal.Dog',
|
||||
'Animal.Mammal.Human'))
|
||||
self.assertEqual(Human._subclasses, ('Animal.Mammal.Human',))
|
||||
|
||||
def test_external_subclasses(self):
|
||||
"""Ensure that the correct list of _subclasses (subclasses) is
|
||||
assembled when importing part of the model.
|
||||
"""
|
||||
class Animal(Base): pass
|
||||
class Fish(Animal): pass
|
||||
class Guppy(Fish): pass
|
||||
class Mammal(Animal): pass
|
||||
class Dog(Mammal): pass
|
||||
class Human(Mammal): pass
|
||||
|
||||
self.assertEqual(Animal._subclasses, ('Base.Animal',
|
||||
'Base.Animal.Fish',
|
||||
'Base.Animal.Fish.Guppy',
|
||||
'Base.Animal.Mammal',
|
||||
'Base.Animal.Mammal.Dog',
|
||||
'Base.Animal.Mammal.Human'))
|
||||
self.assertEqual(Fish._subclasses, ('Base.Animal.Fish',
|
||||
'Base.Animal.Fish.Guppy',))
|
||||
self.assertEqual(Guppy._subclasses, ('Base.Animal.Fish.Guppy',))
|
||||
self.assertEqual(Mammal._subclasses, ('Base.Animal.Mammal',
|
||||
'Base.Animal.Mammal.Dog',
|
||||
'Base.Animal.Mammal.Human'))
|
||||
self.assertEqual(Human._subclasses, ('Base.Animal.Mammal.Human',))
|
||||
|
||||
def test_dynamic_declarations(self):
|
||||
"""Test that declaring an extra class updates meta data"""
|
||||
|
||||
class Animal(Document):
|
||||
meta = {'allow_inheritance': True}
|
||||
|
||||
self.assertEqual(Animal._superclasses, ())
|
||||
self.assertEqual(Animal._subclasses, ('Animal',))
|
||||
|
||||
# Test dynamically adding a class changes the meta data
|
||||
class Fish(Animal):
|
||||
pass
|
||||
|
||||
self.assertEqual(Animal._superclasses, ())
|
||||
self.assertEqual(Animal._subclasses, ('Animal', 'Animal.Fish'))
|
||||
|
||||
self.assertEqual(Fish._superclasses, ('Animal', ))
|
||||
self.assertEqual(Fish._subclasses, ('Animal.Fish',))
|
||||
|
||||
# Test dynamically adding an inherited class changes the meta data
|
||||
class Pike(Fish):
|
||||
pass
|
||||
|
||||
self.assertEqual(Animal._superclasses, ())
|
||||
self.assertEqual(Animal._subclasses, ('Animal', 'Animal.Fish',
|
||||
'Animal.Fish.Pike'))
|
||||
|
||||
self.assertEqual(Fish._superclasses, ('Animal', ))
|
||||
self.assertEqual(Fish._subclasses, ('Animal.Fish', 'Animal.Fish.Pike'))
|
||||
|
||||
self.assertEqual(Pike._superclasses, ('Animal', 'Animal.Fish'))
|
||||
self.assertEqual(Pike._subclasses, ('Animal.Fish.Pike',))
|
||||
|
||||
def test_inheritance_meta_data(self):
|
||||
"""Ensure that document may inherit fields from a superclass document.
|
||||
"""
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
|
||||
meta = {'allow_inheritance': True}
|
||||
|
||||
class Employee(Person):
|
||||
salary = IntField()
|
||||
|
||||
self.assertEqual(['_cls', 'age', 'id', 'name', 'salary'],
|
||||
sorted(Employee._fields.keys()))
|
||||
self.assertEqual(Employee._get_collection_name(),
|
||||
Person._get_collection_name())
|
||||
|
||||
def test_inheritance_to_mongo_keys(self):
|
||||
"""Ensure that document may inherit fields from a superclass document.
|
||||
"""
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
|
||||
meta = {'allow_inheritance': True}
|
||||
|
||||
class Employee(Person):
|
||||
salary = IntField()
|
||||
|
||||
self.assertEqual(['_cls', 'age', 'id', 'name', 'salary'],
|
||||
sorted(Employee._fields.keys()))
|
||||
self.assertEqual(Person(name="Bob", age=35).to_mongo().keys(),
|
||||
['_cls', 'name', 'age'])
|
||||
self.assertEqual(Employee(name="Bob", age=35, salary=0).to_mongo().keys(),
|
||||
['_cls', 'name', 'age', 'salary'])
|
||||
self.assertEqual(Employee._get_collection_name(),
|
||||
Person._get_collection_name())
|
||||
|
||||
def test_indexes_and_multiple_inheritance(self):
|
||||
""" Ensure that all of the indexes are created for a document with
|
||||
multiple inheritance.
|
||||
"""
|
||||
|
||||
class A(Document):
|
||||
a = StringField()
|
||||
|
||||
meta = {
|
||||
'allow_inheritance': True,
|
||||
'indexes': ['a']
|
||||
}
|
||||
|
||||
class B(Document):
|
||||
b = StringField()
|
||||
|
||||
meta = {
|
||||
'allow_inheritance': True,
|
||||
'indexes': ['b']
|
||||
}
|
||||
|
||||
class C(A, B):
|
||||
pass
|
||||
|
||||
A.drop_collection()
|
||||
B.drop_collection()
|
||||
C.drop_collection()
|
||||
|
||||
C.ensure_indexes()
|
||||
|
||||
self.assertEqual(
|
||||
sorted([idx['key'] for idx in C._get_collection().index_information().values()]),
|
||||
sorted([[(u'_cls', 1), (u'b', 1)], [(u'_id', 1)], [(u'_cls', 1), (u'a', 1)]])
|
||||
)
|
||||
|
||||
def test_polymorphic_queries(self):
|
||||
"""Ensure that the correct subclasses are returned from a query
|
||||
"""
|
||||
|
||||
class Animal(Document):
|
||||
meta = {'allow_inheritance': True}
|
||||
class Fish(Animal): pass
|
||||
class Mammal(Animal): pass
|
||||
class Dog(Mammal): pass
|
||||
class Human(Mammal): pass
|
||||
|
||||
Animal.drop_collection()
|
||||
|
||||
Animal().save()
|
||||
Fish().save()
|
||||
Mammal().save()
|
||||
Dog().save()
|
||||
Human().save()
|
||||
|
||||
classes = [obj.__class__ for obj in Animal.objects]
|
||||
self.assertEqual(classes, [Animal, Fish, Mammal, Dog, Human])
|
||||
|
||||
classes = [obj.__class__ for obj in Mammal.objects]
|
||||
self.assertEqual(classes, [Mammal, Dog, Human])
|
||||
|
||||
classes = [obj.__class__ for obj in Human.objects]
|
||||
self.assertEqual(classes, [Human])

    def test_allow_inheritance(self):
        """Ensure that inheritance is disabled by default on simple
        classes and that _cls will not be used.
        """
        class Animal(Document):
            name = StringField()

        # can't inherit because Animal didn't explicitly allow inheritance
        with self.assertRaises(ValueError) as cm:
            class Dog(Animal):
                pass
        self.assertIn("Document Animal may not be subclassed", str(cm.exception))

        # Check that _cls etc aren't present on simple documents
        dog = Animal(name='dog').save()
        self.assertEqual(dog.to_mongo().keys(), ['_id', 'name'])

        collection = self.db[Animal._get_collection_name()]
        obj = collection.find_one()
        self.assertNotIn('_cls', obj)

    def test_cant_turn_off_inheritance_on_subclass(self):
        """Ensure that if inheritance is on in a subclass you can't turn it off.
        """
        class Animal(Document):
            name = StringField()
            meta = {'allow_inheritance': True}

        with self.assertRaises(ValueError) as cm:
            class Mammal(Animal):
                meta = {'allow_inheritance': False}
        self.assertEqual(str(cm.exception), 'Only direct subclasses of Document may set "allow_inheritance" to False')

    def test_allow_inheritance_abstract_document(self):
        """Ensure that abstract documents can set inheritance rules and that
        _cls will not be used.
        """
        class FinalDocument(Document):
            meta = {'abstract': True,
                    'allow_inheritance': False}

        class Animal(FinalDocument):
            name = StringField()

        with self.assertRaises(ValueError) as cm:
            class Mammal(Animal):
                pass

        # Check that _cls isn't present in simple documents
        doc = Animal(name='dog')
        self.assertNotIn('_cls', doc.to_mongo())

    def test_using_abstract_class_in_reference_field(self):
        # Ensures no regression of #1920
        class AbstractHuman(Document):
            meta = {'abstract': True}

        class Dad(AbstractHuman):
            name = StringField()

        class Home(Document):
            dad = ReferenceField(AbstractHuman)  # Referencing the abstract class
            address = StringField()

        dad = Dad(name='5').save()
        Home(dad=dad, address='street').save()

        home = Home.objects.first()
        home.address = 'garbage'
        home.save()  # Was failing with ValidationError

    def test_abstract_class_referencing_self(self):
        # Ensures no regression of #1920
        class Human(Document):
            meta = {'abstract': True}
            creator = ReferenceField('self', dbref=True)

        class User(Human):
            name = StringField()

        user = User(name='John').save()
        user2 = User(name='Foo', creator=user).save()

        user2 = User.objects.with_id(user2.id)
        user2.name = 'Bar'
        user2.save()  # Was failing with ValidationError

    def test_abstract_handle_ids_in_metaclass_properly(self):

        class City(Document):
            continent = StringField()
            meta = {'abstract': True,
                    'allow_inheritance': False}

        class EuropeanCity(City):
            name = StringField()

        berlin = EuropeanCity(name='Berlin', continent='Europe')
        self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered))
        self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered))
        self.assertEqual(len(berlin._fields_ordered), 3)
        self.assertEqual(berlin._fields_ordered[0], 'id')

    def test_auto_id_not_set_if_specific_in_parent_class(self):

        class City(Document):
            continent = StringField()
            city_id = IntField(primary_key=True)
            meta = {'abstract': True,
                    'allow_inheritance': False}

        class EuropeanCity(City):
            name = StringField()

        berlin = EuropeanCity(name='Berlin', continent='Europe')
        self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered))
        self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered))
        self.assertEqual(len(berlin._fields_ordered), 3)
        self.assertEqual(berlin._fields_ordered[0], 'city_id')

    def test_auto_id_vs_non_pk_id_field(self):

        class City(Document):
            continent = StringField()
            id = IntField()
            meta = {'abstract': True,
                    'allow_inheritance': False}

        class EuropeanCity(City):
            name = StringField()

        berlin = EuropeanCity(name='Berlin', continent='Europe')
        self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered))
        self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered))
        self.assertEqual(len(berlin._fields_ordered), 4)
        self.assertEqual(berlin._fields_ordered[0], 'auto_id_0')
        berlin.save()
        self.assertEqual(berlin.pk, berlin.auto_id_0)
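
        # Sketch of the rule these tests exercise: an automatic primary key is
        # generated only when no field claims primary_key, and if a non-primary
        # field already owns the name "id", the generated key falls back to
        # "auto_id_N" (assumed naming, N incremented until free).
        assert "auto_id_0" in berlin._fields
        assert berlin._fields["auto_id_0"].primary_key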

    def test_abstract_document_creation_does_not_fail(self):
        class City(Document):
            continent = StringField()
            meta = {'abstract': True,
                    'allow_inheritance': False}

        city = City(continent='asia')
        self.assertEqual(None, city.pk)
        # TODO: expected error? Shouldn't we create a new error type?
        with self.assertRaises(KeyError):
            setattr(city, 'pk', 1)

    def test_allow_inheritance_embedded_document(self):
        """Ensure embedded documents respect inheritance."""
        class Comment(EmbeddedDocument):
            content = StringField()

        with self.assertRaises(ValueError):
            class SpecialComment(Comment):
                pass

        doc = Comment(content='test')
        self.assertNotIn('_cls', doc.to_mongo())

        class Comment(EmbeddedDocument):
            content = StringField()
            meta = {'allow_inheritance': True}

        doc = Comment(content='test')
        self.assertIn('_cls', doc.to_mongo())
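
        # In short, a sketch of the contract under test: _cls is serialised for
        # an EmbeddedDocument only once inheritance is enabled, e.g.
        #   Comment(content='test').to_mongo()
        #   -> {'content': 'test'}                      without allow_inheritance
        #   -> {'_cls': 'Comment', 'content': 'test'}   with allow_inheritance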

    def test_document_inheritance(self):
        """Ensure multiple inheritance of abstract documents
        """
        class DateCreatedDocument(Document):
            meta = {
                'allow_inheritance': True,
                'abstract': True,
            }

        class DateUpdatedDocument(Document):
            meta = {
                'allow_inheritance': True,
                'abstract': True,
            }

        try:
            class MyDocument(DateCreatedDocument, DateUpdatedDocument):
                pass
        except Exception:
            self.assertTrue(False, "Couldn't create MyDocument class")

    def test_abstract_documents(self):
        """Ensure that a document superclass can be marked as abstract
        thereby not using it as the name for the collection."""

        defaults = {'index_background': True,
                    'index_drop_dups': True,
                    'index_opts': {'hello': 'world'},
                    'allow_inheritance': True,
                    'queryset_class': 'QuerySet',
                    'db_alias': 'myDB',
                    'shard_key': ('hello', 'world')}

        meta_settings = {'abstract': True}
        meta_settings.update(defaults)

        class Animal(Document):
            name = StringField()
            meta = meta_settings

        class Fish(Animal): pass
        class Guppy(Fish): pass

        class Mammal(Animal):
            meta = {'abstract': True}
        class Human(Mammal): pass

        for k, v in iteritems(defaults):
            for cls in [Animal, Fish, Guppy]:
                self.assertEqual(cls._meta[k], v)

        self.assertNotIn('collection', Animal._meta)
        self.assertNotIn('collection', Mammal._meta)

        self.assertEqual(Animal._get_collection_name(), None)
        self.assertEqual(Mammal._get_collection_name(), None)

        self.assertEqual(Fish._get_collection_name(), 'fish')
        self.assertEqual(Guppy._get_collection_name(), 'fish')
        self.assertEqual(Human._get_collection_name(), 'human')

        # ensure that a subclass of a non-abstract class can't be abstract
        with self.assertRaises(ValueError):
            class EvilHuman(Human):
                evil = BooleanField(default=True)
                meta = {'abstract': True}

    def test_abstract_embedded_documents(self):
        # 789: EmbeddedDocument shouldn't inherit abstract
        class A(EmbeddedDocument):
            meta = {"abstract": True}

        class B(A):
            pass

        self.assertFalse(B._meta["abstract"])

    def test_inherited_collections(self):
        """Ensure that subclassed documents don't override parents'
        collections
        """

        class Drink(Document):
            name = StringField()
            meta = {'allow_inheritance': True}

        class Drinker(Document):
            drink = GenericReferenceField()

        try:
            warnings.simplefilter("error")

            class AlcoholicDrink(Drink):
                meta = {'collection': 'booze'}

        except SyntaxWarning:
            warnings.simplefilter("ignore")

            class AlcoholicDrink(Drink):
                meta = {'collection': 'booze'}

        else:
            raise AssertionError("SyntaxWarning should be triggered")

        warnings.resetwarnings()

        Drink.drop_collection()
        AlcoholicDrink.drop_collection()
        Drinker.drop_collection()

        red_bull = Drink(name='Red Bull')
        red_bull.save()

        programmer = Drinker(drink=red_bull)
        programmer.save()

        beer = AlcoholicDrink(name='Beer')
        beer.save()
        real_person = Drinker(drink=beer)
        real_person.save()

        self.assertEqual(Drinker.objects[0].drink.name, red_bull.name)
        self.assertEqual(Drinker.objects[1].drink.name, beer.name)


if __name__ == '__main__':
    unittest.main()
@ -2,18 +2,14 @@
import unittest

from mongoengine import *
from mongoengine.pymongo_support import list_collection_names

from mongoengine.queryset import NULLIFY, PULL
from mongoengine.connection import get_db

__all__ = ("ClassMethodsTest", )
from mongoengine.pymongo_support import list_collection_names
from mongoengine.queryset import NULLIFY, PULL


class ClassMethodsTest(unittest.TestCase):

class TestClassMethods(unittest.TestCase):
    def setUp(self):
        connect(db='mongoenginetest')
        connect(db="mongoenginetest")
        self.db = get_db()

        class Person(Document):
@ -33,54 +29,53 @@ class ClassMethodsTest(unittest.TestCase):
    def test_definition(self):
        """Ensure that document may be defined using fields.
        """
        self.assertEqual(['_cls', 'age', 'id', 'name'],
                         sorted(self.Person._fields.keys()))
        self.assertEqual(["IntField", "ObjectIdField", "StringField", "StringField"],
                         sorted([x.__class__.__name__ for x in
                                 self.Person._fields.values()]))
        assert ["_cls", "age", "id", "name"] == sorted(self.Person._fields.keys())
        assert ["IntField", "ObjectIdField", "StringField", "StringField"] == sorted(
            [x.__class__.__name__ for x in self.Person._fields.values()]
        )

    def test_get_db(self):
        """Ensure that get_db returns the expected db.
        """
        db = self.Person._get_db()
        self.assertEqual(self.db, db)
        assert self.db == db

    def test_get_collection_name(self):
        """Ensure that get_collection_name returns the expected collection
        name.
        """
        collection_name = 'person'
        self.assertEqual(collection_name, self.Person._get_collection_name())
        collection_name = "person"
        assert collection_name == self.Person._get_collection_name()

    def test_get_collection(self):
        """Ensure that get_collection returns the expected collection.
        """
        collection_name = 'person'
        collection_name = "person"
        collection = self.Person._get_collection()
        self.assertEqual(self.db[collection_name], collection)
        assert self.db[collection_name] == collection

    def test_drop_collection(self):
        """Ensure that the collection may be dropped from the database.
        """
        collection_name = 'person'
        self.Person(name='Test').save()
        self.assertIn(collection_name, list_collection_names(self.db))
        collection_name = "person"
        self.Person(name="Test").save()
        assert collection_name in list_collection_names(self.db)

        self.Person.drop_collection()
        self.assertNotIn(collection_name, list_collection_names(self.db))
        assert collection_name not in list_collection_names(self.db)

    def test_register_delete_rule(self):
        """Ensure that register delete rule adds a delete rule to the document
        meta.
        """

        class Job(Document):
            employee = ReferenceField(self.Person)

        self.assertEqual(self.Person._meta.get('delete_rules'), None)
        assert self.Person._meta.get("delete_rules") is None

        self.Person.register_delete_rule(Job, 'employee', NULLIFY)
        self.assertEqual(self.Person._meta['delete_rules'],
                         {(Job, 'employee'): NULLIFY})
        self.Person.register_delete_rule(Job, "employee", NULLIFY)
        assert self.Person._meta["delete_rules"] == {(Job, "employee"): NULLIFY}

    def test_compare_indexes(self):
        """ Ensure that the indexes are properly created and that
@ -93,23 +88,27 @@ class ClassMethodsTest(unittest.TestCase):
            description = StringField()
            tags = StringField()

            meta = {
                'indexes': [('author', 'title')]
            }
            meta = {"indexes": [("author", "title")]}

        BlogPost.drop_collection()

        BlogPost.ensure_indexes()
        self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []})
        assert BlogPost.compare_indexes() == {"missing": [], "extra": []}

        BlogPost.ensure_index(['author', 'description'])
        self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': [[('author', 1), ('description', 1)]]})
        BlogPost.ensure_index(["author", "description"])
        assert BlogPost.compare_indexes() == {
            "missing": [],
            "extra": [[("author", 1), ("description", 1)]],
        }

        BlogPost._get_collection().drop_index('author_1_description_1')
        self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []})
        BlogPost._get_collection().drop_index("author_1_description_1")
        assert BlogPost.compare_indexes() == {"missing": [], "extra": []}

        BlogPost._get_collection().drop_index('author_1_title_1')
        self.assertEqual(BlogPost.compare_indexes(), {'missing': [[('author', 1), ('title', 1)]], 'extra': []})
        BlogPost._get_collection().drop_index("author_1_title_1")
        assert BlogPost.compare_indexes() == {
            "missing": [[("author", 1), ("title", 1)]],
            "extra": [],
        }

    def test_compare_indexes_inheritance(self):
        """ Ensure that the indexes are properly created and that
@ -122,32 +121,34 @@ class ClassMethodsTest(unittest.TestCase):
            title = StringField()
            description = StringField()

            meta = {
                'allow_inheritance': True
            }
            meta = {"allow_inheritance": True}

        class BlogPostWithTags(BlogPost):
            tags = StringField()
            tag_list = ListField(StringField())

            meta = {
                'indexes': [('author', 'tags')]
            }
            meta = {"indexes": [("author", "tags")]}

        BlogPost.drop_collection()

        BlogPost.ensure_indexes()
        BlogPostWithTags.ensure_indexes()
        self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []})
        assert BlogPost.compare_indexes() == {"missing": [], "extra": []}

        BlogPostWithTags.ensure_index(['author', 'tag_list'])
        self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': [[('_cls', 1), ('author', 1), ('tag_list', 1)]]})
        BlogPostWithTags.ensure_index(["author", "tag_list"])
        assert BlogPost.compare_indexes() == {
            "missing": [],
            "extra": [[("_cls", 1), ("author", 1), ("tag_list", 1)]],
        }

        BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tag_list_1')
        self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []})
        BlogPostWithTags._get_collection().drop_index("_cls_1_author_1_tag_list_1")
        assert BlogPost.compare_indexes() == {"missing": [], "extra": []}

        BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tags_1')
        self.assertEqual(BlogPost.compare_indexes(), {'missing': [[('_cls', 1), ('author', 1), ('tags', 1)]], 'extra': []})
        BlogPostWithTags._get_collection().drop_index("_cls_1_author_1_tags_1")
        assert BlogPost.compare_indexes() == {
            "missing": [[("_cls", 1), ("author", 1), ("tags", 1)]],
            "extra": [],
        }

    def test_compare_indexes_multiple_subclasses(self):
        """ Ensure that compare_indexes behaves correctly if called from a
@ -159,32 +160,26 @@ class ClassMethodsTest(unittest.TestCase):
            title = StringField()
            description = StringField()

            meta = {
                'allow_inheritance': True
            }
            meta = {"allow_inheritance": True}

        class BlogPostWithTags(BlogPost):
            tags = StringField()
            tag_list = ListField(StringField())

            meta = {
                'indexes': [('author', 'tags')]
            }
            meta = {"indexes": [("author", "tags")]}

        class BlogPostWithCustomField(BlogPost):
            custom = DictField()

            meta = {
                'indexes': [('author', 'custom')]
            }
            meta = {"indexes": [("author", "custom")]}

        BlogPost.ensure_indexes()
        BlogPostWithTags.ensure_indexes()
        BlogPostWithCustomField.ensure_indexes()

        self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []})
        self.assertEqual(BlogPostWithTags.compare_indexes(), {'missing': [], 'extra': []})
        self.assertEqual(BlogPostWithCustomField.compare_indexes(), {'missing': [], 'extra': []})
        assert BlogPost.compare_indexes() == {"missing": [], "extra": []}
        assert BlogPostWithTags.compare_indexes() == {"missing": [], "extra": []}
        assert BlogPostWithCustomField.compare_indexes() == {"missing": [], "extra": []}

    def test_compare_indexes_for_text_indexes(self):
        """ Ensure that compare_indexes behaves correctly for text indexes """
@ -192,18 +187,21 @@ class ClassMethodsTest(unittest.TestCase):
        class Doc(Document):
            a = StringField()
            b = StringField()
            meta = {'indexes': [
                {'fields': ['$a', "$b"],
                 'default_language': 'english',
                 'weights': {'a': 10, 'b': 2}
                 }
            ]}
            meta = {
                "indexes": [
                    {
                        "fields": ["$a", "$b"],
                        "default_language": "english",
                        "weights": {"a": 10, "b": 2},
                    }
                ]
            }

        Doc.drop_collection()
        Doc.ensure_indexes()
        actual = Doc.compare_indexes()
        expected = {'missing': [], 'extra': []}
        self.assertEqual(actual, expected)
        expected = {"missing": [], "extra": []}
        assert actual == expected

    def test_list_indexes_inheritance(self):
        """ ensure that all of the indexes are listed regardless of the super-
@ -215,23 +213,17 @@ class ClassMethodsTest(unittest.TestCase):
            title = StringField()
            description = StringField()

            meta = {
                'allow_inheritance': True
            }
            meta = {"allow_inheritance": True}

        class BlogPostWithTags(BlogPost):
            tags = StringField()

            meta = {
                'indexes': [('author', 'tags')]
            }
            meta = {"indexes": [("author", "tags")]}

        class BlogPostWithTagsAndExtraText(BlogPostWithTags):
            extra_text = StringField()

            meta = {
                'indexes': [('author', 'tags', 'extra_text')]
            }
            meta = {"indexes": [("author", "tags", "extra_text")]}

        BlogPost.drop_collection()

@ -239,17 +231,16 @@ class ClassMethodsTest(unittest.TestCase):
        BlogPostWithTags.ensure_indexes()
        BlogPostWithTagsAndExtraText.ensure_indexes()

        self.assertEqual(BlogPost.list_indexes(),
                         BlogPostWithTags.list_indexes())
        self.assertEqual(BlogPost.list_indexes(),
                         BlogPostWithTagsAndExtraText.list_indexes())
        self.assertEqual(BlogPost.list_indexes(),
                         [[('_cls', 1), ('author', 1), ('tags', 1)],
                          [('_cls', 1), ('author', 1), ('tags', 1), ('extra_text', 1)],
                          [(u'_id', 1)], [('_cls', 1)]])
        assert BlogPost.list_indexes() == BlogPostWithTags.list_indexes()
        assert BlogPost.list_indexes() == BlogPostWithTagsAndExtraText.list_indexes()
        assert BlogPost.list_indexes() == [
            [("_cls", 1), ("author", 1), ("tags", 1)],
            [("_cls", 1), ("author", 1), ("tags", 1), ("extra_text", 1)],
            [(u"_id", 1)],
            [("_cls", 1)],
        ]

    def test_register_delete_rule_inherited(self):

        class Vaccine(Document):
            name = StringField(required=True)

@ -257,15 +248,17 @@ class ClassMethodsTest(unittest.TestCase):

        class Animal(Document):
            family = StringField(required=True)
            vaccine_made = ListField(ReferenceField("Vaccine", reverse_delete_rule=PULL))
            vaccine_made = ListField(
                ReferenceField("Vaccine", reverse_delete_rule=PULL)
            )

            meta = {"allow_inheritance": True, "indexes": ["family"]}

        class Cat(Animal):
            name = StringField(required=True)

        self.assertEqual(Vaccine._meta['delete_rules'][(Animal, 'vaccine_made')], PULL)
        self.assertEqual(Vaccine._meta['delete_rules'][(Cat, 'vaccine_made')], PULL)
        assert Vaccine._meta["delete_rules"][(Animal, "vaccine_made")] == PULL
        assert Vaccine._meta["delete_rules"][(Cat, "vaccine_made")] == PULL

    def test_collection_naming(self):
        """Ensure that a collection with a specified name may be used.
@ -273,80 +266,76 @@ class ClassMethodsTest(unittest.TestCase):

        class DefaultNamingTest(Document):
            pass
        self.assertEqual('default_naming_test',
                         DefaultNamingTest._get_collection_name())

        assert "default_naming_test" == DefaultNamingTest._get_collection_name()

        class CustomNamingTest(Document):
            meta = {'collection': 'pimp_my_collection'}
            meta = {"collection": "pimp_my_collection"}

        self.assertEqual('pimp_my_collection',
                         CustomNamingTest._get_collection_name())
        assert "pimp_my_collection" == CustomNamingTest._get_collection_name()

        class DynamicNamingTest(Document):
            meta = {'collection': lambda c: "DYNAMO"}
        self.assertEqual('DYNAMO', DynamicNamingTest._get_collection_name())
            meta = {"collection": lambda c: "DYNAMO"}

        assert "DYNAMO" == DynamicNamingTest._get_collection_name()

        # Use Abstract class to handle backwards compatibility
        class BaseDocument(Document):
            meta = {
                'abstract': True,
                'collection': lambda c: c.__name__.lower()
            }
            meta = {"abstract": True, "collection": lambda c: c.__name__.lower()}

        class OldNamingConvention(BaseDocument):
            pass
        self.assertEqual('oldnamingconvention',
                         OldNamingConvention._get_collection_name())

        assert "oldnamingconvention" == OldNamingConvention._get_collection_name()

        class InheritedAbstractNamingTest(BaseDocument):
            meta = {'collection': 'wibble'}
        self.assertEqual('wibble',
                         InheritedAbstractNamingTest._get_collection_name())
            meta = {"collection": "wibble"}

        assert "wibble" == InheritedAbstractNamingTest._get_collection_name()

        # Mixin tests
        class BaseMixin(object):
            meta = {
                'collection': lambda c: c.__name__.lower()
            }
            meta = {"collection": lambda c: c.__name__.lower()}

        class OldMixinNamingConvention(Document, BaseMixin):
            pass
        self.assertEqual('oldmixinnamingconvention',
                         OldMixinNamingConvention._get_collection_name())

        assert (
            "oldmixinnamingconvention"
            == OldMixinNamingConvention._get_collection_name()
        )

        class BaseMixin(object):
            meta = {
                'collection': lambda c: c.__name__.lower()
            }
            meta = {"collection": lambda c: c.__name__.lower()}

        class BaseDocument(Document, BaseMixin):
            meta = {'allow_inheritance': True}
            meta = {"allow_inheritance": True}

        class MyDocument(BaseDocument):
            pass

        self.assertEqual('basedocument', MyDocument._get_collection_name())
        assert "basedocument" == MyDocument._get_collection_name()

    def test_custom_collection_name_operations(self):
        """Ensure that a collection with a specified name is used as expected.
        """
        collection_name = 'personCollTest'
        collection_name = "personCollTest"

        class Person(Document):
            name = StringField()
            meta = {'collection': collection_name}
            meta = {"collection": collection_name}

        Person(name="Test User").save()
        self.assertIn(collection_name, list_collection_names(self.db))
        assert collection_name in list_collection_names(self.db)

        user_obj = self.db[collection_name].find_one()
        self.assertEqual(user_obj['name'], "Test User")
        assert user_obj["name"] == "Test User"

        user_obj = Person.objects[0]
        self.assertEqual(user_obj.name, "Test User")
        assert user_obj.name == "Test User"

        Person.drop_collection()
        self.assertNotIn(collection_name, list_collection_names(self.db))
        assert collection_name not in list_collection_names(self.db)
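
        # list_collection_names (imported above) papers over a PyMongo API
        # rename; a sketch of the assumed behaviour:
        #
        #   def list_collection_names(db, include_system_collections=False):
        #       if hasattr(db, "list_collection_names"):  # PyMongo >= 3.6
        #           return db.list_collection_names()
        #       return db.collection_names(include_system_collections)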

    def test_collection_name_and_primary(self):
        """Ensure that a collection with a specified name may be used.
@ -354,15 +343,15 @@ class ClassMethodsTest(unittest.TestCase):

        class Person(Document):
            name = StringField(primary_key=True)
            meta = {'collection': 'app'}
            meta = {"collection": "app"}

        Person(name="Test User").save()

        user_obj = Person.objects.first()
        self.assertEqual(user_obj.name, "Test User")
        assert user_obj.name == "Test User"

        Person.drop_collection()


if __name__ == '__main__':
if __name__ == "__main__":
    unittest.main()
926
tests/document/test_delta.py
Normal file
926
tests/document/test_delta.py
Normal file
@ -0,0 +1,926 @@
# -*- coding: utf-8 -*-
import unittest

from bson import SON
from mongoengine import *
from mongoengine.pymongo_support import list_collection_names
from tests.utils import MongoDBTestCase


class TestDelta(MongoDBTestCase):
    def setUp(self):
        super(TestDelta, self).setUp()

        class Person(Document):
            name = StringField()
            age = IntField()

            non_field = True

            meta = {"allow_inheritance": True}

        self.Person = Person

    def tearDown(self):
        for collection in list_collection_names(self.db):
            self.db.drop_collection(collection)

    def test_delta(self):
        self.delta(Document)
        self.delta(DynamicDocument)

    @staticmethod
    def delta(DocClass):
        class Doc(DocClass):
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        assert doc._get_changed_fields() == []
        assert doc._delta() == ({}, {})

        doc.string_field = "hello"
        assert doc._get_changed_fields() == ["string_field"]
        assert doc._delta() == ({"string_field": "hello"}, {})

        doc._changed_fields = []
        doc.int_field = 1
        assert doc._get_changed_fields() == ["int_field"]
        assert doc._delta() == ({"int_field": 1}, {})

        doc._changed_fields = []
        dict_value = {"hello": "world", "ping": "pong"}
        doc.dict_field = dict_value
        assert doc._get_changed_fields() == ["dict_field"]
        assert doc._delta() == ({"dict_field": dict_value}, {})

        doc._changed_fields = []
        list_value = ["1", 2, {"hello": "world"}]
        doc.list_field = list_value
        assert doc._get_changed_fields() == ["list_field"]
        assert doc._delta() == ({"list_field": list_value}, {})

        # Test unsetting
        doc._changed_fields = []
        doc.dict_field = {}
        assert doc._get_changed_fields() == ["dict_field"]
        assert doc._delta() == ({}, {"dict_field": 1})

        doc._changed_fields = []
        doc.list_field = []
        assert doc._get_changed_fields() == ["list_field"]
        assert doc._delta() == ({}, {"list_field": 1})

    def test_delta_recursive(self):
        self.delta_recursive(Document, EmbeddedDocument)
        self.delta_recursive(DynamicDocument, EmbeddedDocument)
        self.delta_recursive(Document, DynamicEmbeddedDocument)
        self.delta_recursive(DynamicDocument, DynamicEmbeddedDocument)

    def delta_recursive(self, DocClass, EmbeddedClass):
        class Embedded(EmbeddedClass):
            id = StringField()
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()

        class Doc(DocClass):
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()
            embedded_field = EmbeddedDocumentField(Embedded)

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        assert doc._get_changed_fields() == []
        assert doc._delta() == ({}, {})

        embedded_1 = Embedded()
        embedded_1.id = "010101"
        embedded_1.string_field = "hello"
        embedded_1.int_field = 1
        embedded_1.dict_field = {"hello": "world"}
        embedded_1.list_field = ["1", 2, {"hello": "world"}]
        doc.embedded_field = embedded_1

        assert doc._get_changed_fields() == ["embedded_field"]

        embedded_delta = {
            "id": "010101",
            "string_field": "hello",
            "int_field": 1,
            "dict_field": {"hello": "world"},
            "list_field": ["1", 2, {"hello": "world"}],
        }
        assert doc.embedded_field._delta() == (embedded_delta, {})
        assert doc._delta() == ({"embedded_field": embedded_delta}, {})

        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.dict_field = {}
        assert doc._get_changed_fields() == ["embedded_field.dict_field"]
        assert doc.embedded_field._delta() == ({}, {"dict_field": 1})
        assert doc._delta() == ({}, {"embedded_field.dict_field": 1})
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.dict_field == {}

        doc.embedded_field.list_field = []
        assert doc._get_changed_fields() == ["embedded_field.list_field"]
        assert doc.embedded_field._delta() == ({}, {"list_field": 1})
        assert doc._delta() == ({}, {"embedded_field.list_field": 1})
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field == []

        embedded_2 = Embedded()
        embedded_2.string_field = "hello"
        embedded_2.int_field = 1
        embedded_2.dict_field = {"hello": "world"}
        embedded_2.list_field = ["1", 2, {"hello": "world"}]

        doc.embedded_field.list_field = ["1", 2, embedded_2]
        assert doc._get_changed_fields() == ["embedded_field.list_field"]

        assert doc.embedded_field._delta() == (
            {
                "list_field": [
                    "1",
                    2,
                    {
                        "_cls": "Embedded",
                        "string_field": "hello",
                        "dict_field": {"hello": "world"},
                        "int_field": 1,
                        "list_field": ["1", 2, {"hello": "world"}],
                    },
                ]
            },
            {},
        )

        assert doc._delta() == (
            {
                "embedded_field.list_field": [
                    "1",
                    2,
                    {
                        "_cls": "Embedded",
                        "string_field": "hello",
                        "dict_field": {"hello": "world"},
                        "int_field": 1,
                        "list_field": ["1", 2, {"hello": "world"}],
                    },
                ]
            },
            {},
        )
        doc.save()
        doc = doc.reload(10)

        assert doc.embedded_field.list_field[0] == "1"
        assert doc.embedded_field.list_field[1] == 2
        for k in doc.embedded_field.list_field[2]._fields:
            assert doc.embedded_field.list_field[2][k] == embedded_2[k]

        doc.embedded_field.list_field[2].string_field = "world"
        assert doc._get_changed_fields() == ["embedded_field.list_field.2.string_field"]
        assert doc.embedded_field._delta() == (
            {"list_field.2.string_field": "world"},
            {},
        )
        assert doc._delta() == (
            {"embedded_field.list_field.2.string_field": "world"},
            {},
        )
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].string_field == "world"

        # Test multiple assignments
        doc.embedded_field.list_field[2].string_field = "hello world"
        doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
        assert doc._get_changed_fields() == ["embedded_field.list_field.2"]
        assert doc.embedded_field._delta() == (
            {
                "list_field.2": {
                    "_cls": "Embedded",
                    "string_field": "hello world",
                    "int_field": 1,
                    "list_field": ["1", 2, {"hello": "world"}],
                    "dict_field": {"hello": "world"},
                }
            },
            {},
        )
        assert doc._delta() == (
            {
                "embedded_field.list_field.2": {
                    "_cls": "Embedded",
                    "string_field": "hello world",
                    "int_field": 1,
                    "list_field": ["1", 2, {"hello": "world"}],
                    "dict_field": {"hello": "world"},
                }
            },
            {},
        )
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].string_field == "hello world"

        # Test list native methods
        doc.embedded_field.list_field[2].list_field.pop(0)
        assert doc._delta() == (
            {"embedded_field.list_field.2.list_field": [2, {"hello": "world"}]},
            {},
        )
        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.list_field[2].list_field.append(1)
        assert doc._delta() == (
            {"embedded_field.list_field.2.list_field": [2, {"hello": "world"}, 1]},
            {},
        )
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].list_field == [2, {"hello": "world"}, 1]

        doc.embedded_field.list_field[2].list_field.sort(key=str)
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].list_field == [1, 2, {"hello": "world"}]

        del doc.embedded_field.list_field[2].list_field[2]["hello"]
        assert doc._delta() == (
            {},
            {"embedded_field.list_field.2.list_field.2.hello": 1},
        )
        doc.save()
        doc = doc.reload(10)

        del doc.embedded_field.list_field[2].list_field
        assert doc._delta() == ({}, {"embedded_field.list_field.2.list_field": 1})

        doc.save()
        doc = doc.reload(10)

        doc.dict_field["Embedded"] = embedded_1
        doc.save()
        doc = doc.reload(10)

        doc.dict_field["Embedded"].string_field = "Hello World"
        assert doc._get_changed_fields() == ["dict_field.Embedded.string_field"]
        assert doc._delta() == ({"dict_field.Embedded.string_field": "Hello World"}, {})

    def test_circular_reference_deltas(self):
        self.circular_reference_deltas(Document, Document)
        self.circular_reference_deltas(Document, DynamicDocument)
        self.circular_reference_deltas(DynamicDocument, Document)
        self.circular_reference_deltas(DynamicDocument, DynamicDocument)

    def circular_reference_deltas(self, DocClass1, DocClass2):
        class Person(DocClass1):
            name = StringField()
            owns = ListField(ReferenceField("Organization"))

        class Organization(DocClass2):
            name = StringField()
            owner = ReferenceField("Person")

        Person.drop_collection()
        Organization.drop_collection()

        person = Person(name="owner").save()
        organization = Organization(name="company").save()

        person.owns.append(organization)
        organization.owner = person

        person.save()
        organization.save()

        p = Person.objects[0].select_related()
        o = Organization.objects.first()
        assert p.owns[0] == o
        assert o.owner == p

    def test_circular_reference_deltas_2(self):
        self.circular_reference_deltas_2(Document, Document)
        self.circular_reference_deltas_2(Document, DynamicDocument)
        self.circular_reference_deltas_2(DynamicDocument, Document)
        self.circular_reference_deltas_2(DynamicDocument, DynamicDocument)

    def circular_reference_deltas_2(self, DocClass1, DocClass2, dbref=True):
        class Person(DocClass1):
            name = StringField()
            owns = ListField(ReferenceField("Organization", dbref=dbref))
            employer = ReferenceField("Organization", dbref=dbref)

        class Organization(DocClass2):
            name = StringField()
            owner = ReferenceField("Person", dbref=dbref)
            employees = ListField(ReferenceField("Person", dbref=dbref))

        Person.drop_collection()
        Organization.drop_collection()

        person = Person(name="owner").save()
        employee = Person(name="employee").save()
        organization = Organization(name="company").save()

        person.owns.append(organization)
        organization.owner = person

        organization.employees.append(employee)
        employee.employer = organization

        person.save()
        organization.save()
        employee.save()

        p = Person.objects.get(name="owner")
        e = Person.objects.get(name="employee")
        o = Organization.objects.first()

        assert p.owns[0] == o
        assert o.owner == p
        assert e.employer == o

        return person, organization, employee

    def test_delta_db_field(self):
        self.delta_db_field(Document)
        self.delta_db_field(DynamicDocument)

    def delta_db_field(self, DocClass):
        class Doc(DocClass):
            string_field = StringField(db_field="db_string_field")
            int_field = IntField(db_field="db_int_field")
            dict_field = DictField(db_field="db_dict_field")
            list_field = ListField(db_field="db_list_field")

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        assert doc._get_changed_fields() == []
        assert doc._delta() == ({}, {})

        doc.string_field = "hello"
        assert doc._get_changed_fields() == ["db_string_field"]
        assert doc._delta() == ({"db_string_field": "hello"}, {})

        doc._changed_fields = []
        doc.int_field = 1
        assert doc._get_changed_fields() == ["db_int_field"]
        assert doc._delta() == ({"db_int_field": 1}, {})

        doc._changed_fields = []
        dict_value = {"hello": "world", "ping": "pong"}
        doc.dict_field = dict_value
        assert doc._get_changed_fields() == ["db_dict_field"]
        assert doc._delta() == ({"db_dict_field": dict_value}, {})

        doc._changed_fields = []
        list_value = ["1", 2, {"hello": "world"}]
        doc.list_field = list_value
        assert doc._get_changed_fields() == ["db_list_field"]
        assert doc._delta() == ({"db_list_field": list_value}, {})

        # Test unsetting
        doc._changed_fields = []
        doc.dict_field = {}
        assert doc._get_changed_fields() == ["db_dict_field"]
        assert doc._delta() == ({}, {"db_dict_field": 1})

        doc._changed_fields = []
        doc.list_field = []
        assert doc._get_changed_fields() == ["db_list_field"]
        assert doc._delta() == ({}, {"db_list_field": 1})

        # Test it saves that data
        doc = Doc()
        doc.save()

        doc.string_field = "hello"
        doc.int_field = 1
        doc.dict_field = {"hello": "world"}
        doc.list_field = ["1", 2, {"hello": "world"}]
        doc.save()
        doc = doc.reload(10)

        assert doc.string_field == "hello"
        assert doc.int_field == 1
        assert doc.dict_field == {"hello": "world"}
        assert doc.list_field == ["1", 2, {"hello": "world"}]

    def test_delta_recursive_db_field_on_doc_and_embeddeddoc(self):
        self.delta_recursive_db_field(Document, EmbeddedDocument)

    def test_delta_recursive_db_field_on_doc_and_dynamicembeddeddoc(self):
        self.delta_recursive_db_field(Document, DynamicEmbeddedDocument)

    def test_delta_recursive_db_field_on_dynamicdoc_and_embeddeddoc(self):
        self.delta_recursive_db_field(DynamicDocument, EmbeddedDocument)

    def test_delta_recursive_db_field_on_dynamicdoc_and_dynamicembeddeddoc(self):
        self.delta_recursive_db_field(DynamicDocument, DynamicEmbeddedDocument)

    @staticmethod
    def delta_recursive_db_field(DocClass, EmbeddedClass):
        class Embedded(EmbeddedClass):
            string_field = StringField(db_field="db_string_field")
            int_field = IntField(db_field="db_int_field")
            dict_field = DictField(db_field="db_dict_field")
            list_field = ListField(db_field="db_list_field")

        class Doc(DocClass):
            string_field = StringField(db_field="db_string_field")
            int_field = IntField(db_field="db_int_field")
            dict_field = DictField(db_field="db_dict_field")
            list_field = ListField(db_field="db_list_field")
            embedded_field = EmbeddedDocumentField(
                Embedded, db_field="db_embedded_field"
            )

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        assert doc._get_changed_fields() == []
        assert doc._delta() == ({}, {})

        embedded_1 = Embedded()
        embedded_1.string_field = "hello"
        embedded_1.int_field = 1
        embedded_1.dict_field = {"hello": "world"}
        embedded_1.list_field = ["1", 2, {"hello": "world"}]
        doc.embedded_field = embedded_1

        assert doc._get_changed_fields() == ["db_embedded_field"]

        embedded_delta = {
            "db_string_field": "hello",
            "db_int_field": 1,
            "db_dict_field": {"hello": "world"},
            "db_list_field": ["1", 2, {"hello": "world"}],
        }
        assert doc.embedded_field._delta() == (embedded_delta, {})
        assert doc._delta() == ({"db_embedded_field": embedded_delta}, {})

        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.dict_field = {}
        assert doc._get_changed_fields() == ["db_embedded_field.db_dict_field"]
        assert doc.embedded_field._delta() == ({}, {"db_dict_field": 1})
        assert doc._delta() == ({}, {"db_embedded_field.db_dict_field": 1})
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.dict_field == {}

        assert doc._get_changed_fields() == []
        doc.embedded_field.list_field = []
        assert doc._get_changed_fields() == ["db_embedded_field.db_list_field"]
        assert doc.embedded_field._delta() == ({}, {"db_list_field": 1})
        assert doc._delta() == ({}, {"db_embedded_field.db_list_field": 1})
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field == []

        embedded_2 = Embedded()
        embedded_2.string_field = "hello"
        embedded_2.int_field = 1
        embedded_2.dict_field = {"hello": "world"}
        embedded_2.list_field = ["1", 2, {"hello": "world"}]

        doc.embedded_field.list_field = ["1", 2, embedded_2]
        assert doc._get_changed_fields() == ["db_embedded_field.db_list_field"]
        assert doc.embedded_field._delta() == (
            {
                "db_list_field": [
                    "1",
                    2,
                    {
                        "_cls": "Embedded",
                        "db_string_field": "hello",
                        "db_dict_field": {"hello": "world"},
                        "db_int_field": 1,
                        "db_list_field": ["1", 2, {"hello": "world"}],
                    },
                ]
            },
            {},
        )

        assert doc._delta() == (
            {
                "db_embedded_field.db_list_field": [
                    "1",
                    2,
                    {
                        "_cls": "Embedded",
                        "db_string_field": "hello",
                        "db_dict_field": {"hello": "world"},
                        "db_int_field": 1,
                        "db_list_field": ["1", 2, {"hello": "world"}],
                    },
                ]
            },
            {},
        )
        doc.save()
        doc = doc.reload(10)

        assert doc.embedded_field.list_field[0] == "1"
        assert doc.embedded_field.list_field[1] == 2
        for k in doc.embedded_field.list_field[2]._fields:
            assert doc.embedded_field.list_field[2][k] == embedded_2[k]

        doc.embedded_field.list_field[2].string_field = "world"
        assert doc._get_changed_fields() == [
            "db_embedded_field.db_list_field.2.db_string_field"
        ]
        assert doc.embedded_field._delta() == (
            {"db_list_field.2.db_string_field": "world"},
            {},
        )
        assert doc._delta() == (
            {"db_embedded_field.db_list_field.2.db_string_field": "world"},
            {},
        )
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].string_field == "world"

        # Test multiple assignments
        doc.embedded_field.list_field[2].string_field = "hello world"
        doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
        assert doc._get_changed_fields() == ["db_embedded_field.db_list_field.2"]
        assert doc.embedded_field._delta() == (
            {
                "db_list_field.2": {
                    "_cls": "Embedded",
                    "db_string_field": "hello world",
                    "db_int_field": 1,
                    "db_list_field": ["1", 2, {"hello": "world"}],
                    "db_dict_field": {"hello": "world"},
                }
            },
            {},
        )
        assert doc._delta() == (
            {
                "db_embedded_field.db_list_field.2": {
                    "_cls": "Embedded",
                    "db_string_field": "hello world",
                    "db_int_field": 1,
                    "db_list_field": ["1", 2, {"hello": "world"}],
                    "db_dict_field": {"hello": "world"},
                }
            },
            {},
        )
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].string_field == "hello world"

        # Test list native methods
        doc.embedded_field.list_field[2].list_field.pop(0)
        assert doc._delta() == (
            {
                "db_embedded_field.db_list_field.2.db_list_field": [
                    2,
                    {"hello": "world"},
                ]
            },
            {},
        )
        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.list_field[2].list_field.append(1)
        assert doc._delta() == (
            {
                "db_embedded_field.db_list_field.2.db_list_field": [
                    2,
                    {"hello": "world"},
                    1,
                ]
            },
            {},
        )
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].list_field == [2, {"hello": "world"}, 1]

        doc.embedded_field.list_field[2].list_field.sort(key=str)
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].list_field == [1, 2, {"hello": "world"}]

        del doc.embedded_field.list_field[2].list_field[2]["hello"]
        assert doc._delta() == (
            {},
            {"db_embedded_field.db_list_field.2.db_list_field.2.hello": 1},
        )
        doc.save()
        doc = doc.reload(10)

        assert doc._delta() == ({}, {},)
        del doc.embedded_field.list_field[2].list_field
        assert doc._delta() == (
            {},
            {"db_embedded_field.db_list_field.2.db_list_field": 1},
        )

    def test_delta_for_dynamic_documents(self):
        class Person(DynamicDocument):
            name = StringField()
            meta = {"allow_inheritance": True}

        Person.drop_collection()

        p = Person(name="James", age=34)
        assert p._delta() == (
            SON([("_cls", "Person"), ("name", "James"), ("age", 34)]),
            {},
        )

        p.doc = 123
        del p.doc
        assert p._delta() == (
            SON([("_cls", "Person"), ("name", "James"), ("age", 34)]),
            {},
        )

        p = Person()
        p.name = "Dean"
        p.age = 22
        p.save()

        p.age = 24
        assert p.age == 24
        assert p._get_changed_fields() == ["age"]
        assert p._delta() == ({"age": 24}, {})

        p = Person.objects(age=22).get()
        p.age = 24
        assert p.age == 24
        assert p._get_changed_fields() == ["age"]
        assert p._delta() == ({"age": 24}, {})

        p.save()
        assert 1 == Person.objects(age=24).count()

    def test_dynamic_delta(self):
        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        assert doc._get_changed_fields() == []
        assert doc._delta() == ({}, {})

        doc.string_field = "hello"
        assert doc._get_changed_fields() == ["string_field"]
        assert doc._delta() == ({"string_field": "hello"}, {})

        doc._changed_fields = []
        doc.int_field = 1
        assert doc._get_changed_fields() == ["int_field"]
        assert doc._delta() == ({"int_field": 1}, {})

        doc._changed_fields = []
        dict_value = {"hello": "world", "ping": "pong"}
        doc.dict_field = dict_value
        assert doc._get_changed_fields() == ["dict_field"]
        assert doc._delta() == ({"dict_field": dict_value}, {})

        doc._changed_fields = []
        list_value = ["1", 2, {"hello": "world"}]
        doc.list_field = list_value
        assert doc._get_changed_fields() == ["list_field"]
        assert doc._delta() == ({"list_field": list_value}, {})

        # Test unsetting
        doc._changed_fields = []
        doc.dict_field = {}
        assert doc._get_changed_fields() == ["dict_field"]
        assert doc._delta() == ({}, {"dict_field": 1})

        doc._changed_fields = []
        doc.list_field = []
        assert doc._get_changed_fields() == ["list_field"]
        assert doc._delta() == ({}, {"list_field": 1})

    def test_delta_with_dbref_true(self):
        person, organization, employee = self.circular_reference_deltas_2(
            Document, Document, True
        )
        employee.name = "test"

        assert organization._get_changed_fields() == []

        updates, removals = organization._delta()
        assert removals == {}
        assert updates == {}

        organization.employees.append(person)
        updates, removals = organization._delta()
        assert removals == {}
        assert "employees" in updates

    def test_delta_with_dbref_false(self):
        person, organization, employee = self.circular_reference_deltas_2(
            Document, Document, False
        )
        employee.name = "test"

        assert organization._get_changed_fields() == []

        updates, removals = organization._delta()
        assert removals == {}
        assert updates == {}

        organization.employees.append(person)
        updates, removals = organization._delta()
        assert removals == {}
        assert "employees" in updates

    def test_nested_nested_fields_mark_as_changed(self):
        class EmbeddedDoc(EmbeddedDocument):
            name = StringField()

        class MyDoc(Document):
            subs = MapField(MapField(EmbeddedDocumentField(EmbeddedDoc)))
            name = StringField()

        MyDoc.drop_collection()

        mydoc = MyDoc(
            name="testcase1", subs={"a": {"b": EmbeddedDoc(name="foo")}}
        ).save()

        mydoc = MyDoc.objects.first()
        subdoc = mydoc.subs["a"]["b"]
        subdoc.name = "bar"

        assert subdoc._get_changed_fields() == ["name"]
        assert mydoc._get_changed_fields() == ["subs.a.b.name"]

        mydoc._clear_changed_fields()
        assert mydoc._get_changed_fields() == []

    def test_lower_level_mark_as_changed(self):
        class EmbeddedDoc(EmbeddedDocument):
            name = StringField()

        class MyDoc(Document):
            subs = MapField(EmbeddedDocumentField(EmbeddedDoc))

        MyDoc.drop_collection()

        MyDoc().save()

        mydoc = MyDoc.objects.first()
        mydoc.subs["a"] = EmbeddedDoc()
        assert mydoc._get_changed_fields() == ["subs.a"]

        subdoc = mydoc.subs["a"]
        subdoc.name = "bar"

        assert subdoc._get_changed_fields() == ["name"]
        assert mydoc._get_changed_fields() == ["subs.a"]
        mydoc.save()

        mydoc._clear_changed_fields()
        assert mydoc._get_changed_fields() == []

    def test_upper_level_mark_as_changed(self):
        class EmbeddedDoc(EmbeddedDocument):
            name = StringField()

        class MyDoc(Document):
            subs = MapField(EmbeddedDocumentField(EmbeddedDoc))

        MyDoc.drop_collection()

        MyDoc(subs={"a": EmbeddedDoc(name="foo")}).save()

        mydoc = MyDoc.objects.first()
        subdoc = mydoc.subs["a"]
        subdoc.name = "bar"

        assert subdoc._get_changed_fields() == ["name"]
        assert mydoc._get_changed_fields() == ["subs.a.name"]

        mydoc.subs["a"] = EmbeddedDoc()
        assert mydoc._get_changed_fields() == ["subs.a"]
        mydoc.save()

        mydoc._clear_changed_fields()
        assert mydoc._get_changed_fields() == []

    def test_referenced_object_changed_attributes(self):
        """Ensures that when you save a new reference to a field, the referenced object isn't altered"""

        class Organization(Document):
            name = StringField()

        class User(Document):
            name = StringField()
            org = ReferenceField("Organization", required=True)

        Organization.drop_collection()
        User.drop_collection()

        org1 = Organization(name="Org 1")
        org1.save()

        org2 = Organization(name="Org 2")
        org2.save()

        user = User(name="Fred", org=org1)
        user.save()

        org1.reload()
        org2.reload()
        user.reload()
        assert org1.name == "Org 1"
        assert org2.name == "Org 2"
        assert user.name == "Fred"

        user.name = "Harold"
        user.org = org2

        org2.name = "New Org 2"
        assert org2.name == "New Org 2"

        user.save()
        org2.save()

        assert org2.name == "New Org 2"
        org2.reload()
        assert org2.name == "New Org 2"

    def test_delta_for_nested_map_fields(self):
        class UInfoDocument(Document):
            phone = StringField()

        class EmbeddedRole(EmbeddedDocument):
            type = StringField()

        class EmbeddedUser(EmbeddedDocument):
            name = StringField()
            roles = MapField(field=EmbeddedDocumentField(EmbeddedRole))
            rolist = ListField(field=EmbeddedDocumentField(EmbeddedRole))
            info = ReferenceField(UInfoDocument)

        class Doc(Document):
            users = MapField(field=EmbeddedDocumentField(EmbeddedUser))
            num = IntField(default=-1)

        Doc.drop_collection()

        doc = Doc(num=1)
        doc.users["007"] = EmbeddedUser(name="Agent007")
        doc.save()

        uinfo = UInfoDocument(phone="79089269066")
        uinfo.save()

        d = Doc.objects(num=1).first()
        d.users["007"]["roles"]["666"] = EmbeddedRole(type="superadmin")
        d.users["007"]["rolist"].append(EmbeddedRole(type="oops"))
        d.users["007"]["info"] = uinfo
        delta = d._delta()
        assert True == ("users.007.roles.666" in delta[0])
        assert True == ("users.007.rolist" in delta[0])
        assert True == ("users.007.info" in delta[0])
        assert "superadmin" == delta[0]["users.007.roles.666"]["type"]
        assert "oops" == delta[0]["users.007.rolist"][0]["type"]
        assert uinfo.id == delta[0]["users.007.info"]
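
        # The dotted keys asserted above are how nested MapField/ListField
        # changes reach the server; a sketch of the $set document save() would
        # issue here:
        #   {"users.007.roles.666": {"type": "superadmin"},
        #    "users.007.rolist": [{"type": "oops"}],
        #    "users.007.info": uinfo.id}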
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
@@ -1,19 +1,20 @@
import unittest

import pytest

from mongoengine import *
from tests.utils import MongoDBTestCase

__all__ = ("TestDynamicDocument", )
__all__ = ("TestDynamicDocument",)


class TestDynamicDocument(MongoDBTestCase):

    def setUp(self):
        super(TestDynamicDocument, self).setUp()

        class Person(DynamicDocument):
            name = StringField()
            meta = {'allow_inheritance': True}
            meta = {"allow_inheritance": True}

        Person.drop_collection()

@@ -26,16 +27,15 @@ class TestDynamicDocument(MongoDBTestCase):
        p.name = "James"
        p.age = 34

        self.assertEqual(p.to_mongo(), {"_cls": "Person", "name": "James",
                                        "age": 34})
        self.assertEqual(p.to_mongo().keys(), ["_cls", "name", "age"])
        assert p.to_mongo() == {"_cls": "Person", "name": "James", "age": 34}
        assert p.to_mongo().keys() == ["_cls", "name", "age"]
        p.save()
        self.assertEqual(p.to_mongo().keys(), ["_id", "_cls", "name", "age"])
        assert p.to_mongo().keys() == ["_id", "_cls", "name", "age"]

        self.assertEqual(self.Person.objects.first().age, 34)
        assert self.Person.objects.first().age == 34

        # Confirm no changes to self.Person
        self.assertFalse(hasattr(self.Person, 'age'))
        assert not hasattr(self.Person, "age")

    def test_change_scope_of_variable(self):
        """Test changing the scope of a dynamic field has no adverse effects"""
@@ -45,11 +45,11 @@ class TestDynamicDocument(MongoDBTestCase):
        p.save()

        p = self.Person.objects.get()
        p.misc = {'hello': 'world'}
        p.misc = {"hello": "world"}
        p.save()

        p = self.Person.objects.get()
        self.assertEqual(p.misc, {'hello': 'world'})
        assert p.misc == {"hello": "world"}

    def test_delete_dynamic_field(self):
        """Test deleting a dynamic field works"""
@@ -60,23 +60,23 @@ class TestDynamicDocument(MongoDBTestCase):
        p.save()

        p = self.Person.objects.get()
        p.misc = {'hello': 'world'}
        p.misc = {"hello": "world"}
        p.save()

        p = self.Person.objects.get()
        self.assertEqual(p.misc, {'hello': 'world'})
        assert p.misc == {"hello": "world"}
        collection = self.db[self.Person._get_collection_name()]
        obj = collection.find_one()
        self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'misc', 'name'])
        assert sorted(obj.keys()) == ["_cls", "_id", "misc", "name"]

        del p.misc
        p.save()

        p = self.Person.objects.get()
        self.assertFalse(hasattr(p, 'misc'))
        assert not hasattr(p, "misc")

        obj = collection.find_one()
        self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'name'])
        assert sorted(obj.keys()) == ["_cls", "_id", "name"]

    def test_reload_after_unsetting(self):
        p = self.Person()
@@ -90,78 +90,55 @@ class TestDynamicDocument(MongoDBTestCase):
        p = self.Person.objects.create()
        p.update(age=1)

        self.assertEqual(len(p._data), 3)
        self.assertEqual(sorted(p._data.keys()), ['_cls', 'id', 'name'])
        assert len(p._data) == 3
        assert sorted(p._data.keys()) == ["_cls", "id", "name"]

        p.reload()
        self.assertEqual(len(p._data), 4)
        self.assertEqual(sorted(p._data.keys()), ['_cls', 'age', 'id', 'name'])
        assert len(p._data) == 4
        assert sorted(p._data.keys()) == ["_cls", "age", "id", "name"]

    def test_fields_without_underscore(self):
        """Ensure we can query dynamic fields"""
        Person = self.Person

        p = self.Person(name='Dean')
        p = self.Person(name="Dean")
        p.save()

        raw_p = Person.objects.as_pymongo().get(id=p.id)
        self.assertEqual(
            raw_p,
            {
                '_cls': u'Person',
                '_id': p.id,
                'name': u'Dean'
            }
        )
        assert raw_p == {"_cls": u"Person", "_id": p.id, "name": u"Dean"}

        p.name = 'OldDean'
        p.newattr = 'garbage'
        p.name = "OldDean"
        p.newattr = "garbage"
        p.save()
        raw_p = Person.objects.as_pymongo().get(id=p.id)
        self.assertEqual(
            raw_p,
            {
                '_cls': u'Person',
                '_id': p.id,
                'name': 'OldDean',
                'newattr': u'garbage'
            }
        )
        assert raw_p == {
            "_cls": u"Person",
            "_id": p.id,
            "name": "OldDean",
            "newattr": u"garbage",
        }

    def test_fields_containing_underscore(self):
        """Ensure we can query dynamic fields"""

        class WeirdPerson(DynamicDocument):
            name = StringField()
            _name = StringField()

        WeirdPerson.drop_collection()

        p = WeirdPerson(name='Dean', _name='Dean')
        p = WeirdPerson(name="Dean", _name="Dean")
        p.save()

        raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id)
        self.assertEqual(
            raw_p,
            {
                '_id': p.id,
                '_name': u'Dean',
                'name': u'Dean'
            }
        )
        assert raw_p == {"_id": p.id, "_name": u"Dean", "name": u"Dean"}

        p.name = 'OldDean'
        p._name = 'NewDean'
        p._newattr1 = 'garbage'  # Unknown fields won't be added
        p.name = "OldDean"
        p._name = "NewDean"
        p._newattr1 = "garbage"  # Unknown fields won't be added
        p.save()
        raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id)
        self.assertEqual(
            raw_p,
            {
                '_id': p.id,
                '_name': u'NewDean',
                'name': u'OldDean',
            }
        )
        assert raw_p == {"_id": p.id, "_name": u"NewDean", "name": u"OldDean"}

    def test_dynamic_document_queries(self):
        """Ensure we can query dynamic fields"""
@@ -170,10 +147,10 @@ class TestDynamicDocument(MongoDBTestCase):
        p.age = 22
        p.save()

        self.assertEqual(1, self.Person.objects(age=22).count())
        assert 1 == self.Person.objects(age=22).count()
        p = self.Person.objects(age=22)
        p = p.get()
        self.assertEqual(22, p.age)
        assert 22 == p.age

    def test_complex_dynamic_document_queries(self):
        class Person(DynamicDocument):
@@ -193,26 +170,25 @@ class TestDynamicDocument(MongoDBTestCase):
        p2.age = 10
        p2.save()

        self.assertEqual(Person.objects(age__icontains='ten').count(), 2)
        self.assertEqual(Person.objects(age__gte=10).count(), 1)
        assert Person.objects(age__icontains="ten").count() == 2
        assert Person.objects(age__gte=10).count() == 1

    def test_complex_data_lookups(self):
        """Ensure you can query dynamic document dynamic fields"""
        p = self.Person()
        p.misc = {'hello': 'world'}
        p.misc = {"hello": "world"}
        p.save()

        self.assertEqual(1, self.Person.objects(misc__hello='world').count())
        assert 1 == self.Person.objects(misc__hello="world").count()

    def test_three_level_complex_data_lookups(self):
        """Ensure you can query three level document dynamic fields"""
        p = self.Person.objects.create(
            misc={'hello': {'hello2': 'world'}}
        )
        self.assertEqual(1, self.Person.objects(misc__hello__hello2='world').count())
        self.Person.objects.create(misc={"hello": {"hello2": "world"}})
        assert 1 == self.Person.objects(misc__hello__hello2="world").count()

    def test_complex_embedded_document_validation(self):
        """Ensure embedded dynamic documents may be validated"""

        class Embedded(DynamicEmbeddedDocument):
            content = URLField()

@@ -222,27 +198,29 @@ class TestDynamicDocument(MongoDBTestCase):
        Doc.drop_collection()
        doc = Doc()

        embedded_doc_1 = Embedded(content='http://mongoengine.org')
        embedded_doc_1 = Embedded(content="http://mongoengine.org")
        embedded_doc_1.validate()

        embedded_doc_2 = Embedded(content='this is not a url')
        self.assertRaises(ValidationError, embedded_doc_2.validate)
        embedded_doc_2 = Embedded(content="this is not a url")
        with pytest.raises(ValidationError):
            embedded_doc_2.validate()

        doc.embedded_field_1 = embedded_doc_1
        doc.embedded_field_2 = embedded_doc_2
        self.assertRaises(ValidationError, doc.validate)
        with pytest.raises(ValidationError):
            doc.validate()

    def test_inheritance(self):
        """Ensure that dynamic document plays nice with inheritance"""

        class Employee(self.Person):
            salary = IntField()

        Employee.drop_collection()

        self.assertIn('name', Employee._fields)
        self.assertIn('salary', Employee._fields)
        self.assertEqual(Employee._get_collection_name(),
                         self.Person._get_collection_name())
        assert "name" in Employee._fields
        assert "salary" in Employee._fields
        assert Employee._get_collection_name() == self.Person._get_collection_name()

        joe_bloggs = Employee()
        joe_bloggs.name = "Joe Bloggs"
@@ -250,14 +228,15 @@ class TestDynamicDocument(MongoDBTestCase):
        joe_bloggs.age = 20
        joe_bloggs.save()

        self.assertEqual(1, self.Person.objects(age=20).count())
        self.assertEqual(1, Employee.objects(age=20).count())
        assert 1 == self.Person.objects(age=20).count()
        assert 1 == Employee.objects(age=20).count()

        joe_bloggs = self.Person.objects.first()
        self.assertIsInstance(joe_bloggs, Employee)
        assert isinstance(joe_bloggs, Employee)

    def test_embedded_dynamic_document(self):
        """Test dynamic embedded documents"""

        class Embedded(DynamicEmbeddedDocument):
            pass

@@ -268,33 +247,33 @@ class TestDynamicDocument(MongoDBTestCase):
        doc = Doc()

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.string_field = "hello"
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}
        embedded_1.list_field = ['1', 2, {'hello': 'world'}]
        embedded_1.dict_field = {"hello": "world"}
        embedded_1.list_field = ["1", 2, {"hello": "world"}]
        doc.embedded_field = embedded_1

        self.assertEqual(doc.to_mongo(), {
        assert doc.to_mongo() == {
            "embedded_field": {
                "_cls": "Embedded",
                "string_field": "hello",
                "int_field": 1,
                "dict_field": {"hello": "world"},
                "list_field": ['1', 2, {'hello': 'world'}]
                "list_field": ["1", 2, {"hello": "world"}],
            }
        })
        }
        doc.save()

        doc = Doc.objects.first()
        self.assertEqual(doc.embedded_field.__class__, Embedded)
        self.assertEqual(doc.embedded_field.string_field, "hello")
        self.assertEqual(doc.embedded_field.int_field, 1)
        self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'})
        self.assertEqual(doc.embedded_field.list_field,
                         ['1', 2, {'hello': 'world'}])
        assert doc.embedded_field.__class__ == Embedded
        assert doc.embedded_field.string_field == "hello"
        assert doc.embedded_field.int_field == 1
        assert doc.embedded_field.dict_field == {"hello": "world"}
        assert doc.embedded_field.list_field == ["1", 2, {"hello": "world"}]

    def test_complex_embedded_documents(self):
        """Test complex dynamic embedded documents setups"""

        class Embedded(DynamicEmbeddedDocument):
            pass

@@ -305,51 +284,54 @@ class TestDynamicDocument(MongoDBTestCase):
        doc = Doc()

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.string_field = "hello"
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}
        embedded_1.dict_field = {"hello": "world"}

        embedded_2 = Embedded()
        embedded_2.string_field = 'hello'
        embedded_2.string_field = "hello"
        embedded_2.int_field = 1
        embedded_2.dict_field = {'hello': 'world'}
        embedded_2.list_field = ['1', 2, {'hello': 'world'}]
        embedded_2.dict_field = {"hello": "world"}
        embedded_2.list_field = ["1", 2, {"hello": "world"}]

        embedded_1.list_field = ['1', 2, embedded_2]
        embedded_1.list_field = ["1", 2, embedded_2]
        doc.embedded_field = embedded_1

        self.assertEqual(doc.to_mongo(), {
        assert doc.to_mongo() == {
            "embedded_field": {
                "_cls": "Embedded",
                "string_field": "hello",
                "int_field": 1,
                "dict_field": {"hello": "world"},
                "list_field": ['1', 2,
                               {"_cls": "Embedded",
                                "string_field": "hello",
                                "int_field": 1,
                                "dict_field": {"hello": "world"},
                                "list_field": ['1', 2, {'hello': 'world'}]}
                               ]
                "list_field": [
                    "1",
                    2,
                    {
                        "_cls": "Embedded",
                        "string_field": "hello",
                        "int_field": 1,
                        "dict_field": {"hello": "world"},
                        "list_field": ["1", 2, {"hello": "world"}],
                    },
                ],
            }
        })
        }
        doc.save()
        doc = Doc.objects.first()
        self.assertEqual(doc.embedded_field.__class__, Embedded)
        self.assertEqual(doc.embedded_field.string_field, "hello")
        self.assertEqual(doc.embedded_field.int_field, 1)
        self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'})
        self.assertEqual(doc.embedded_field.list_field[0], '1')
        self.assertEqual(doc.embedded_field.list_field[1], 2)
        assert doc.embedded_field.__class__ == Embedded
        assert doc.embedded_field.string_field == "hello"
        assert doc.embedded_field.int_field == 1
        assert doc.embedded_field.dict_field == {"hello": "world"}
        assert doc.embedded_field.list_field[0] == "1"
        assert doc.embedded_field.list_field[1] == 2

        embedded_field = doc.embedded_field.list_field[2]

        self.assertEqual(embedded_field.__class__, Embedded)
        self.assertEqual(embedded_field.string_field, "hello")
        self.assertEqual(embedded_field.int_field, 1)
        self.assertEqual(embedded_field.dict_field, {'hello': 'world'})
        self.assertEqual(embedded_field.list_field, ['1', 2,
                                                     {'hello': 'world'}])
        assert embedded_field.__class__ == Embedded
        assert embedded_field.string_field == "hello"
        assert embedded_field.int_field == 1
        assert embedded_field.dict_field == {"hello": "world"}
        assert embedded_field.list_field == ["1", 2, {"hello": "world"}]

    def test_dynamic_and_embedded(self):
        """Ensure embedded documents play nicely"""
@@ -368,18 +350,18 @@ class TestDynamicDocument(MongoDBTestCase):
        person.address.city = "Lundenne"
        person.save()

        self.assertEqual(Person.objects.first().address.city, "Lundenne")
        assert Person.objects.first().address.city == "Lundenne"

        person = Person.objects.first()
        person.address = Address(city="Londinium")
        person.save()

        self.assertEqual(Person.objects.first().address.city, "Londinium")
        assert Person.objects.first().address.city == "Londinium"

        person = Person.objects.first()
        person.age = 35
        person.save()
        self.assertEqual(Person.objects.first().age, 35)
        assert Person.objects.first().age == 35

    def test_dynamic_embedded_works_with_only(self):
        """Ensure custom fieldnames on a dynamic embedded document are found by qs.only()"""
@@ -392,10 +374,15 @@ class TestDynamicDocument(MongoDBTestCase):

        Person.drop_collection()

        Person(name="Eric", address=Address(city="San Francisco", street_number="1337")).save()
        Person(
            name="Eric", address=Address(city="San Francisco", street_number="1337")
        ).save()

        self.assertEqual(Person.objects.first().address.street_number, '1337')
        self.assertEqual(Person.objects.only('address__street_number').first().address.street_number, '1337')
        assert Person.objects.first().address.street_number == "1337"
        assert (
            Person.objects.only("address__street_number").first().address.street_number
            == "1337"
        )

    def test_dynamic_and_embedded_dict_access(self):
        """Ensure embedded dynamic documents work with dict[] style access"""
@@ -419,21 +406,21 @@ class TestDynamicDocument(MongoDBTestCase):
        person["address"]["city"] = "Lundenne"
        person.save()

        self.assertEqual(Person.objects.first().address.city, "Lundenne")
        assert Person.objects.first().address.city == "Lundenne"

        self.assertEqual(Person.objects.first().phone, "555-1212")
        assert Person.objects.first().phone == "555-1212"

        person = Person.objects.first()
        person.address = Address(city="Londinium")
        person.save()

        self.assertEqual(Person.objects.first().address.city, "Londinium")
        assert Person.objects.first().address.city == "Londinium"

        person = Person.objects.first()
        person["age"] = 35
        person.save()
        self.assertEqual(Person.objects.first().age, 35)
        assert Person.objects.first().age == 35


if __name__ == '__main__':
if __name__ == "__main__":
    unittest.main()
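For context on what the converted tests cover: a DynamicDocument accepts attributes that were never declared, persists them, and lets you query on them with the usual double-underscore syntax. A minimal sketch, assuming a local MongoDB instance (the database name is illustrative):

from mongoengine import DynamicDocument, StringField, connect

connect("dynamic_sketch")  # illustrative database name

class Person(DynamicDocument):
    name = StringField()  # declared field; anything else is stored dynamically

Person.drop_collection()
p = Person(name="James")
p.age = 34                   # undeclared attribute, kept as a dynamic field
p.misc = {"hello": "world"}
p.save()

assert Person.objects(age=34).count() == 1
assert Person.objects(misc__hello="world").count() == 1  # nested lookup, as in the tests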
File diff suppressed because it is too large

631
tests/document/test_inheritance.py
Normal file
@@ -0,0 +1,631 @@
# -*- coding: utf-8 -*-
import unittest
import warnings

import pytest

from mongoengine import (
    BooleanField,
    Document,
    EmbeddedDocument,
    EmbeddedDocumentField,
    GenericReferenceField,
    IntField,
    ReferenceField,
    StringField,
)
from mongoengine.pymongo_support import list_collection_names
from tests.fixtures import Base
from tests.utils import MongoDBTestCase


class TestInheritance(MongoDBTestCase):
    def tearDown(self):
        for collection in list_collection_names(self.db):
            self.db.drop_collection(collection)

    def test_constructor_cls(self):
        # Ensures _cls is properly set during construction
        # and when object gets reloaded (prevent regression of #1950)
        class EmbedData(EmbeddedDocument):
            data = StringField()
            meta = {"allow_inheritance": True}

        class DataDoc(Document):
            name = StringField()
            embed = EmbeddedDocumentField(EmbedData)
            meta = {"allow_inheritance": True}

        test_doc = DataDoc(name="test", embed=EmbedData(data="data"))
        assert test_doc._cls == "DataDoc"
        assert test_doc.embed._cls == "EmbedData"
        test_doc.save()
        saved_doc = DataDoc.objects.with_id(test_doc.id)
        assert test_doc._cls == saved_doc._cls
        assert test_doc.embed._cls == saved_doc.embed._cls
        test_doc.delete()

    def test_superclasses(self):
        """Ensure that the correct list of superclasses is assembled.
        """

        class Animal(Document):
            meta = {"allow_inheritance": True}

        class Fish(Animal):
            pass

        class Guppy(Fish):
            pass

        class Mammal(Animal):
            pass

        class Dog(Mammal):
            pass

        class Human(Mammal):
            pass

        assert Animal._superclasses == ()
        assert Fish._superclasses == ("Animal",)
        assert Guppy._superclasses == ("Animal", "Animal.Fish")
        assert Mammal._superclasses == ("Animal",)
        assert Dog._superclasses == ("Animal", "Animal.Mammal")
        assert Human._superclasses == ("Animal", "Animal.Mammal")

    def test_external_superclasses(self):
        """Ensure that the correct list of super classes is assembled when
        importing part of the model.
        """

        class Animal(Base):
            pass

        class Fish(Animal):
            pass

        class Guppy(Fish):
            pass

        class Mammal(Animal):
            pass

        class Dog(Mammal):
            pass

        class Human(Mammal):
            pass

        assert Animal._superclasses == ("Base",)
        assert Fish._superclasses == ("Base", "Base.Animal")
        assert Guppy._superclasses == ("Base", "Base.Animal", "Base.Animal.Fish")
        assert Mammal._superclasses == ("Base", "Base.Animal")
        assert Dog._superclasses == ("Base", "Base.Animal", "Base.Animal.Mammal")
        assert Human._superclasses == ("Base", "Base.Animal", "Base.Animal.Mammal")

    def test_subclasses(self):
        """Ensure that the correct list of _subclasses (subclasses) is
        assembled.
        """

        class Animal(Document):
            meta = {"allow_inheritance": True}

        class Fish(Animal):
            pass

        class Guppy(Fish):
            pass

        class Mammal(Animal):
            pass

        class Dog(Mammal):
            pass

        class Human(Mammal):
            pass

        assert Animal._subclasses == (
            "Animal",
            "Animal.Fish",
            "Animal.Fish.Guppy",
            "Animal.Mammal",
            "Animal.Mammal.Dog",
            "Animal.Mammal.Human",
        )
        assert Fish._subclasses == ("Animal.Fish", "Animal.Fish.Guppy")
        assert Guppy._subclasses == ("Animal.Fish.Guppy",)
        assert Mammal._subclasses == (
            "Animal.Mammal",
            "Animal.Mammal.Dog",
            "Animal.Mammal.Human",
        )
        assert Human._subclasses == ("Animal.Mammal.Human",)

    def test_external_subclasses(self):
        """Ensure that the correct list of _subclasses (subclasses) is
        assembled when importing part of the model.
        """

        class Animal(Base):
            pass

        class Fish(Animal):
            pass

        class Guppy(Fish):
            pass

        class Mammal(Animal):
            pass

        class Dog(Mammal):
            pass

        class Human(Mammal):
            pass

        assert Animal._subclasses == (
            "Base.Animal",
            "Base.Animal.Fish",
            "Base.Animal.Fish.Guppy",
            "Base.Animal.Mammal",
            "Base.Animal.Mammal.Dog",
            "Base.Animal.Mammal.Human",
        )
        assert Fish._subclasses == ("Base.Animal.Fish", "Base.Animal.Fish.Guppy")
        assert Guppy._subclasses == ("Base.Animal.Fish.Guppy",)
        assert Mammal._subclasses == (
            "Base.Animal.Mammal",
            "Base.Animal.Mammal.Dog",
            "Base.Animal.Mammal.Human",
        )
        assert Human._subclasses == ("Base.Animal.Mammal.Human",)

    def test_dynamic_declarations(self):
        """Test that declaring an extra class updates meta data"""

        class Animal(Document):
            meta = {"allow_inheritance": True}

        assert Animal._superclasses == ()
        assert Animal._subclasses == ("Animal",)

        # Test dynamically adding a class changes the meta data
        class Fish(Animal):
            pass

        assert Animal._superclasses == ()
        assert Animal._subclasses == ("Animal", "Animal.Fish")

        assert Fish._superclasses == ("Animal",)
        assert Fish._subclasses == ("Animal.Fish",)

        # Test dynamically adding an inherited class changes the meta data
        class Pike(Fish):
            pass

        assert Animal._superclasses == ()
        assert Animal._subclasses == ("Animal", "Animal.Fish", "Animal.Fish.Pike")

        assert Fish._superclasses == ("Animal",)
        assert Fish._subclasses == ("Animal.Fish", "Animal.Fish.Pike")

        assert Pike._superclasses == ("Animal", "Animal.Fish")
        assert Pike._subclasses == ("Animal.Fish.Pike",)

    def test_inheritance_meta_data(self):
        """Ensure that document may inherit fields from a superclass document.
        """

        class Person(Document):
            name = StringField()
            age = IntField()

            meta = {"allow_inheritance": True}

        class Employee(Person):
            salary = IntField()

        assert ["_cls", "age", "id", "name", "salary"] == sorted(
            Employee._fields.keys()
        )
        assert Employee._get_collection_name() == Person._get_collection_name()

    def test_inheritance_to_mongo_keys(self):
        """Ensure that document may inherit fields from a superclass document.
        """

        class Person(Document):
            name = StringField()
            age = IntField()

            meta = {"allow_inheritance": True}

        class Employee(Person):
            salary = IntField()

        assert ["_cls", "age", "id", "name", "salary"] == sorted(
            Employee._fields.keys()
        )
        assert Person(name="Bob", age=35).to_mongo().keys() == ["_cls", "name", "age"]
        assert Employee(name="Bob", age=35, salary=0).to_mongo().keys() == [
            "_cls",
            "name",
            "age",
            "salary",
        ]
        assert Employee._get_collection_name() == Person._get_collection_name()

    def test_indexes_and_multiple_inheritance(self):
        """ Ensure that all of the indexes are created for a document with
        multiple inheritance.
        """

        class A(Document):
            a = StringField()

            meta = {"allow_inheritance": True, "indexes": ["a"]}

        class B(Document):
            b = StringField()

            meta = {"allow_inheritance": True, "indexes": ["b"]}

        class C(A, B):
            pass

        A.drop_collection()
        B.drop_collection()
        C.drop_collection()

        C.ensure_indexes()

        assert sorted(
            [idx["key"] for idx in C._get_collection().index_information().values()]
        ) == sorted(
            [[(u"_cls", 1), (u"b", 1)], [(u"_id", 1)], [(u"_cls", 1), (u"a", 1)]]
        )

    def test_polymorphic_queries(self):
        """Ensure that the correct subclasses are returned from a query
        """

        class Animal(Document):
            meta = {"allow_inheritance": True}

        class Fish(Animal):
            pass

        class Mammal(Animal):
            pass

        class Dog(Mammal):
            pass

        class Human(Mammal):
            pass

        Animal.drop_collection()

        Animal().save()
        Fish().save()
        Mammal().save()
        Dog().save()
        Human().save()

        classes = [obj.__class__ for obj in Animal.objects]
        assert classes == [Animal, Fish, Mammal, Dog, Human]

        classes = [obj.__class__ for obj in Mammal.objects]
        assert classes == [Mammal, Dog, Human]

        classes = [obj.__class__ for obj in Human.objects]
        assert classes == [Human]

    def test_allow_inheritance(self):
        """Ensure that inheritance is disabled by default on simple
        classes and that _cls will not be used.
        """

        class Animal(Document):
            name = StringField()

        # can't inherit because Animal didn't explicitly allow inheritance
        with pytest.raises(ValueError, match="Document Animal may not be subclassed"):

            class Dog(Animal):
                pass

        # Check that _cls etc aren't present on simple documents
        dog = Animal(name="dog").save()
        assert dog.to_mongo().keys() == ["_id", "name"]

        collection = self.db[Animal._get_collection_name()]
        obj = collection.find_one()
        assert "_cls" not in obj

    def test_cant_turn_off_inheritance_on_subclass(self):
        """Ensure if inheritance is on in a subclass you cant turn it off.
        """

        class Animal(Document):
            name = StringField()
            meta = {"allow_inheritance": True}

        with pytest.raises(ValueError) as exc_info:

            class Mammal(Animal):
                meta = {"allow_inheritance": False}

        assert (
            str(exc_info.value)
            == 'Only direct subclasses of Document may set "allow_inheritance" to False'
        )

    def test_allow_inheritance_abstract_document(self):
        """Ensure that abstract documents can set inheritance rules and that
        _cls will not be used.
        """

        class FinalDocument(Document):
            meta = {"abstract": True, "allow_inheritance": False}

        class Animal(FinalDocument):
            name = StringField()

        with pytest.raises(ValueError):

            class Mammal(Animal):
                pass

        # Check that _cls isn't present in simple documents
        doc = Animal(name="dog")
        assert "_cls" not in doc.to_mongo()

    def test_using_abstract_class_in_reference_field(self):
        # Ensures no regression of #1920
        class AbstractHuman(Document):
            meta = {"abstract": True}

        class Dad(AbstractHuman):
            name = StringField()

        class Home(Document):
            dad = ReferenceField(AbstractHuman)  # Referencing the abstract class
            address = StringField()

        dad = Dad(name="5").save()
        Home(dad=dad, address="street").save()

        home = Home.objects.first()
        home.address = "garbage"
        home.save()  # Was failing with ValidationError

    def test_abstract_class_referencing_self(self):
        # Ensures no regression of #1920
        class Human(Document):
            meta = {"abstract": True}
            creator = ReferenceField("self", dbref=True)

        class User(Human):
            name = StringField()

        user = User(name="John").save()
        user2 = User(name="Foo", creator=user).save()

        user2 = User.objects.with_id(user2.id)
        user2.name = "Bar"
        user2.save()  # Was failing with ValidationError

    def test_abstract_handle_ids_in_metaclass_properly(self):
        class City(Document):
            continent = StringField()
            meta = {"abstract": True, "allow_inheritance": False}

        class EuropeanCity(City):
            name = StringField()

        berlin = EuropeanCity(name="Berlin", continent="Europe")
        assert len(berlin._db_field_map) == len(berlin._fields_ordered)
        assert len(berlin._reverse_db_field_map) == len(berlin._fields_ordered)
        assert len(berlin._fields_ordered) == 3
        assert berlin._fields_ordered[0] == "id"

    def test_auto_id_not_set_if_specific_in_parent_class(self):
        class City(Document):
            continent = StringField()
            city_id = IntField(primary_key=True)
            meta = {"abstract": True, "allow_inheritance": False}

        class EuropeanCity(City):
            name = StringField()

        berlin = EuropeanCity(name="Berlin", continent="Europe")
        assert len(berlin._db_field_map) == len(berlin._fields_ordered)
        assert len(berlin._reverse_db_field_map) == len(berlin._fields_ordered)
        assert len(berlin._fields_ordered) == 3
        assert berlin._fields_ordered[0] == "city_id"

    def test_auto_id_vs_non_pk_id_field(self):
        class City(Document):
            continent = StringField()
            id = IntField()
            meta = {"abstract": True, "allow_inheritance": False}

        class EuropeanCity(City):
            name = StringField()

        berlin = EuropeanCity(name="Berlin", continent="Europe")
        assert len(berlin._db_field_map) == len(berlin._fields_ordered)
        assert len(berlin._reverse_db_field_map) == len(berlin._fields_ordered)
        assert len(berlin._fields_ordered) == 4
        assert berlin._fields_ordered[0] == "auto_id_0"
        berlin.save()
        assert berlin.pk == berlin.auto_id_0

    def test_abstract_document_creation_does_not_fail(self):
        class City(Document):
            continent = StringField()
            meta = {"abstract": True, "allow_inheritance": False}

        city = City(continent="asia")
        assert city.pk is None
        # TODO: expected error? Shouldn't we create a new error type?
        with pytest.raises(KeyError):
            setattr(city, "pk", 1)

    def test_allow_inheritance_embedded_document(self):
        """Ensure embedded documents respect inheritance."""

        class Comment(EmbeddedDocument):
            content = StringField()

        with pytest.raises(ValueError):

            class SpecialComment(Comment):
                pass

        doc = Comment(content="test")
        assert "_cls" not in doc.to_mongo()

        class Comment(EmbeddedDocument):
            content = StringField()
            meta = {"allow_inheritance": True}

        doc = Comment(content="test")
        assert "_cls" in doc.to_mongo()

    def test_document_inheritance(self):
        """Ensure mutliple inheritance of abstract documents
        """

        class DateCreatedDocument(Document):
            meta = {"allow_inheritance": True, "abstract": True}

        class DateUpdatedDocument(Document):
            meta = {"allow_inheritance": True, "abstract": True}

        try:

            class MyDocument(DateCreatedDocument, DateUpdatedDocument):
                pass

        except Exception:
            assert False, "Couldn't create MyDocument class"

    def test_abstract_documents(self):
        """Ensure that a document superclass can be marked as abstract
        thereby not using it as the name for the collection."""

        defaults = {
            "index_background": True,
            "index_opts": {"hello": "world"},
            "allow_inheritance": True,
            "queryset_class": "QuerySet",
            "db_alias": "myDB",
            "shard_key": ("hello", "world"),
        }

        meta_settings = {"abstract": True}
        meta_settings.update(defaults)

        class Animal(Document):
            name = StringField()
            meta = meta_settings

        class Fish(Animal):
            pass

        class Guppy(Fish):
            pass

        class Mammal(Animal):
            meta = {"abstract": True}

        class Human(Mammal):
            pass

        for k, v in defaults.items():
            for cls in [Animal, Fish, Guppy]:
                assert cls._meta[k] == v

        assert "collection" not in Animal._meta
        assert "collection" not in Mammal._meta

        assert Animal._get_collection_name() is None
        assert Mammal._get_collection_name() is None

        assert Fish._get_collection_name() == "fish"
        assert Guppy._get_collection_name() == "fish"
        assert Human._get_collection_name() == "human"

        # ensure that a subclass of a non-abstract class can't be abstract
        with pytest.raises(ValueError):

            class EvilHuman(Human):
                evil = BooleanField(default=True)
                meta = {"abstract": True}

    def test_abstract_embedded_documents(self):
        # 789: EmbeddedDocument shouldn't inherit abstract
        class A(EmbeddedDocument):
            meta = {"abstract": True}

        class B(A):
            pass

        assert not B._meta["abstract"]

    def test_inherited_collections(self):
        """Ensure that subclassed documents don't override parents'
        collections
        """

        class Drink(Document):
            name = StringField()
            meta = {"allow_inheritance": True}

        class Drinker(Document):
            drink = GenericReferenceField()

        try:
            warnings.simplefilter("error")

            class AcloholicDrink(Drink):
                meta = {"collection": "booze"}

        except SyntaxWarning:
            warnings.simplefilter("ignore")

            class AlcoholicDrink(Drink):
                meta = {"collection": "booze"}

        else:
            raise AssertionError("SyntaxWarning should be triggered")

        warnings.resetwarnings()

        Drink.drop_collection()
        AlcoholicDrink.drop_collection()
        Drinker.drop_collection()

        red_bull = Drink(name="Red Bull")
        red_bull.save()

        programmer = Drinker(drink=red_bull)
        programmer.save()

        beer = AlcoholicDrink(name="Beer")
        beer.save()
        real_person = Drinker(drink=beer)
        real_person.save()

        assert Drinker.objects[0].drink.name == red_bull.name
        assert Drinker.objects[1].drink.name == beer.name


if __name__ == "__main__":
    unittest.main()
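The new inheritance tests revolve around the allow_inheritance meta flag: subclasses share the parent's collection and are discriminated by a _cls marker. A condensed sketch of the behaviour asserted above, assuming a local MongoDB instance (the database name is illustrative):

from mongoengine import Document, StringField, connect

connect("inheritance_sketch")  # illustrative database name

class Animal(Document):
    name = StringField()
    meta = {"allow_inheritance": True}

class Fish(Animal):
    pass

# class hierarchy metadata is maintained as classes are declared
assert Fish._superclasses == ("Animal",)
assert Animal._subclasses == ("Animal", "Animal.Fish")
# subclasses share the parent's collection
assert Fish._get_collection_name() == Animal._get_collection_name()

Animal.drop_collection()
Fish(name="guppy").save()
# polymorphic query: querying the base class yields subclass instances,
# discriminated by the stored _cls marker
assert isinstance(Animal.objects.first(), Fish)
assert Animal.objects.first().to_mongo()["_cls"] == "Animal.Fish"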
File diff suppressed because it is too large
@@ -1,22 +1,14 @@
import unittest
import uuid

from nose.plugins.skip import SkipTest
from datetime import datetime
from bson import ObjectId

import pymongo

from mongoengine import *

__all__ = ("TestJson",)
from tests.utils import MongoDBTestCase


class TestJson(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')

class TestJson(MongoDBTestCase):
    def test_json_names(self):
        """
        Going to test reported issue:
@@ -25,22 +17,24 @@ class TestJson(unittest.TestCase):
        a to_json with the original class names and not the abreviated
        mongodb document keys
        """

        class Embedded(EmbeddedDocument):
            string = StringField(db_field='s')
            string = StringField(db_field="s")

        class Doc(Document):
            string = StringField(db_field='s')
            embedded = EmbeddedDocumentField(Embedded, db_field='e')
            string = StringField(db_field="s")
            embedded = EmbeddedDocumentField(Embedded, db_field="e")

        doc = Doc(string="Hello", embedded=Embedded(string="Inner Hello"))
        doc_json = doc.to_json(sort_keys=True, use_db_field=False, separators=(',', ':'))
        doc_json = doc.to_json(
            sort_keys=True, use_db_field=False, separators=(",", ":")
        )

        expected_json = """{"embedded":{"string":"Inner Hello"},"string":"Hello"}"""

        self.assertEqual(doc_json, expected_json)
        assert doc_json == expected_json

    def test_json_simple(self):

        class Embedded(EmbeddedDocument):
            string = StringField()

@@ -49,16 +43,18 @@ class TestJson(unittest.TestCase):
            embedded_field = EmbeddedDocumentField(Embedded)

            def __eq__(self, other):
                return (self.string == other.string and
                        self.embedded_field == other.embedded_field)
                return (
                    self.string == other.string
                    and self.embedded_field == other.embedded_field
                )

        doc = Doc(string="Hi", embedded_field=Embedded(string="Hi"))

        doc_json = doc.to_json(sort_keys=True, separators=(',', ':'))
        doc_json = doc.to_json(sort_keys=True, separators=(",", ":"))
        expected_json = """{"embedded_field":{"string":"Hi"},"string":"Hi"}"""
        self.assertEqual(doc_json, expected_json)
        assert doc_json == expected_json

        self.assertEqual(doc, Doc.from_json(doc.to_json()))
        assert doc == Doc.from_json(doc.to_json())

    def test_json_complex(self):
        class EmbeddedDoc(EmbeddedDocument):
@@ -68,41 +64,43 @@ class TestJson(unittest.TestCase):
            pass

        class Doc(Document):
            string_field = StringField(default='1')
            string_field = StringField(default="1")
            int_field = IntField(default=1)
            float_field = FloatField(default=1.1)
            boolean_field = BooleanField(default=True)
            datetime_field = DateTimeField(default=datetime.now)
            embedded_document_field = EmbeddedDocumentField(EmbeddedDoc,
                                                            default=lambda: EmbeddedDoc())
            embedded_document_field = EmbeddedDocumentField(
                EmbeddedDoc, default=lambda: EmbeddedDoc()
            )
            list_field = ListField(default=lambda: [1, 2, 3])
            dict_field = DictField(default=lambda: {"hello": "world"})
            objectid_field = ObjectIdField(default=ObjectId)
            reference_field = ReferenceField(Simple, default=lambda:
                                             Simple().save())
            reference_field = ReferenceField(Simple, default=lambda: Simple().save())
            map_field = MapField(IntField(), default=lambda: {"simple": 1})
            decimal_field = DecimalField(default=1.0)
            complex_datetime_field = ComplexDateTimeField(default=datetime.now)
            url_field = URLField(default="http://mongoengine.org")
            dynamic_field = DynamicField(default=1)
            generic_reference_field = GenericReferenceField(
                default=lambda: Simple().save())
            sorted_list_field = SortedListField(IntField(),
                                                default=lambda: [1, 2, 3])
                default=lambda: Simple().save()
            )
            sorted_list_field = SortedListField(IntField(), default=lambda: [1, 2, 3])
            email_field = EmailField(default="ross@example.com")
            geo_point_field = GeoPointField(default=lambda: [1, 2])
            sequence_field = SequenceField()
            uuid_field = UUIDField(default=uuid.uuid4)
            generic_embedded_document_field = GenericEmbeddedDocumentField(
                default=lambda: EmbeddedDoc())
                default=lambda: EmbeddedDoc()
            )

            def __eq__(self, other):
                import json

                return json.loads(self.to_json()) == json.loads(other.to_json())

        doc = Doc()
        self.assertEqual(doc, Doc.from_json(doc.to_json()))
        assert doc == Doc.from_json(doc.to_json())


if __name__ == '__main__':
if __name__ == "__main__":
    unittest.main()
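The JSON tests above hinge on to_json/from_json and the use_db_field switch, which decides whether the custom db_field names or the Python attribute names end up in the output. A minimal sketch of that switch for a plain string field, where serialization does not need to hit the database:

from mongoengine import Document, StringField

class Doc(Document):
    string = StringField(db_field="s")  # stored under the short key "s" in MongoDB

doc = Doc(string="Hello")
# use_db_field=True (the default) emits the short db_field names...
assert doc.to_json(sort_keys=True) == '{"s": "Hello"}'
# ...while use_db_field=False emits the Python-level attribute names
assert doc.to_json(sort_keys=True, use_db_field=False) == '{"string": "Hello"}'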
@@ -2,55 +2,60 @@
import unittest
from datetime import datetime

import pytest

from mongoengine import *

__all__ = ("ValidatorErrorTest",)
from tests.utils import MongoDBTestCase


class ValidatorErrorTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')

class TestValidatorError(MongoDBTestCase):
    def test_to_dict(self):
        """Ensure a ValidationError handles error to_dict correctly.
        """
        error = ValidationError('root')
        self.assertEqual(error.to_dict(), {})
        error = ValidationError("root")
        assert error.to_dict() == {}

        # 1st level error schema
        error.errors = {'1st': ValidationError('bad 1st'), }
        self.assertIn('1st', error.to_dict())
        self.assertEqual(error.to_dict()['1st'], 'bad 1st')
        error.errors = {"1st": ValidationError("bad 1st")}
        assert "1st" in error.to_dict()
        assert error.to_dict()["1st"] == "bad 1st"

        # 2nd level error schema
        error.errors = {'1st': ValidationError('bad 1st', errors={
            '2nd': ValidationError('bad 2nd'),
        })}
        self.assertIn('1st', error.to_dict())
        self.assertIsInstance(error.to_dict()['1st'], dict)
        self.assertIn('2nd', error.to_dict()['1st'])
        self.assertEqual(error.to_dict()['1st']['2nd'], 'bad 2nd')
        error.errors = {
            "1st": ValidationError(
                "bad 1st", errors={"2nd": ValidationError("bad 2nd")}
            )
        }
        assert "1st" in error.to_dict()
        assert isinstance(error.to_dict()["1st"], dict)
        assert "2nd" in error.to_dict()["1st"]
        assert error.to_dict()["1st"]["2nd"] == "bad 2nd"

        # moar levels
        error.errors = {'1st': ValidationError('bad 1st', errors={
            '2nd': ValidationError('bad 2nd', errors={
                '3rd': ValidationError('bad 3rd', errors={
                    '4th': ValidationError('Inception'),
                }),
            }),
        })}
        self.assertIn('1st', error.to_dict())
        self.assertIn('2nd', error.to_dict()['1st'])
        self.assertIn('3rd', error.to_dict()['1st']['2nd'])
        self.assertIn('4th', error.to_dict()['1st']['2nd']['3rd'])
        self.assertEqual(error.to_dict()['1st']['2nd']['3rd']['4th'],
                         'Inception')
        error.errors = {
            "1st": ValidationError(
                "bad 1st",
                errors={
                    "2nd": ValidationError(
                        "bad 2nd",
                        errors={
                            "3rd": ValidationError(
                                "bad 3rd", errors={"4th": ValidationError("Inception")}
                            )
                        },
                    )
                },
            )
        }
        assert "1st" in error.to_dict()
        assert "2nd" in error.to_dict()["1st"]
        assert "3rd" in error.to_dict()["1st"]["2nd"]
        assert "4th" in error.to_dict()["1st"]["2nd"]["3rd"]
        assert error.to_dict()["1st"]["2nd"]["3rd"]["4th"] == "Inception"

        self.assertEqual(error.message, "root(2nd.3rd.4th.Inception: ['1st'])")
        assert error.message == "root(2nd.3rd.4th.Inception: ['1st'])"

    def test_model_validation(self):

        class User(Document):
            username = StringField(primary_key=True)
            name = StringField(required=True)
@@ -58,67 +63,69 @@ class ValidatorErrorTest(unittest.TestCase):
        try:
            User().validate()
        except ValidationError as e:
            self.assertIn("User:None", e.message)
            self.assertEqual(e.to_dict(), {
                'username': 'Field is required',
                'name': 'Field is required'})
            assert "User:None" in e.message
            assert e.to_dict() == {
                "username": "Field is required",
                "name": "Field is required",
            }

        user = User(username="RossC0", name="Ross").save()
        user.name = None
        try:
            user.save()
        except ValidationError as e:
            self.assertIn("User:RossC0", e.message)
            self.assertEqual(e.to_dict(), {
                'name': 'Field is required'})
            assert "User:RossC0" in e.message
            assert e.to_dict() == {"name": "Field is required"}

    def test_fields_rewrite(self):
        class BasePerson(Document):
            name = StringField()
            age = IntField()
            meta = {'abstract': True}
            meta = {"abstract": True}

        class Person(BasePerson):
            name = StringField(required=True)

        p = Person(age=15)
        self.assertRaises(ValidationError, p.validate)
        with pytest.raises(ValidationError):
            p.validate()

    def test_embedded_document_validation(self):
        """Ensure that embedded documents may be validated.
        """

        class Comment(EmbeddedDocument):
            date = DateTimeField()
            content = StringField(required=True)

        comment = Comment()
        self.assertRaises(ValidationError, comment.validate)
        with pytest.raises(ValidationError):
            comment.validate()

        comment.content = 'test'
        comment.content = "test"
        comment.validate()

        comment.date = 4
        self.assertRaises(ValidationError, comment.validate)
        with pytest.raises(ValidationError):
            comment.validate()

        comment.date = datetime.now()
        comment.validate()
        self.assertEqual(comment._instance, None)
        assert comment._instance is None

    def test_embedded_db_field_validate(self):

        class SubDoc(EmbeddedDocument):
            val = IntField(required=True)

        class Doc(Document):
            id = StringField(primary_key=True)
            e = EmbeddedDocumentField(SubDoc, db_field='eb')
            e = EmbeddedDocumentField(SubDoc, db_field="eb")

        try:
            Doc(id="bad").validate()
        except ValidationError as e:
            self.assertIn("SubDoc:None", e.message)
            self.assertEqual(e.to_dict(), {
                "e": {'val': 'OK could not be converted to int'}})
            assert "SubDoc:None" in e.message
            assert e.to_dict() == {"e": {"val": "OK could not be converted to int"}}

        Doc.drop_collection()

@@ -126,25 +133,23 @@ class ValidatorErrorTest(unittest.TestCase):

        doc = Doc.objects.first()
        keys = doc._data.keys()
        self.assertEqual(2, len(keys))
        self.assertIn('e', keys)
        self.assertIn('id', keys)
        assert 2 == len(keys)
        assert "e" in keys
        assert "id" in keys

        doc.e.val = "OK"
        try:
            doc.save()
        except ValidationError as e:
            self.assertIn("Doc:test", e.message)
            self.assertEqual(e.to_dict(), {
                "e": {'val': 'OK could not be converted to int'}})
            assert "Doc:test" in e.message
            assert e.to_dict() == {"e": {"val": "OK could not be converted to int"}}

    def test_embedded_weakref(self):

        class SubDoc(EmbeddedDocument):
            val = IntField(required=True)

        class Doc(Document):
            e = EmbeddedDocumentField(SubDoc, db_field='eb')
            e = EmbeddedDocumentField(SubDoc, db_field="eb")

        Doc.drop_collection()

@@ -153,23 +158,26 @@ class ValidatorErrorTest(unittest.TestCase):

        s = SubDoc()

        self.assertRaises(ValidationError, s.validate)
        with pytest.raises(ValidationError):
            s.validate()

        d1.e = s
        d2.e = s

        del d1

        self.assertRaises(ValidationError, d2.validate)
        with pytest.raises(ValidationError):
            d2.validate()

    def test_parent_reference_in_child_document(self):
        """
        Test to ensure a ReferenceField can store a reference to a parent
        class when inherited. Issue #954.
        """

        class Parent(Document):
            meta = {'allow_inheritance': True}
            reference = ReferenceField('self')
            meta = {"allow_inheritance": True}
            reference = ReferenceField("self")

        class Child(Parent):
            pass
@@ -190,9 +198,10 @@ class ValidatorErrorTest(unittest.TestCase):
        Test to ensure a ReferenceField can store a reference to a parent
        class when inherited and when set via attribute. Issue #954.
        """

        class Parent(Document):
            meta = {'allow_inheritance': True}
            reference = ReferenceField('self')
            meta = {"allow_inheritance": True}
            reference = ReferenceField("self")

        class Child(Parent):
            pass
@@ -210,5 +219,5 @@ class ValidatorErrorTest(unittest.TestCase):
            self.fail("ValidationError raised: %s" % e.message)


if __name__ == '__main__':
if __name__ == "__main__":
    unittest.main()
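Most of the changes in this file follow one mechanical pattern: self.assertRaises(ValidationError, fn) becomes a pytest.raises context manager. A minimal sketch of that pattern, and of the nested to_dict() shape the test inspects (no database connection required):

import pytest
from mongoengine import ValidationError

def test_nested_error_to_dict():
    error = ValidationError("root")
    error.errors = {
        "1st": ValidationError("bad 1st", errors={"2nd": ValidationError("bad 2nd")})
    }
    # to_dict() flattens nested errors into plain dicts and message strings,
    # as the assertions above check level by level
    d = error.to_dict()
    assert d["1st"]["2nd"] == "bad 2nd"

def test_raises_pattern():
    # the post-migration idiom used throughout this commit
    with pytest.raises(ValidationError):
        raise ValidationError("boom")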
@@ -1,3 +0,0 @@
from .fields import *
from .file_tests import *
from .geo import *
@ -1,27 +1,28 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import uuid
|
||||
|
||||
from nose.plugins.skip import SkipTest
|
||||
import six
|
||||
|
||||
from bson import Binary
|
||||
import pytest
|
||||
|
||||
from mongoengine import *
|
||||
from tests.utils import MongoDBTestCase
|
||||
|
||||
BIN_VALUE = six.b('\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5')
|
||||
BIN_VALUE = "\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5".encode(
|
||||
"latin-1"
|
||||
)
|
||||
|
||||
|
||||
class TestBinaryField(MongoDBTestCase):
|
||||
def test_binary_fields(self):
|
||||
"""Ensure that binary fields can be stored and retrieved.
|
||||
"""
|
||||
|
||||
class Attachment(Document):
|
||||
content_type = StringField()
|
||||
blob = BinaryField()
|
||||
|
||||
BLOB = six.b('\xe6\x00\xc4\xff\x07')
|
||||
MIME_TYPE = 'application/octet-stream'
|
||||
BLOB = "\xe6\x00\xc4\xff\x07".encode("latin-1")
|
||||
MIME_TYPE = "application/octet-stream"
|
||||
|
||||
Attachment.drop_collection()
|
||||
|
||||
@ -29,12 +30,13 @@ class TestBinaryField(MongoDBTestCase):
|
||||
attachment.save()
|
||||
|
||||
attachment_1 = Attachment.objects().first()
|
||||
self.assertEqual(MIME_TYPE, attachment_1.content_type)
|
||||
self.assertEqual(BLOB, six.binary_type(attachment_1.blob))
|
||||
assert MIME_TYPE == attachment_1.content_type
|
||||
assert BLOB == bytes(attachment_1.blob)
|
||||
|
||||
def test_validation_succeeds(self):
|
||||
"""Ensure that valid values can be assigned to binary fields.
|
||||
"""
|
||||
|
||||
class AttachmentRequired(Document):
|
||||
blob = BinaryField(required=True)
|
||||
|
||||
@ -42,13 +44,15 @@ class TestBinaryField(MongoDBTestCase):
|
||||
blob = BinaryField(max_bytes=4)
|
||||
|
||||
attachment_required = AttachmentRequired()
|
||||
self.assertRaises(ValidationError, attachment_required.validate)
|
||||
attachment_required.blob = Binary(six.b('\xe6\x00\xc4\xff\x07'))
|
||||
with pytest.raises(ValidationError):
|
||||
attachment_required.validate()
|
||||
attachment_required.blob = Binary("\xe6\x00\xc4\xff\x07".encode("latin-1"))
|
||||
attachment_required.validate()
|
||||
|
||||
_5_BYTES = six.b('\xe6\x00\xc4\xff\x07')
|
||||
_4_BYTES = six.b('\xe6\x00\xc4\xff')
|
||||
self.assertRaises(ValidationError, AttachmentSizeLimit(blob=_5_BYTES).validate)
|
||||
_5_BYTES = "\xe6\x00\xc4\xff\x07".encode("latin-1")
|
||||
_4_BYTES = "\xe6\x00\xc4\xff".encode("latin-1")
|
||||
with pytest.raises(ValidationError):
|
||||
AttachmentSizeLimit(blob=_5_BYTES).validate()
|
||||
AttachmentSizeLimit(blob=_4_BYTES).validate()
|
||||
|
||||
def test_validation_fails(self):
|
||||
@ -57,8 +61,9 @@ class TestBinaryField(MongoDBTestCase):
|
||||
class Attachment(Document):
|
||||
blob = BinaryField()
|
||||
|
||||
for invalid_data in (2, u'Im_a_unicode', ['some_str']):
|
||||
self.assertRaises(ValidationError, Attachment(blob=invalid_data).validate)
|
||||
for invalid_data in (2, u"Im_a_unicode", ["some_str"]):
|
||||
with pytest.raises(ValidationError):
|
||||
Attachment(blob=invalid_data).validate()
|
||||
|
||||
def test__primary(self):
|
||||
class Attachment(Document):
|
||||
@ -67,23 +72,21 @@ class TestBinaryField(MongoDBTestCase):
Attachment.drop_collection()
binary_id = uuid.uuid4().bytes
att = Attachment(id=binary_id).save()
self.assertEqual(1, Attachment.objects.count())
self.assertEqual(1, Attachment.objects.filter(id=att.id).count())
assert 1 == Attachment.objects.count()
assert 1 == Attachment.objects.filter(id=att.id).count()
att.delete()
self.assertEqual(0, Attachment.objects.count())
assert 0 == Attachment.objects.count()

def test_primary_filter_by_binary_pk_as_str(self):
raise SkipTest("Querying by id as string is not currently supported")

class Attachment(Document):
id = BinaryField(primary_key=True)

Attachment.drop_collection()
binary_id = uuid.uuid4().bytes
att = Attachment(id=binary_id).save()
self.assertEqual(1, Attachment.objects.filter(id=binary_id).count())
assert 1 == Attachment.objects.filter(id=binary_id).count()
att.delete()
self.assertEqual(0, Attachment.objects.count())
assert 0 == Attachment.objects.count()

def test_match_querying_with_bytes(self):
class MyDocument(Document):
@ -93,7 +96,7 @@ class TestBinaryField(MongoDBTestCase):

doc = MyDocument(bin_field=BIN_VALUE).save()
matched_doc = MyDocument.objects(bin_field=BIN_VALUE).first()
self.assertEqual(matched_doc.id, doc.id)
assert matched_doc.id == doc.id

def test_match_querying_with_binary(self):
class MyDocument(Document):
@ -104,40 +107,37 @@ class TestBinaryField(MongoDBTestCase):
doc = MyDocument(bin_field=BIN_VALUE).save()

matched_doc = MyDocument.objects(bin_field=Binary(BIN_VALUE)).first()
self.assertEqual(matched_doc.id, doc.id)
assert matched_doc.id == doc.id

def test_modify_operation__set(self):
"""Ensures no regression of bug #1127"""

class MyDocument(Document):
some_field = StringField()
bin_field = BinaryField()

MyDocument.drop_collection()

doc = MyDocument.objects(some_field='test').modify(
upsert=True, new=True,
set__bin_field=BIN_VALUE
doc = MyDocument.objects(some_field="test").modify(
upsert=True, new=True, set__bin_field=BIN_VALUE
)
self.assertEqual(doc.some_field, 'test')
if six.PY3:
self.assertEqual(doc.bin_field, BIN_VALUE)
else:
self.assertEqual(doc.bin_field, Binary(BIN_VALUE))
assert doc.some_field == "test"
assert doc.bin_field == BIN_VALUE

def test_update_one(self):
"""Ensures no regression of bug #1127"""

class MyDocument(Document):
bin_field = BinaryField()

MyDocument.drop_collection()

bin_data = six.b('\xe6\x00\xc4\xff\x07')
bin_data = "\xe6\x00\xc4\xff\x07".encode("latin-1")
doc = MyDocument(bin_field=bin_data).save()

n_updated = MyDocument.objects(bin_field=bin_data).update_one(bin_field=BIN_VALUE)
self.assertEqual(n_updated, 1)
n_updated = MyDocument.objects(bin_field=bin_data).update_one(
bin_field=BIN_VALUE
)
assert n_updated == 1
fetched = MyDocument.objects.with_id(doc.id)
if six.PY3:
self.assertEqual(fetched.bin_field, BIN_VALUE)
else:
self.assertEqual(fetched.bin_field, Binary(BIN_VALUE))
assert fetched.bin_field == BIN_VALUE

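The conversions in this file follow one mechanical pattern: self.assertEqual(a, b) becomes assert a == b, and (in the files below) self.assertRaises(Err, fn) becomes a pytest.raises context manager. A minimal, self-contained sketch of the target style, reusing the Attachment model from the hunks above; the connect() call and its database name are illustrative, not part of the diff:

    import uuid

    import pytest
    from mongoengine import BinaryField, Document, ValidationError, connect

    connect("mongoenginetest")  # illustrative database name

    class Attachment(Document):
        id = BinaryField(primary_key=True)

    Attachment.drop_collection()
    att = Attachment(id=uuid.uuid4().bytes).save()

    # assertEqual(x, y) -> assert x == y
    assert Attachment.objects.filter(id=att.id).count() == 1

    # assertRaises(Err, fn) -> with pytest.raises(Err): ...
    # (assumes BinaryField rejects non-bytes values on validate())
    with pytest.raises(ValidationError):
        Attachment(id=1).validate()
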
@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
from mongoengine import *
import pytest

from mongoengine import *
from tests.utils import MongoDBTestCase, get_as_pymongo


@ -11,15 +12,13 @@ class TestBooleanField(MongoDBTestCase):

person = Person(admin=True)
person.save()
self.assertEqual(
get_as_pymongo(person),
{'_id': person.id,
'admin': True})
assert get_as_pymongo(person) == {"_id": person.id, "admin": True}

def test_validation(self):
"""Ensure that invalid values cannot be assigned to boolean
fields.
"""

class Person(Document):
admin = BooleanField()

@ -28,22 +27,26 @@ class TestBooleanField(MongoDBTestCase):
person.validate()

person.admin = 2
self.assertRaises(ValidationError, person.validate)
person.admin = 'Yes'
self.assertRaises(ValidationError, person.validate)
person.admin = 'False'
self.assertRaises(ValidationError, person.validate)
with pytest.raises(ValidationError):
person.validate()
person.admin = "Yes"
with pytest.raises(ValidationError):
person.validate()
person.admin = "False"
with pytest.raises(ValidationError):
person.validate()

def test_weirdness_constructor(self):
"""When the attribute is set in the constructor, it gets cast into a bool,
which causes some weird behavior. We don't necessarily want to maintain this behavior,
but it's a known issue
"""

class Person(Document):
admin = BooleanField()

new_person = Person(admin='False')
self.assertTrue(new_person.admin)
new_person = Person(admin="False")
assert new_person.admin

new_person = Person(admin='0')
self.assertTrue(new_person.admin)
new_person = Person(admin="0")
assert new_person.admin

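The constructor quirk documented above comes from BooleanField coercing constructor arguments with bool(), so any non-empty string (including "False" and "0") comes out truthy, while the same value assigned to the attribute afterwards fails validation. A minimal sketch restating the two tests:

    import pytest
    from mongoengine import BooleanField, Document, ValidationError

    class Person(Document):
        admin = BooleanField()

    # Constructor values are cast via bool(): truthy strings become True.
    assert Person(admin="False").admin
    assert Person(admin="0").admin

    # Plain attribute assignment skips the cast, so validate() rejects it.
    person = Person(admin=True)
    person.admin = "False"
    with pytest.raises(ValidationError):
        person.validate()
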
@ -1,18 +1,19 @@
# -*- coding: utf-8 -*-
from decimal import Decimal

from mongoengine import *
import pytest

from mongoengine import *
from tests.utils import MongoDBTestCase


class TestCachedReferenceField(MongoDBTestCase):

def test_get_and_save(self):
"""
Tests #1047: CachedReferenceField creates DBRefs on to_python,
but can't save them on to_mongo.
"""

class Animal(Document):
name = StringField()
tag = StringField()
@ -24,10 +25,11 @@ class TestCachedReferenceField(MongoDBTestCase):
Animal.drop_collection()
Ocorrence.drop_collection()

Ocorrence(person="testte",
animal=Animal(name="Leopard", tag="heavy").save()).save()
Ocorrence(
person="testte", animal=Animal(name="Leopard", tag="heavy").save()
).save()
p = Ocorrence.objects.get()
p.person = 'new_testte'
p.person = "new_testte"
p.save()

def test_general_things(self):
@ -37,8 +39,7 @@ class TestCachedReferenceField(MongoDBTestCase):

class Ocorrence(Document):
person = StringField()
animal = CachedReferenceField(
Animal, fields=['tag'])
animal = CachedReferenceField(Animal, fields=["tag"])

Animal.drop_collection()
Ocorrence.drop_collection()
@ -46,30 +47,29 @@ class TestCachedReferenceField(MongoDBTestCase):
a = Animal(name="Leopard", tag="heavy")
a.save()

self.assertEqual(Animal._cached_reference_fields, [Ocorrence.animal])
assert Animal._cached_reference_fields == [Ocorrence.animal]
o = Ocorrence(person="teste", animal=a)
o.save()

p = Ocorrence(person="Wilson")
p.save()

self.assertEqual(Ocorrence.objects(animal=None).count(), 1)
assert Ocorrence.objects(animal=None).count() == 1

self.assertEqual(
a.to_mongo(fields=['tag']), {'tag': 'heavy', "_id": a.pk})
assert a.to_mongo(fields=["tag"]) == {"tag": "heavy", "_id": a.pk}

self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy')
assert o.to_mongo()["animal"]["tag"] == "heavy"

# counts
Ocorrence(person="teste 2").save()
Ocorrence(person="teste 3").save()

count = Ocorrence.objects(animal__tag='heavy').count()
self.assertEqual(count, 1)
count = Ocorrence.objects(animal__tag="heavy").count()
assert count == 1

ocorrence = Ocorrence.objects(animal__tag='heavy').first()
self.assertEqual(ocorrence.person, "teste")
self.assertIsInstance(ocorrence.animal, Animal)
ocorrence = Ocorrence.objects(animal__tag="heavy").first()
assert ocorrence.person == "teste"
assert isinstance(ocorrence.animal, Animal)

def test_with_decimal(self):
class PersonAuto(Document):
@ -78,28 +78,22 @@ class TestCachedReferenceField(MongoDBTestCase):

class SocialTest(Document):
group = StringField()
person = CachedReferenceField(
PersonAuto,
fields=('salary',))
person = CachedReferenceField(PersonAuto, fields=("salary",))

PersonAuto.drop_collection()
SocialTest.drop_collection()

p = PersonAuto(name="Alberto", salary=Decimal('7000.00'))
p = PersonAuto(name="Alberto", salary=Decimal("7000.00"))
p.save()

s = SocialTest(group="dev", person=p)
s.save()

self.assertEqual(
SocialTest.objects._collection.find_one({'person.salary': 7000.00}), {
'_id': s.pk,
'group': s.group,
'person': {
'_id': p.pk,
'salary': 7000.00
}
})
assert SocialTest.objects._collection.find_one({"person.salary": 7000.00}) == {
"_id": s.pk,
"group": s.group,
"person": {"_id": p.pk, "salary": 7000.00},
}

def test_cached_reference_field_reference(self):
class Group(Document):
@ -111,17 +105,14 @@ class TestCachedReferenceField(MongoDBTestCase):

class SocialData(Document):
obs = StringField()
tags = ListField(
StringField())
person = CachedReferenceField(
Person,
fields=('group',))
tags = ListField(StringField())
person = CachedReferenceField(Person, fields=("group",))

Group.drop_collection()
Person.drop_collection()
SocialData.drop_collection()

g1 = Group(name='dev')
g1 = Group(name="dev")
g1.save()

g2 = Group(name="designers")
@ -136,25 +127,21 @@ class TestCachedReferenceField(MongoDBTestCase):
p3 = Person(name="Afro design", group=g2)
p3.save()

s1 = SocialData(obs="testing 123", person=p1, tags=['tag1', 'tag2'])
s1 = SocialData(obs="testing 123", person=p1, tags=["tag1", "tag2"])
s1.save()

s2 = SocialData(obs="testing 321", person=p3, tags=['tag3', 'tag4'])
s2 = SocialData(obs="testing 321", person=p3, tags=["tag3", "tag4"])
s2.save()

self.assertEqual(SocialData.objects._collection.find_one(
{'tags': 'tag2'}), {
'_id': s1.pk,
'obs': 'testing 123',
'tags': ['tag1', 'tag2'],
'person': {
'_id': p1.pk,
'group': g1.pk
}
})
assert SocialData.objects._collection.find_one({"tags": "tag2"}) == {
"_id": s1.pk,
"obs": "testing 123",
"tags": ["tag1", "tag2"],
"person": {"_id": p1.pk, "group": g1.pk},
}

self.assertEqual(SocialData.objects(person__group=g2).count(), 1)
self.assertEqual(SocialData.objects(person__group=g2).first(), s2)
assert SocialData.objects(person__group=g2).count() == 1
assert SocialData.objects(person__group=g2).first() == s2

def test_cached_reference_field_push_with_fields(self):
class Product(Document):
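These tests pivot on what CachedReferenceField denormalizes: only the fields named in fields= are copied into the referencing document, next to the referent's _id. A minimal sketch of the resulting raw document, mirroring the assertions above (assumes an active connection, as in the test suite):

    from mongoengine import CachedReferenceField, Document, StringField

    class Animal(Document):
        name = StringField()
        tag = StringField()

    class Ocorrence(Document):
        person = StringField()
        # Only "tag" (plus _id) is copied onto the referencing document.
        animal = CachedReferenceField(Animal, fields=["tag"])

    a = Animal(name="Leopard", tag="heavy").save()
    o = Ocorrence(person="teste", animal=a).save()

    # The denormalized copy lives inside the Ocorrence document, so a
    # filter like animal__tag="heavy" runs without dereferencing.
    assert o.to_mongo()["animal"] == {"_id": a.pk, "tag": "heavy"}
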
@ -163,185 +150,136 @@ class TestCachedReferenceField(MongoDBTestCase):
Product.drop_collection()

class Basket(Document):
products = ListField(CachedReferenceField(Product, fields=['name']))
products = ListField(CachedReferenceField(Product, fields=["name"]))

Basket.drop_collection()
product1 = Product(name='abc').save()
product2 = Product(name='def').save()
product1 = Product(name="abc").save()
product2 = Product(name="def").save()
basket = Basket(products=[product1]).save()
self.assertEqual(
Basket.objects._collection.find_one(),
{
'_id': basket.pk,
'products': [
{
'_id': product1.pk,
'name': product1.name
}
]
}
)
assert Basket.objects._collection.find_one() == {
"_id": basket.pk,
"products": [{"_id": product1.pk, "name": product1.name}],
}
# push to list
basket.update(push__products=product2)
basket.reload()
self.assertEqual(
Basket.objects._collection.find_one(),
{
'_id': basket.pk,
'products': [
{
'_id': product1.pk,
'name': product1.name
},
{
'_id': product2.pk,
'name': product2.name
}
]
}
)
assert Basket.objects._collection.find_one() == {
"_id": basket.pk,
"products": [
{"_id": product1.pk, "name": product1.name},
{"_id": product2.pk, "name": product2.name},
],
}

def test_cached_reference_field_update_all(self):
class Person(Document):
TYPES = (
('pf', "PF"),
('pj', "PJ")
)
TYPES = (("pf", "PF"), ("pj", "PJ"))
name = StringField()
tp = StringField(choices=TYPES)
father = CachedReferenceField('self', fields=('tp',))
father = CachedReferenceField("self", fields=("tp",))

Person.drop_collection()

a1 = Person(name="Wilson Father", tp="pj")
a1.save()

a2 = Person(name='Wilson Junior', tp='pf', father=a1)
a2 = Person(name="Wilson Junior", tp="pf", father=a1)
a2.save()

a2 = Person.objects.with_id(a2.id)
self.assertEqual(a2.father.tp, a1.tp)
assert a2.father.tp == a1.tp

self.assertEqual(dict(a2.to_mongo()), {
assert dict(a2.to_mongo()) == {
"_id": a2.pk,
"name": u"Wilson Junior",
"tp": u"pf",
"father": {
"_id": a1.pk,
"tp": u"pj"
}
})
"father": {"_id": a1.pk, "tp": u"pj"},
}

self.assertEqual(Person.objects(father=a1)._query, {
'father._id': a1.pk
})
self.assertEqual(Person.objects(father=a1).count(), 1)
assert Person.objects(father=a1)._query == {"father._id": a1.pk}
assert Person.objects(father=a1).count() == 1

Person.objects.update(set__tp="pf")
Person.father.sync_all()

a2.reload()
self.assertEqual(dict(a2.to_mongo()), {
assert dict(a2.to_mongo()) == {
"_id": a2.pk,
"name": u"Wilson Junior",
"tp": u"pf",
"father": {
"_id": a1.pk,
"tp": u"pf"
}
})
"father": {"_id": a1.pk, "tp": u"pf"},
}

def test_cached_reference_fields_on_embedded_documents(self):
with self.assertRaises(InvalidDocumentError):
with pytest.raises(InvalidDocumentError):

class Test(Document):
name = StringField()

type('WrongEmbeddedDocument', (
EmbeddedDocument,), {
'test': CachedReferenceField(Test)
})
type(
"WrongEmbeddedDocument",
(EmbeddedDocument,),
{"test": CachedReferenceField(Test)},
)

def test_cached_reference_auto_sync(self):
class Person(Document):
TYPES = (
('pf', "PF"),
('pj', "PJ")
)
TYPES = (("pf", "PF"), ("pj", "PJ"))
name = StringField()
tp = StringField(
choices=TYPES
)
tp = StringField(choices=TYPES)

father = CachedReferenceField('self', fields=('tp',))
father = CachedReferenceField("self", fields=("tp",))

Person.drop_collection()

a1 = Person(name="Wilson Father", tp="pj")
a1.save()

a2 = Person(name='Wilson Junior', tp='pf', father=a1)
a2 = Person(name="Wilson Junior", tp="pf", father=a1)
a2.save()

a1.tp = 'pf'
a1.tp = "pf"
a1.save()

a2.reload()
self.assertEqual(dict(a2.to_mongo()), {
'_id': a2.pk,
'name': 'Wilson Junior',
'tp': 'pf',
'father': {
'_id': a1.pk,
'tp': 'pf'
}
})
assert dict(a2.to_mongo()) == {
"_id": a2.pk,
"name": "Wilson Junior",
"tp": "pf",
"father": {"_id": a1.pk, "tp": "pf"},
}

def test_cached_reference_auto_sync_disabled(self):
class Persone(Document):
TYPES = (
('pf', "PF"),
('pj', "PJ")
)
TYPES = (("pf", "PF"), ("pj", "PJ"))
name = StringField()
tp = StringField(
choices=TYPES
)
tp = StringField(choices=TYPES)

father = CachedReferenceField(
'self', fields=('tp',), auto_sync=False)
father = CachedReferenceField("self", fields=("tp",), auto_sync=False)

Persone.drop_collection()

a1 = Persone(name="Wilson Father", tp="pj")
a1.save()

a2 = Persone(name='Wilson Junior', tp='pf', father=a1)
a2 = Persone(name="Wilson Junior", tp="pf", father=a1)
a2.save()

a1.tp = 'pf'
a1.tp = "pf"
a1.save()

self.assertEqual(Persone.objects._collection.find_one({'_id': a2.pk}), {
'_id': a2.pk,
'name': 'Wilson Junior',
'tp': 'pf',
'father': {
'_id': a1.pk,
'tp': 'pj'
}
})
assert Persone.objects._collection.find_one({"_id": a2.pk}) == {
"_id": a2.pk,
"name": "Wilson Junior",
"tp": "pf",
"father": {"_id": a1.pk, "tp": "pj"},
}

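The auto-sync tests above pin down when the cached copy is refreshed: save() on the referent rewrites the copies by default, auto_sync=False leaves them stale, and sync_all() is the bulk repair used after QuerySet.update(), which bypasses save(). A sketch of the stale-then-repair flow, assuming sync_all() also applies when auto_sync is disabled:

    from mongoengine import CachedReferenceField, Document, StringField

    class Persone(Document):
        name = StringField()
        tp = StringField()
        father = CachedReferenceField("self", fields=("tp",), auto_sync=False)

    # assumes an active connection, as in the test suite
    a1 = Persone(name="Wilson Father", tp="pj").save()
    a2 = Persone(name="Wilson Junior", tp="pf", father=a1).save()

    a1.tp = "pf"
    a1.save()  # with auto_sync=False, a2's cached copy still says "pj"

    # Bulk repair: rewrite every cached copy from the current referents.
    Persone.father.sync_all()
    a2.reload()
    assert a2.to_mongo()["father"]["tp"] == "pf"
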
def test_cached_reference_embedded_fields(self):
class Owner(EmbeddedDocument):
TPS = (
('n', "Normal"),
('u', "Urgent")
)
TPS = (("n", "Normal"), ("u", "Urgent"))
name = StringField()
tp = StringField(
verbose_name="Type",
db_field="t",
choices=TPS)
tp = StringField(verbose_name="Type", db_field="t", choices=TPS)

class Animal(Document):
name = StringField()
@ -351,45 +289,41 @@ class TestCachedReferenceField(MongoDBTestCase):

class Ocorrence(Document):
person = StringField()
animal = CachedReferenceField(
Animal, fields=['tag', 'owner.tp'])
animal = CachedReferenceField(Animal, fields=["tag", "owner.tp"])

Animal.drop_collection()
Ocorrence.drop_collection()

a = Animal(name="Leopard", tag="heavy",
owner=Owner(tp='u', name="Wilson Júnior")
)
a = Animal(
name="Leopard", tag="heavy", owner=Owner(tp="u", name="Wilson Júnior")
)
a.save()

o = Ocorrence(person="teste", animal=a)
o.save()
self.assertEqual(dict(a.to_mongo(fields=['tag', 'owner.tp'])), {
'_id': a.pk,
'tag': 'heavy',
'owner': {
't': 'u'
}
})
self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy')
self.assertEqual(o.to_mongo()['animal']['owner']['t'], 'u')
assert dict(a.to_mongo(fields=["tag", "owner.tp"])) == {
"_id": a.pk,
"tag": "heavy",
"owner": {"t": "u"},
}
assert o.to_mongo()["animal"]["tag"] == "heavy"
assert o.to_mongo()["animal"]["owner"]["t"] == "u"

# Check to_mongo with fields
self.assertNotIn('animal', o.to_mongo(fields=['person']))
assert "animal" not in o.to_mongo(fields=["person"])

# counts
Ocorrence(person="teste 2").save()
Ocorrence(person="teste 3").save()

count = Ocorrence.objects(
animal__tag='heavy', animal__owner__tp='u').count()
self.assertEqual(count, 1)
count = Ocorrence.objects(animal__tag="heavy", animal__owner__tp="u").count()
assert count == 1

ocorrence = Ocorrence.objects(
animal__tag='heavy',
animal__owner__tp='u').first()
self.assertEqual(ocorrence.person, "teste")
self.assertIsInstance(ocorrence.animal, Animal)
animal__tag="heavy", animal__owner__tp="u"
).first()
assert ocorrence.person == "teste"
assert isinstance(ocorrence.animal, Animal)

def test_cached_reference_embedded_list_fields(self):
class Owner(EmbeddedDocument):
@ -404,43 +338,40 @@ class TestCachedReferenceField(MongoDBTestCase):

class Ocorrence(Document):
person = StringField()
animal = CachedReferenceField(
Animal, fields=['tag', 'owner.tags'])
animal = CachedReferenceField(Animal, fields=["tag", "owner.tags"])

Animal.drop_collection()
Ocorrence.drop_collection()

a = Animal(name="Leopard", tag="heavy",
owner=Owner(tags=['cool', 'funny'],
name="Wilson Júnior")
)
a = Animal(
name="Leopard",
tag="heavy",
owner=Owner(tags=["cool", "funny"], name="Wilson Júnior"),
)
a.save()

o = Ocorrence(person="teste 2", animal=a)
o.save()
self.assertEqual(dict(a.to_mongo(fields=['tag', 'owner.tags'])), {
'_id': a.pk,
'tag': 'heavy',
'owner': {
'tags': ['cool', 'funny']
}
})
assert dict(a.to_mongo(fields=["tag", "owner.tags"])) == {
"_id": a.pk,
"tag": "heavy",
"owner": {"tags": ["cool", "funny"]},
}

self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy')
self.assertEqual(o.to_mongo()['animal']['owner']['tags'],
['cool', 'funny'])
assert o.to_mongo()["animal"]["tag"] == "heavy"
assert o.to_mongo()["animal"]["owner"]["tags"] == ["cool", "funny"]

# counts
Ocorrence(person="teste 2").save()
Ocorrence(person="teste 3").save()

query = Ocorrence.objects(
animal__tag='heavy', animal__owner__tags='cool')._query
self.assertEqual(
query, {'animal.owner.tags': 'cool', 'animal.tag': 'heavy'})
animal__tag="heavy", animal__owner__tags="cool"
)._query
assert query == {"animal.owner.tags": "cool", "animal.tag": "heavy"}

ocorrence = Ocorrence.objects(
animal__tag='heavy',
animal__owner__tags='cool').first()
self.assertEqual(ocorrence.person, "teste 2")
self.assertIsInstance(ocorrence.animal, Animal)
animal__tag="heavy", animal__owner__tags="cool"
).first()
assert ocorrence.person == "teste 2"
assert isinstance(ocorrence.animal, Animal)

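The fields=["tag", "owner.tp"] form exercised above caches a nested subset of the referenced document, and the cached copy is keyed by db_field names ("t" for tp), which is also what the raw queries match on. A minimal sketch restating what the assertions verify:

    from mongoengine import (CachedReferenceField, Document, EmbeddedDocument,
                             EmbeddedDocumentField, StringField)

    class Owner(EmbeddedDocument):
        name = StringField()
        tp = StringField(db_field="t")

    class Animal(Document):
        tag = StringField()
        owner = EmbeddedDocumentField(Owner)

    class Ocorrence(Document):
        animal = CachedReferenceField(Animal, fields=["tag", "owner.tp"])

    # assumes an active connection, as in the test suite
    a = Animal(tag="heavy", owner=Owner(tp="u")).save()
    o = Ocorrence(animal=a).save()

    # The cached subset stores the db_field name "t", not the
    # attribute name "tp".
    assert o.to_mongo()["animal"]["owner"] == {"t": "u"}
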
@ -1,9 +1,11 @@
# -*- coding: utf-8 -*-
import datetime
import math
import itertools
import math
import re

import pytest

from mongoengine import *

from tests.utils import MongoDBTestCase
@ -14,9 +16,10 @@ class ComplexDateTimeFieldTest(MongoDBTestCase):
"""Tests for complex datetime fields - which can handle
microseconds without rounding.
"""

class LogEntry(Document):
date = ComplexDateTimeField()
date_with_dots = ComplexDateTimeField(separator='.')
date_with_dots = ComplexDateTimeField(separator=".")

LogEntry.drop_collection()

@ -27,7 +30,7 @@ class ComplexDateTimeFieldTest(MongoDBTestCase):
log.date = d1
log.save()
log.reload()
self.assertEqual(log.date, d1)
assert log.date == d1

# Post UTC - microseconds are rounded (down) nearest millisecond - with
# default datetimefields
@ -35,7 +38,7 @@ class ComplexDateTimeFieldTest(MongoDBTestCase):
log.date = d1
log.save()
log.reload()
self.assertEqual(log.date, d1)
assert log.date == d1

# Pre UTC dates microseconds below 1000 are dropped - with default
# datetimefields
@ -43,7 +46,7 @@ class ComplexDateTimeFieldTest(MongoDBTestCase):
log.date = d1
log.save()
log.reload()
self.assertEqual(log.date, d1)
assert log.date == d1

# Pre UTC microseconds above 1000 is wonky - with default datetimefields
# log.date has an invalid microsecond value so I can't construct
@ -53,26 +56,34 @@ class ComplexDateTimeFieldTest(MongoDBTestCase):
log.date = d1
log.save()
log.reload()
self.assertEqual(log.date, d1)
assert log.date == d1
log1 = LogEntry.objects.get(date=d1)
self.assertEqual(log, log1)
assert log == log1

# Test string padding
microsecond = map(int, [math.pow(10, x) for x in range(6)])
mm = dd = hh = ii = ss = [1, 10]

for values in itertools.product([2014], mm, dd, hh, ii, ss, microsecond):
stored = LogEntry(date=datetime.datetime(*values)).to_mongo()['date']
self.assertTrue(re.match('^\d{4},\d{2},\d{2},\d{2},\d{2},\d{2},\d{6}$', stored) is not None)
stored = LogEntry(date=datetime.datetime(*values)).to_mongo()["date"]
assert (
re.match(r"^\d{4},\d{2},\d{2},\d{2},\d{2},\d{2},\d{6}$", stored)
is not None
)

# Test separator
stored = LogEntry(date_with_dots=datetime.datetime(2014, 1, 1)).to_mongo()['date_with_dots']
self.assertTrue(re.match('^\d{4}.\d{2}.\d{2}.\d{2}.\d{2}.\d{2}.\d{6}$', stored) is not None)
stored = LogEntry(date_with_dots=datetime.datetime(2014, 1, 1)).to_mongo()[
"date_with_dots"
]
assert (
re.match(r"^\d{4}.\d{2}.\d{2}.\d{2}.\d{2}.\d{2}.\d{6}$", stored) is not None
)

def test_complexdatetime_usage(self):
"""Tests for complex datetime fields - which can handle
microseconds without rounding.
"""

class LogEntry(Document):
date = ComplexDateTimeField()

@ -84,62 +95,61 @@ class ComplexDateTimeFieldTest(MongoDBTestCase):
log.save()

log1 = LogEntry.objects.get(date=d1)
self.assertEqual(log, log1)
assert log == log1

# create extra 59 log entries for a total of 60
for i in range(1951, 2010):
d = datetime.datetime(i, 1, 1, 0, 0, 1, 999)
LogEntry(date=d).save()

self.assertEqual(LogEntry.objects.count(), 60)
assert LogEntry.objects.count() == 60

# Test ordering
logs = LogEntry.objects.order_by("date")
i = 0
while i < 59:
self.assertTrue(logs[i].date <= logs[i + 1].date)
assert logs[i].date <= logs[i + 1].date
i += 1

logs = LogEntry.objects.order_by("-date")
i = 0
while i < 59:
self.assertTrue(logs[i].date >= logs[i + 1].date)
assert logs[i].date >= logs[i + 1].date
i += 1

# Test searching
logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1))
self.assertEqual(logs.count(), 30)
assert logs.count() == 30

logs = LogEntry.objects.filter(date__lte=datetime.datetime(1980, 1, 1))
self.assertEqual(logs.count(), 30)
assert logs.count() == 30

logs = LogEntry.objects.filter(
date__lte=datetime.datetime(2011, 1, 1),
date__gte=datetime.datetime(2000, 1, 1),
)
self.assertEqual(logs.count(), 10)
assert logs.count() == 10

LogEntry.drop_collection()

# Test microsecond-level ordering/filtering
for microsecond in (99, 999, 9999, 10000):
LogEntry(
date=datetime.datetime(2015, 1, 1, 0, 0, 0, microsecond)
).save()
LogEntry(date=datetime.datetime(2015, 1, 1, 0, 0, 0, microsecond)).save()

logs = list(LogEntry.objects.order_by('date'))
logs = list(LogEntry.objects.order_by("date"))
for next_idx, log in enumerate(logs[:-1], start=1):
next_log = logs[next_idx]
self.assertTrue(log.date < next_log.date)
assert log.date < next_log.date

logs = list(LogEntry.objects.order_by('-date'))
logs = list(LogEntry.objects.order_by("-date"))
for next_idx, log in enumerate(logs[:-1], start=1):
next_log = logs[next_idx]
self.assertTrue(log.date > next_log.date)
assert log.date > next_log.date

logs = LogEntry.objects.filter(
date__lte=datetime.datetime(2015, 1, 1, 0, 0, 0, 10000))
self.assertEqual(logs.count(), 4)
date__lte=datetime.datetime(2015, 1, 1, 0, 0, 0, 10000)
)
assert logs.count() == 4

def test_no_default_value(self):
class Log(Document):
@ -148,25 +158,26 @@ class ComplexDateTimeFieldTest(MongoDBTestCase):
Log.drop_collection()

log = Log()
self.assertIsNone(log.timestamp)
assert log.timestamp is None
log.save()

fetched_log = Log.objects.with_id(log.id)
self.assertIsNone(fetched_log.timestamp)
assert fetched_log.timestamp is None

def test_default_static_value(self):
NOW = datetime.datetime.utcnow()

class Log(Document):
timestamp = ComplexDateTimeField(default=NOW)

Log.drop_collection()

log = Log()
self.assertEqual(log.timestamp, NOW)
assert log.timestamp == NOW
log.save()

fetched_log = Log.objects.with_id(log.id)
self.assertEqual(fetched_log.timestamp, NOW)
assert fetched_log.timestamp == NOW

def test_default_callable(self):
NOW = datetime.datetime.utcnow()
@ -177,8 +188,23 @@ class ComplexDateTimeFieldTest(MongoDBTestCase):
Log.drop_collection()

log = Log()
self.assertGreaterEqual(log.timestamp, NOW)
assert log.timestamp >= NOW
log.save()

fetched_log = Log.objects.with_id(log.id)
self.assertGreaterEqual(fetched_log.timestamp, NOW)
assert fetched_log.timestamp >= NOW

def test_setting_bad_value_does_not_raise_unless_validate_is_called(self):
# test regression of #2253

class Log(Document):
timestamp = ComplexDateTimeField()

Log.drop_collection()

log = Log(timestamp="garbage")
with pytest.raises(ValidationError):
log.validate()

with pytest.raises(ValidationError):
log.save()

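These tests rely on ComplexDateTimeField storing datetimes as separator-joined, zero-padded strings rather than BSON dates, which is why string ordering matches chronological ordering and microseconds survive the round trip. A minimal sketch of the storage format the padding regex above checks:

    import datetime

    from mongoengine import ComplexDateTimeField, Document

    class LogEntry(Document):
        date = ComplexDateTimeField()  # stored as a string, not a BSON date

    entry = LogEntry(date=datetime.datetime(2014, 1, 1, 0, 0, 0, 1))
    # Every component is zero-padded, so lexicographic order equals
    # chronological order and microseconds are preserved exactly.
    assert entry.to_mongo()["date"] == "2014,01,01,00,00,00,000001"
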
@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
import datetime
import six

import pytest

try:
import dateutil
@ -8,7 +9,6 @@ except ImportError:
dateutil = None

from mongoengine import *

from tests.utils import MongoDBTestCase


@ -18,41 +18,47 @@ class TestDateField(MongoDBTestCase):
Ensure an exception is raised when trying to
cast an empty string to datetime.
"""

class MyDoc(Document):
dt = DateField()

md = MyDoc(dt='')
self.assertRaises(ValidationError, md.save)
md = MyDoc(dt="")
with pytest.raises(ValidationError):
md.save()

def test_date_from_whitespace_string(self):
"""
Ensure an exception is raised when trying to
cast a whitespace-only string to datetime.
"""

class MyDoc(Document):
dt = DateField()

md = MyDoc(dt=' ')
self.assertRaises(ValidationError, md.save)
md = MyDoc(dt=" ")
with pytest.raises(ValidationError):
md.save()

def test_default_values_today(self):
"""Ensure that default field values are used when creating
a document.
"""

class Person(Document):
day = DateField(default=datetime.date.today)

person = Person()
person.validate()
self.assertEqual(person.day, person.day)
self.assertEqual(person.day, datetime.date.today())
self.assertEqual(person._data['day'], person.day)
assert person.day == person.day
assert person.day == datetime.date.today()
assert person._data["day"] == person.day

def test_date(self):
"""Tests showing pymongo date fields

See: http://api.mongodb.org/python/current/api/bson/son.html#dt
"""

class LogEntry(Document):
date = DateField()

@ -63,7 +69,7 @@ class TestDateField(MongoDBTestCase):
log.date = datetime.date.today()
log.save()
log.reload()
self.assertEqual(log.date, datetime.date.today())
assert log.date == datetime.date.today()

d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999)
d2 = datetime.datetime(1970, 1, 1, 0, 0, 1)
@ -71,30 +77,20 @@ class TestDateField(MongoDBTestCase):
log.date = d1
log.save()
log.reload()
self.assertEqual(log.date, d1.date())
self.assertEqual(log.date, d2.date())
assert log.date == d1.date()
assert log.date == d2.date()

d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9999)
d2 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9000)
log.date = d1
log.save()
log.reload()
self.assertEqual(log.date, d1.date())
self.assertEqual(log.date, d2.date())

if not six.PY3:
# Pre UTC dates microseconds below 1000 are dropped
# This does not seem to be true in PY3
d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999)
d2 = datetime.datetime(1969, 12, 31, 23, 59, 59)
log.date = d1
log.save()
log.reload()
self.assertEqual(log.date, d1.date())
self.assertEqual(log.date, d2.date())
assert log.date == d1.date()
assert log.date == d2.date()

def test_regular_usage(self):
"""Tests for regular datetime fields"""

class LogEntry(Document):
date = DateField()

@ -106,42 +102,43 @@ class TestDateField(MongoDBTestCase):
log.validate()
log.save()

for query in (d1, d1.isoformat(' ')):
for query in (d1, d1.isoformat(" ")):
log1 = LogEntry.objects.get(date=query)
self.assertEqual(log, log1)
assert log == log1

if dateutil:
log1 = LogEntry.objects.get(date=d1.isoformat('T'))
self.assertEqual(log, log1)
log1 = LogEntry.objects.get(date=d1.isoformat("T"))
assert log == log1

# create additional 19 log entries for a total of 20
for i in range(1971, 1990):
d = datetime.datetime(i, 1, 1, 0, 0, 1)
LogEntry(date=d).save()

self.assertEqual(LogEntry.objects.count(), 20)
assert LogEntry.objects.count() == 20

# Test ordering
logs = LogEntry.objects.order_by("date")
i = 0
while i < 19:
self.assertTrue(logs[i].date <= logs[i + 1].date)
assert logs[i].date <= logs[i + 1].date
i += 1

logs = LogEntry.objects.order_by("-date")
i = 0
while i < 19:
self.assertTrue(logs[i].date >= logs[i + 1].date)
assert logs[i].date >= logs[i + 1].date
i += 1

# Test searching
logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1))
self.assertEqual(logs.count(), 10)
assert logs.count() == 10

def test_validation(self):
"""Ensure that invalid values cannot be assigned to datetime
fields.
"""

class LogEntry(Document):
time = DateField()

@ -152,14 +149,16 @@ class TestDateField(MongoDBTestCase):
log.time = datetime.date.today()
log.validate()

log.time = datetime.datetime.now().isoformat(' ')
log.time = datetime.datetime.now().isoformat(" ")
log.validate()

if dateutil:
log.time = datetime.datetime.now().isoformat('T')
log.time = datetime.datetime.now().isoformat("T")
log.validate()

log.time = -1
self.assertRaises(ValidationError, log.validate)
log.time = 'ABC'
self.assertRaises(ValidationError, log.validate)
with pytest.raises(ValidationError):
log.validate()
log.time = "ABC"
with pytest.raises(ValidationError):
log.validate()

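DateField accepts full datetimes, but only the calendar date survives the round trip, which is why d1 and d2 above compare equal after reload despite different microseconds. A minimal sketch (assumes an active connection, as in the test suite):

    import datetime

    from mongoengine import DateField, Document

    class LogEntry(Document):
        date = DateField()

    log = LogEntry(date=datetime.datetime(1970, 1, 1, 0, 0, 1, 999)).save()
    log.reload()
    # The time-of-day component is truncated away on storage.
    assert log.date == datetime.date(1970, 1, 1)
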
@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
import datetime as dt
import six

import pytest

try:
import dateutil
@ -19,27 +20,32 @@ class TestDateTimeField(MongoDBTestCase):
Ensure an exception is raised when trying to
cast an empty string to datetime.
"""

class MyDoc(Document):
dt = DateTimeField()

md = MyDoc(dt='')
self.assertRaises(ValidationError, md.save)
md = MyDoc(dt="")
with pytest.raises(ValidationError):
md.save()

def test_datetime_from_whitespace_string(self):
"""
Ensure an exception is raised when trying to
cast a whitespace-only string to datetime.
"""

class MyDoc(Document):
dt = DateTimeField()

md = MyDoc(dt=' ')
self.assertRaises(ValidationError, md.save)
md = MyDoc(dt=" ")
with pytest.raises(ValidationError):
md.save()

def test_default_value_utcnow(self):
"""Ensure that default field values are used when creating
a document.
"""

class Person(Document):
created = DateTimeField(default=dt.datetime.utcnow)

@ -47,9 +53,9 @@ class TestDateTimeField(MongoDBTestCase):
person = Person()
person.validate()
person_created_t0 = person.created
self.assertLess(person.created - utcnow, dt.timedelta(seconds=1))
self.assertEqual(person_created_t0, person.created) # make sure it does not change
self.assertEqual(person._data['created'], person.created)
assert person.created - utcnow < dt.timedelta(seconds=1)
assert person_created_t0 == person.created # make sure it does not change
assert person._data["created"] == person.created

def test_handling_microseconds(self):
"""Tests showing pymongo datetime fields handling of microseconds.
@ -58,6 +64,7 @@ class TestDateTimeField(MongoDBTestCase):

See: http://api.mongodb.org/python/current/api/bson/son.html#dt
"""

class LogEntry(Document):
date = DateTimeField()

@ -68,7 +75,7 @@ class TestDateTimeField(MongoDBTestCase):
log.date = dt.date.today()
log.save()
log.reload()
self.assertEqual(log.date.date(), dt.date.today())
assert log.date.date() == dt.date.today()

# Post UTC - microseconds are rounded (down) nearest millisecond and
# dropped
@ -78,8 +85,8 @@ class TestDateTimeField(MongoDBTestCase):
log.date = d1
log.save()
log.reload()
self.assertNotEqual(log.date, d1)
self.assertEqual(log.date, d2)
assert log.date != d1
assert log.date == d2

# Post UTC - microseconds are rounded (down) nearest millisecond
d1 = dt.datetime(1970, 1, 1, 0, 0, 1, 9999)
@ -87,22 +94,12 @@ class TestDateTimeField(MongoDBTestCase):
log.date = d1
log.save()
log.reload()
self.assertNotEqual(log.date, d1)
self.assertEqual(log.date, d2)

if not six.PY3:
# Pre UTC dates microseconds below 1000 are dropped
# This does not seem to be true in PY3
d1 = dt.datetime(1969, 12, 31, 23, 59, 59, 999)
d2 = dt.datetime(1969, 12, 31, 23, 59, 59)
log.date = d1
log.save()
log.reload()
self.assertNotEqual(log.date, d1)
self.assertEqual(log.date, d2)
assert log.date != d1
assert log.date == d2

def test_regular_usage(self):
"""Tests for regular datetime fields"""

class LogEntry(Document):
date = DateTimeField()

@ -114,51 +111,51 @@ class TestDateTimeField(MongoDBTestCase):
log.validate()
log.save()

for query in (d1, d1.isoformat(' ')):
for query in (d1, d1.isoformat(" ")):
log1 = LogEntry.objects.get(date=query)
self.assertEqual(log, log1)
assert log == log1

if dateutil:
log1 = LogEntry.objects.get(date=d1.isoformat('T'))
self.assertEqual(log, log1)
log1 = LogEntry.objects.get(date=d1.isoformat("T"))
assert log == log1

# create additional 19 log entries for a total of 20
for i in range(1971, 1990):
d = dt.datetime(i, 1, 1, 0, 0, 1)
LogEntry(date=d).save()

self.assertEqual(LogEntry.objects.count(), 20)
assert LogEntry.objects.count() == 20

# Test ordering
logs = LogEntry.objects.order_by("date")
i = 0
while i < 19:
self.assertTrue(logs[i].date <= logs[i + 1].date)
assert logs[i].date <= logs[i + 1].date
i += 1

logs = LogEntry.objects.order_by("-date")
i = 0
while i < 19:
self.assertTrue(logs[i].date >= logs[i + 1].date)
assert logs[i].date >= logs[i + 1].date
i += 1

# Test searching
logs = LogEntry.objects.filter(date__gte=dt.datetime(1980, 1, 1))
self.assertEqual(logs.count(), 10)
assert logs.count() == 10

logs = LogEntry.objects.filter(date__lte=dt.datetime(1980, 1, 1))
self.assertEqual(logs.count(), 10)
assert logs.count() == 10

logs = LogEntry.objects.filter(
date__lte=dt.datetime(1980, 1, 1),
date__gte=dt.datetime(1975, 1, 1),
date__lte=dt.datetime(1980, 1, 1), date__gte=dt.datetime(1975, 1, 1)
)
self.assertEqual(logs.count(), 5)
assert logs.count() == 5

def test_datetime_validation(self):
"""Ensure that invalid values cannot be assigned to datetime
fields.
"""

class LogEntry(Document):
time = DateTimeField()

@ -169,45 +166,51 @@ class TestDateTimeField(MongoDBTestCase):
log.time = dt.date.today()
log.validate()

log.time = dt.datetime.now().isoformat(' ')
log.time = dt.datetime.now().isoformat(" ")
log.validate()

log.time = '2019-05-16 21:42:57.897847'
log.time = "2019-05-16 21:42:57.897847"
log.validate()

if dateutil:
log.time = dt.datetime.now().isoformat('T')
log.time = dt.datetime.now().isoformat("T")
log.validate()

log.time = -1
self.assertRaises(ValidationError, log.validate)
log.time = 'ABC'
self.assertRaises(ValidationError, log.validate)
log.time = '2019-05-16 21:GARBAGE:12'
self.assertRaises(ValidationError, log.validate)
log.time = '2019-05-16 21:42:57.GARBAGE'
self.assertRaises(ValidationError, log.validate)
log.time = '2019-05-16 21:42:57.123.456'
self.assertRaises(ValidationError, log.validate)
with pytest.raises(ValidationError):
log.validate()
log.time = "ABC"
with pytest.raises(ValidationError):
log.validate()
log.time = "2019-05-16 21:GARBAGE:12"
with pytest.raises(ValidationError):
log.validate()
log.time = "2019-05-16 21:42:57.GARBAGE"
with pytest.raises(ValidationError):
log.validate()
log.time = "2019-05-16 21:42:57.123.456"
with pytest.raises(ValidationError):
log.validate()

def test_parse_datetime_as_str(self):
class DTDoc(Document):
date = DateTimeField()

date_str = '2019-03-02 22:26:01'
date_str = "2019-03-02 22:26:01"

# make sure that passing a parsable datetime works
dtd = DTDoc()
dtd.date = date_str
self.assertIsInstance(dtd.date, six.string_types)
assert isinstance(dtd.date, str)
dtd.save()
dtd.reload()

self.assertIsInstance(dtd.date, dt.datetime)
self.assertEqual(str(dtd.date), date_str)
assert isinstance(dtd.date, dt.datetime)
assert str(dtd.date) == date_str

dtd.date = 'January 1st, 9999999999'
self.assertRaises(ValidationError, dtd.validate)
dtd.date = "January 1st, 9999999999"
with pytest.raises(ValidationError):
dtd.validate()


class TestDateTimeTzAware(MongoDBTestCase):
@ -217,7 +220,7 @@ class TestDateTimeTzAware(MongoDBTestCase):
connection._connections = {}
connection._dbs = {}

connect(db='mongoenginetest', tz_aware=True)
connect(db="mongoenginetest", tz_aware=True)

class LogEntry(Document):
time = DateTimeField()
@ -228,4 +231,4 @@ class TestDateTimeTzAware(MongoDBTestCase):

log = LogEntry.objects.first()
log.time = dt.datetime(2013, 1, 1, 0, 0, 0)
self.assertEqual(['time'], log._changed_fields)
assert ["time"] == log._changed_fields

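The microsecond checks above encode BSON's millisecond resolution: a DateTimeField round trip keeps at most milliseconds, so 999 microseconds truncate to 0 and 9999 to 9000. A minimal sketch of the round-trip behavior asserted in test_handling_microseconds (assumes an active connection):

    import datetime as dt

    from mongoengine import DateTimeField, Document

    class LogEntry(Document):
        date = DateTimeField()

    log = LogEntry(date=dt.datetime(1970, 1, 1, 0, 0, 1, 9999)).save()
    log.reload()
    # BSON datetimes carry milliseconds only; sub-millisecond digits
    # are rounded down, so 9999 microseconds come back as 9000.
    assert log.date == dt.datetime(1970, 1, 1, 0, 0, 1, 9000)
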
@ -1,39 +1,44 @@
# -*- coding: utf-8 -*-
from decimal import Decimal

from mongoengine import *
import pytest

from mongoengine import *
from tests.utils import MongoDBTestCase


class TestDecimalField(MongoDBTestCase):

def test_validation(self):
"""Ensure that invalid values cannot be assigned to decimal fields.
"""

class Person(Document):
height = DecimalField(min_value=Decimal('0.1'),
max_value=Decimal('3.5'))
height = DecimalField(min_value=Decimal("0.1"), max_value=Decimal("3.5"))

Person.drop_collection()

Person(height=Decimal('1.89')).save()
Person(height=Decimal("1.89")).save()
person = Person.objects.first()
self.assertEqual(person.height, Decimal('1.89'))
assert person.height == Decimal("1.89")

person.height = '2.0'
person.height = "2.0"
person.save()
person.height = 0.01
self.assertRaises(ValidationError, person.validate)
person.height = Decimal('0.01')
self.assertRaises(ValidationError, person.validate)
person.height = Decimal('4.0')
self.assertRaises(ValidationError, person.validate)
person.height = 'something invalid'
self.assertRaises(ValidationError, person.validate)
with pytest.raises(ValidationError):
person.validate()
person.height = Decimal("0.01")
with pytest.raises(ValidationError):
person.validate()
person.height = Decimal("4.0")
with pytest.raises(ValidationError):
person.validate()
person.height = "something invalid"
with pytest.raises(ValidationError):
person.validate()

person_2 = Person(height='something invalid')
self.assertRaises(ValidationError, person_2.validate)
person_2 = Person(height="something invalid")
with pytest.raises(ValidationError):
person_2.validate()

def test_comparison(self):
class Person(Document):
@ -46,11 +51,11 @@ class TestDecimalField(MongoDBTestCase):
Person(money=8).save()
Person(money=10).save()

self.assertEqual(2, Person.objects(money__gt=Decimal("7")).count())
self.assertEqual(2, Person.objects(money__gt=7).count())
self.assertEqual(2, Person.objects(money__gt="7").count())
assert 2 == Person.objects(money__gt=Decimal("7")).count()
assert 2 == Person.objects(money__gt=7).count()
assert 2 == Person.objects(money__gt="7").count()

self.assertEqual(3, Person.objects(money__gte="7").count())
assert 3 == Person.objects(money__gte="7").count()

def test_storage(self):
class Person(Document):
@ -58,7 +63,14 @@ class TestDecimalField(MongoDBTestCase):
string_value = DecimalField(precision=4, force_string=True)

Person.drop_collection()
values_to_store = [10, 10.1, 10.11, "10.111", Decimal("10.1111"), Decimal("10.11111")]
values_to_store = [
10,
10.1,
10.11,
"10.111",
Decimal("10.1111"),
Decimal("10.11111"),
]
for store_at_creation in [True, False]:
for value in values_to_store:
# to_python is called explicitly if values were sent in the kwargs of __init__
@ -72,20 +84,27 @@ class TestDecimalField(MongoDBTestCase):

# How it's stored
expected = [
{'float_value': 10.0, 'string_value': '10.0000'},
{'float_value': 10.1, 'string_value': '10.1000'},
{'float_value': 10.11, 'string_value': '10.1100'},
{'float_value': 10.111, 'string_value': '10.1110'},
{'float_value': 10.1111, 'string_value': '10.1111'},
{'float_value': 10.1111, 'string_value': '10.1111'}]
{"float_value": 10.0, "string_value": "10.0000"},
{"float_value": 10.1, "string_value": "10.1000"},
{"float_value": 10.11, "string_value": "10.1100"},
{"float_value": 10.111, "string_value": "10.1110"},
{"float_value": 10.1111, "string_value": "10.1111"},
{"float_value": 10.1111, "string_value": "10.1111"},
]
expected.extend(expected)
actual = list(Person.objects.exclude('id').as_pymongo())
self.assertEqual(expected, actual)
actual = list(Person.objects.exclude("id").as_pymongo())
assert expected == actual

# How it comes out locally
expected = [Decimal('10.0000'), Decimal('10.1000'), Decimal('10.1100'),
Decimal('10.1110'), Decimal('10.1111'), Decimal('10.1111')]
expected = [
Decimal("10.0000"),
Decimal("10.1000"),
Decimal("10.1100"),
Decimal("10.1110"),
Decimal("10.1111"),
Decimal("10.1111"),
]
expected.extend(expected)
for field_name in ['float_value', 'string_value']:
for field_name in ["float_value", "string_value"]:
actual = list(Person.objects().scalar(field_name))
self.assertEqual(expected, actual)
assert expected == actual

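The storage test above fixes the DecimalField contract: values are quantized to precision decimal places, stored as a float by default or as a padded string with force_string=True, and always materialize locally as Decimal. A minimal sketch (assumes an active connection):

    from decimal import Decimal

    from mongoengine import DecimalField, Document

    class Person(Document):
        float_value = DecimalField(precision=4)
        string_value = DecimalField(precision=4, force_string=True)

    Person.drop_collection()
    Person(float_value=10.1, string_value=10.1).save()

    # Same logical value, two storage shapes.
    raw = Person.objects.exclude("id").as_pymongo().first()
    assert raw == {"float_value": 10.1, "string_value": "10.1000"}

    # Locally, both fields come back as Decimal.
    assert Person.objects.first().float_value == Decimal("10.1000")
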
@ -1,100 +1,123 @@
# -*- coding: utf-8 -*-
from bson import InvalidDocument
import pytest

from mongoengine import *
from mongoengine.base import BaseDict
from mongoengine.mongodb_support import MONGODB_36, get_mongodb_version

from tests.utils import MongoDBTestCase, get_as_pymongo


class TestDictField(MongoDBTestCase):

def test_storage(self):
class BlogPost(Document):
info = DictField()

BlogPost.drop_collection()

info = {'testkey': 'testvalue'}
info = {"testkey": "testvalue"}
post = BlogPost(info=info).save()
self.assertEqual(
get_as_pymongo(post),
{
'_id': post.id,
'info': info
}
)
assert get_as_pymongo(post) == {"_id": post.id, "info": info}

def test_general_things(self):
"""Ensure that dict types work as expected."""
def test_validate_invalid_type(self):
class BlogPost(Document):
info = DictField()

BlogPost.drop_collection()

invalid_infos = ["my post", ["test", "test"], {1: "test"}]
for invalid_info in invalid_infos:
with pytest.raises(ValidationError):
BlogPost(info=invalid_info).validate()

def test_keys_with_dots_or_dollars(self):
class BlogPost(Document):
info = DictField()

BlogPost.drop_collection()

post = BlogPost()
post.info = 'my post'
self.assertRaises(ValidationError, post.validate)

post.info = ['test', 'test']
self.assertRaises(ValidationError, post.validate)
post.info = {"$title": "test"}
with pytest.raises(ValidationError):
post.validate()

post.info = {'$title': 'test'}
self.assertRaises(ValidationError, post.validate)
post.info = {"nested": {"$title": "test"}}
with pytest.raises(ValidationError):
post.validate()

post.info = {'nested': {'$title': 'test'}}
self.assertRaises(ValidationError, post.validate)
post.info = {"$title.test": "test"}
with pytest.raises(ValidationError):
post.validate()

post.info = {'the.title': 'test'}
self.assertRaises(ValidationError, post.validate)
post.info = {"nested": {"the.title": "test"}}
if get_mongodb_version() < MONGODB_36:
# MongoDB < 3.6 rejects dots
# To avoid checking the mongodb version from the DictField class
# we rely on MongoDB to reject the data during the save
post.validate()
with pytest.raises(InvalidDocument):
post.save()
else:
post.validate()

post.info = {'nested': {'the.title': 'test'}}
self.assertRaises(ValidationError, post.validate)
post.info = {"dollar_and_dot": {"te$st.test": "test"}}
if get_mongodb_version() < MONGODB_36:
post.validate()
with pytest.raises(InvalidDocument):
post.save()
else:
post.validate()

post.info = {1: 'test'}
self.assertRaises(ValidationError, post.validate)
def test_general_things(self):
"""Ensure that dict types work as expected."""

post.info = {'title': 'test'}
class BlogPost(Document):
info = DictField()

BlogPost.drop_collection()

post = BlogPost(info={"title": "test"})
post.save()

post = BlogPost()
post.info = {'title': 'dollar_sign', 'details': {'te$t': 'test'}}
post.info = {"title": "dollar_sign", "details": {"te$t": "test"}}
post.save()

post = BlogPost()
post.info = {'details': {'test': 'test'}}
post.info = {"details": {"test": "test"}}
post.save()

post = BlogPost()
post.info = {'details': {'test': 3}}
post.info = {"details": {"test": 3}}
post.save()

self.assertEqual(BlogPost.objects.count(), 4)
self.assertEqual(
BlogPost.objects.filter(info__title__exact='test').count(), 1)
self.assertEqual(
BlogPost.objects.filter(info__details__test__exact='test').count(), 1)
assert BlogPost.objects.count() == 4
assert BlogPost.objects.filter(info__title__exact="test").count() == 1
assert BlogPost.objects.filter(info__details__test__exact="test").count() == 1

post = BlogPost.objects.filter(info__title__exact='dollar_sign').first()
self.assertIn('te$t', post['info']['details'])
post = BlogPost.objects.filter(info__title__exact="dollar_sign").first()
assert "te$t" in post["info"]["details"]

# Confirm handles non strings or non existing keys
self.assertEqual(
BlogPost.objects.filter(info__details__test__exact=5).count(), 0)
self.assertEqual(
BlogPost.objects.filter(info__made_up__test__exact='test').count(), 0)
assert BlogPost.objects.filter(info__details__test__exact=5).count() == 0
assert BlogPost.objects.filter(info__made_up__test__exact="test").count() == 0

post = BlogPost.objects.create(info={'title': 'original'})
post.info.update({'title': 'updated'})
post = BlogPost.objects.create(info={"title": "original"})
post.info.update({"title": "updated"})
post.save()
post.reload()
self.assertEqual('updated', post.info['title'])
assert "updated" == post.info["title"]

post.info.setdefault('authors', [])
post.info.setdefault("authors", [])
post.save()
post.reload()
self.assertEqual([], post.info['authors'])
assert post.info["authors"] == []

def test_dictfield_dump_document(self):
"""Ensure a DictField can handle another document's dump."""

class Doc(Document):
field = DictField()

@ -106,51 +129,60 @@ class TestDictField(MongoDBTestCase):
id = IntField(primary_key=True, default=1)
recursive = DictField()

meta = {'allow_inheritance': True}
meta = {"allow_inheritance": True}

class ToEmbedChild(ToEmbedParent):
pass

to_embed_recursive = ToEmbed(id=1).save()
to_embed = ToEmbed(
id=2, recursive=to_embed_recursive.to_mongo().to_dict()).save()
id=2, recursive=to_embed_recursive.to_mongo().to_dict()
).save()
doc = Doc(field=to_embed.to_mongo().to_dict())
doc.save()
self.assertIsInstance(doc.field, dict)
self.assertEqual(doc.field, {'_id': 2, 'recursive': {'_id': 1, 'recursive': {}}})
assert isinstance(doc.field, dict)
assert doc.field == {"_id": 2, "recursive": {"_id": 1, "recursive": {}}}
# Same thing with a Document with a _cls field
to_embed_recursive = ToEmbedChild(id=1).save()
to_embed_child = ToEmbedChild(
id=2, recursive=to_embed_recursive.to_mongo().to_dict()).save()
id=2, recursive=to_embed_recursive.to_mongo().to_dict()
).save()
doc = Doc(field=to_embed_child.to_mongo().to_dict())
doc.save()
self.assertIsInstance(doc.field, dict)
assert isinstance(doc.field, dict)
expected = {
'_id': 2, '_cls': 'ToEmbedParent.ToEmbedChild',
'recursive': {'_id': 1, '_cls': 'ToEmbedParent.ToEmbedChild', 'recursive': {}}
"_id": 2,
"_cls": "ToEmbedParent.ToEmbedChild",
"recursive": {
"_id": 1,
"_cls": "ToEmbedParent.ToEmbedChild",
"recursive": {},
},
}
self.assertEqual(doc.field, expected)
assert doc.field == expected

def test_dictfield_strict(self):
"""Ensure that dict field handles validation if provided a strict field type."""

class Simple(Document):
mapping = DictField(field=IntField())

Simple.drop_collection()

e = Simple()
e.mapping['someint'] = 1
e.mapping["someint"] = 1
e.save()

# try creating an invalid mapping
with self.assertRaises(ValidationError):
e.mapping['somestring'] = "abc"
with pytest.raises(ValidationError):
e.mapping["somestring"] = "abc"
e.save()

def test_dictfield_complex(self):
"""Ensure that the dict field can handle the complex types."""

class SettingBase(EmbeddedDocument):
meta = {'allow_inheritance': True}
meta = {"allow_inheritance": True}

class StringSetting(SettingBase):
value = StringField()
@ -164,73 +196,76 @@ class TestDictField(MongoDBTestCase):
        Simple.drop_collection()

        e = Simple()
        e.mapping['somestring'] = StringSetting(value='foo')
        e.mapping['someint'] = IntegerSetting(value=42)
        e.mapping['nested_dict'] = {'number': 1, 'string': 'Hi!',
                                    'float': 1.001,
                                    'complex': IntegerSetting(value=42),
                                    'list': [IntegerSetting(value=42),
                                             StringSetting(value='foo')]}
        e.mapping["somestring"] = StringSetting(value="foo")
        e.mapping["someint"] = IntegerSetting(value=42)
        e.mapping["nested_dict"] = {
            "number": 1,
            "string": "Hi!",
            "float": 1.001,
            "complex": IntegerSetting(value=42),
            "list": [IntegerSetting(value=42), StringSetting(value="foo")],
        }
        e.save()

        e2 = Simple.objects.get(id=e.id)
        self.assertIsInstance(e2.mapping['somestring'], StringSetting)
        self.assertIsInstance(e2.mapping['someint'], IntegerSetting)
        assert isinstance(e2.mapping["somestring"], StringSetting)
        assert isinstance(e2.mapping["someint"], IntegerSetting)

        # Test querying
        self.assertEqual(
            Simple.objects.filter(mapping__someint__value=42).count(), 1)
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__number=1).count(), 1)
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__complex__value=42).count(), 1)
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__list__0__value=42).count(), 1)
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 1)
        assert Simple.objects.filter(mapping__someint__value=42).count() == 1
        assert Simple.objects.filter(mapping__nested_dict__number=1).count() == 1
        assert (
            Simple.objects.filter(mapping__nested_dict__complex__value=42).count() == 1
        )
        assert (
            Simple.objects.filter(mapping__nested_dict__list__0__value=42).count() == 1
        )
        assert (
            Simple.objects.filter(mapping__nested_dict__list__1__value="foo").count()
            == 1
        )

        # Confirm can update
        Simple.objects().update(set__mapping={"someint": IntegerSetting(value=10)})
        Simple.objects().update(
            set__mapping={"someint": IntegerSetting(value=10)})
        Simple.objects().update(
            set__mapping__nested_dict__list__1=StringSetting(value='Boo'))
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 0)
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__list__1__value='Boo').count(), 1)
            set__mapping__nested_dict__list__1=StringSetting(value="Boo")
        )
        assert (
            Simple.objects.filter(mapping__nested_dict__list__1__value="foo").count()
            == 0
        )
        assert (
            Simple.objects.filter(mapping__nested_dict__list__1__value="Boo").count()
            == 1
        )
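The hunk above also exercises mongoengine's double-underscore query syntax for reaching into DictField values, including positional list indexes (list__0, list__1). A minimal standalone sketch of that pattern, assuming a local MongoDB on the default port (the database and class names are illustrative, not from this commit):

from mongoengine import DictField, Document, connect

connect("dictfield_sketch")  # assumes a local mongod on the default port

class Page(Document):
    # Free-form dict; values are queried with mapping__<key>__<subkey> paths.
    mapping = DictField()

Page.drop_collection()
Page(mapping={"tags": ["db", "odm"], "meta": {"views": 3}}).save()

# Double-underscore paths traverse dict keys and list positions alike.
assert Page.objects(mapping__tags__0="db").count() == 1
assert Page.objects(mapping__meta__views=3).count() == 1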
    def test_push_dict(self):
        class MyModel(Document):
            events = ListField(DictField())

        doc = MyModel(events=[{'a': 1}]).save()
        doc = MyModel(events=[{"a": 1}]).save()
        raw_doc = get_as_pymongo(doc)
        expected_raw_doc = {
            '_id': doc.id,
            'events': [{'a': 1}]
        }
        self.assertEqual(raw_doc, expected_raw_doc)
        expected_raw_doc = {"_id": doc.id, "events": [{"a": 1}]}
        assert raw_doc == expected_raw_doc

        MyModel.objects(id=doc.id).update(push__events={})
        raw_doc = get_as_pymongo(doc)
        expected_raw_doc = {
            '_id': doc.id,
            'events': [{'a': 1}, {}]
        }
        self.assertEqual(raw_doc, expected_raw_doc)
        expected_raw_doc = {"_id": doc.id, "events": [{"a": 1}, {}]}
        assert raw_doc == expected_raw_doc

    def test_ensure_unique_default_instances(self):
        """Ensure that every field has its own unique default instance."""

        class D(Document):
            data = DictField()
            data2 = DictField(default=lambda: {})

        d1 = D()
        d1.data['foo'] = 'bar'
        d1.data2['foo'] = 'bar'
        d1.data["foo"] = "bar"
        d1.data2["foo"] = "bar"
        d2 = D()
        self.assertEqual(d2.data, {})
        self.assertEqual(d2.data2, {})
        assert d2.data == {}
        assert d2.data2 == {}
    def test_dict_field_invalid_dict_value(self):
        class DictFieldTest(Document):
@ -240,11 +275,13 @@ class TestDictField(MongoDBTestCase):

        test = DictFieldTest(dictionary=None)
        test.dictionary  # Just access to test getter
        self.assertRaises(ValidationError, test.validate)
        with pytest.raises(ValidationError):
            test.validate()

        test = DictFieldTest(dictionary=False)
        test.dictionary  # Just access to test getter
        self.assertRaises(ValidationError, test.validate)
        with pytest.raises(ValidationError):
            test.validate()

    def test_dict_field_raises_validation_error_if_wrongly_assign_embedded_doc(self):
        class DictFieldTest(Document):
@ -255,31 +292,34 @@ class TestDictField(MongoDBTestCase):
        class Embedded(EmbeddedDocument):
            name = StringField()

        embed = Embedded(name='garbage')
        embed = Embedded(name="garbage")
        doc = DictFieldTest(dictionary=embed)
        with self.assertRaises(ValidationError) as ctx_err:
        with pytest.raises(ValidationError) as exc_info:
            doc.validate()
        self.assertIn("'dictionary'", str(ctx_err.exception))
        self.assertIn('Only dictionaries may be used in a DictField', str(ctx_err.exception))

        error_msg = str(exc_info.value)
        assert "'dictionary'" in error_msg
        assert "Only dictionaries may be used in a DictField" in error_msg
    def test_atomic_update_dict_field(self):
        """Ensure that the entire DictField can be atomically updated."""

        class Simple(Document):
            mapping = DictField(field=ListField(IntField(required=True)))

        Simple.drop_collection()

        e = Simple()
        e.mapping['someints'] = [1, 2]
        e.mapping["someints"] = [1, 2]
        e.save()
        e.update(set__mapping={"ints": [3, 4]})
        e.reload()
        self.assertEqual(BaseDict, type(e.mapping))
        self.assertEqual({"ints": [3, 4]}, e.mapping)
        assert isinstance(e.mapping, BaseDict)
        assert {"ints": [3, 4]} == e.mapping

        # try creating an invalid mapping
        with self.assertRaises(ValueError):
            e.update(set__mapping={"somestrings": ["foo", "bar", ]})
        with pytest.raises(ValueError):
            e.update(set__mapping={"somestrings": ["foo", "bar"]})
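The test above relies on set__mapping replacing the whole DictField in a single server-side $set. A minimal sketch of that atomic-update behaviour under the same assumptions (database and class names are illustrative):

from mongoengine import DictField, Document, IntField, ListField, connect

connect("atomic_sketch")  # illustrative database name

class Simple(Document):
    mapping = DictField(field=ListField(IntField()))

Simple.drop_collection()
e = Simple(mapping={"someints": [1, 2]}).save()

# set__mapping swaps the entire dict atomically on the server.
e.update(set__mapping={"ints": [3, 4]})
e.reload()
assert e.mapping == {"ints": [3, 4]}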
    def test_dictfield_with_referencefield_complex_nesting_cases(self):
        """Ensure complex nesting inside DictField handles dereferencing of ReferenceField(dbref=True | False)"""
@ -296,29 +336,33 @@ class TestDictField(MongoDBTestCase):
        mapping5 = DictField(DictField(field=ReferenceField(Doc, dbref=False)))
        mapping6 = DictField(ListField(DictField(ReferenceField(Doc, dbref=True))))
        mapping7 = DictField(ListField(DictField(ReferenceField(Doc, dbref=False))))
        mapping8 = DictField(ListField(DictField(ListField(ReferenceField(Doc, dbref=True)))))
        mapping9 = DictField(ListField(DictField(ListField(ReferenceField(Doc, dbref=False)))))
        mapping8 = DictField(
            ListField(DictField(ListField(ReferenceField(Doc, dbref=True))))
        )
        mapping9 = DictField(
            ListField(DictField(ListField(ReferenceField(Doc, dbref=False))))
        )

        Doc.drop_collection()
        Simple.drop_collection()

        d = Doc(s='aa').save()
        d = Doc(s="aa").save()
        e = Simple()
        e.mapping0['someint'] = e.mapping1['someint'] = d
        e.mapping2['someint'] = e.mapping3['someint'] = [d]
        e.mapping4['someint'] = e.mapping5['someint'] = {'d': d}
        e.mapping6['someint'] = e.mapping7['someint'] = [{'d': d}]
        e.mapping8['someint'] = e.mapping9['someint'] = [{'d': [d]}]
        e.mapping0["someint"] = e.mapping1["someint"] = d
        e.mapping2["someint"] = e.mapping3["someint"] = [d]
        e.mapping4["someint"] = e.mapping5["someint"] = {"d": d}
        e.mapping6["someint"] = e.mapping7["someint"] = [{"d": d}]
        e.mapping8["someint"] = e.mapping9["someint"] = [{"d": [d]}]
        e.save()

        s = Simple.objects.first()
        self.assertIsInstance(s.mapping0['someint'], Doc)
        self.assertIsInstance(s.mapping1['someint'], Doc)
        self.assertIsInstance(s.mapping2['someint'][0], Doc)
        self.assertIsInstance(s.mapping3['someint'][0], Doc)
        self.assertIsInstance(s.mapping4['someint']['d'], Doc)
        self.assertIsInstance(s.mapping5['someint']['d'], Doc)
        self.assertIsInstance(s.mapping6['someint'][0]['d'], Doc)
        self.assertIsInstance(s.mapping7['someint'][0]['d'], Doc)
        self.assertIsInstance(s.mapping8['someint'][0]['d'][0], Doc)
        self.assertIsInstance(s.mapping9['someint'][0]['d'][0], Doc)
        assert isinstance(s.mapping0["someint"], Doc)
        assert isinstance(s.mapping1["someint"], Doc)
        assert isinstance(s.mapping2["someint"][0], Doc)
        assert isinstance(s.mapping3["someint"][0], Doc)
        assert isinstance(s.mapping4["someint"]["d"], Doc)
        assert isinstance(s.mapping5["someint"]["d"], Doc)
        assert isinstance(s.mapping6["someint"][0]["d"], Doc)
        assert isinstance(s.mapping7["someint"][0]["d"], Doc)
        assert isinstance(s.mapping8["someint"][0]["d"][0], Doc)
        assert isinstance(s.mapping9["someint"][0]["d"][0], Doc)
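For reference, the dereferencing behaviour these nesting cases exercise can be reproduced with a single level of nesting. A minimal sketch, assuming a local MongoDB (names are illustrative, not from this commit):

from mongoengine import DictField, Document, ReferenceField, StringField, connect

connect("deref_sketch")  # illustrative database name

class Doc(Document):
    s = StringField()

class Holder(Document):
    # References nested inside a DictField are dereferenced on access.
    refs = DictField(field=ReferenceField(Doc, dbref=False))

Doc.drop_collection()
Holder.drop_collection()
d = Doc(s="aa").save()
Holder(refs={"first": d}).save()

h = Holder.objects.first()
assert isinstance(h.refs["first"], Doc)  # comes back as a Doc instance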
@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
import sys
from unittest import SkipTest

import pytest

from mongoengine import *

from tests.utils import MongoDBTestCase
@ -12,52 +12,52 @@ class TestEmailField(MongoDBTestCase):
        class User(Document):
            email = EmailField()

        user = User(email='ross@example.com')
        user = User(email="ross@example.com")
        user.validate()

        user = User(email='ross@example.co.uk')
        user = User(email="ross@example.co.uk")
        user.validate()

        user = User(email=('Kofq@rhom0e4klgauOhpbpNdogawnyIKvQS0wk2mjqrgGQ5S'
                           'aJIazqqWkm7.net'))
        user = User(
            email=("Kofq@rhom0e4klgauOhpbpNdogawnyIKvQS0wk2mjqrgGQ5SaJIazqqWkm7.net")
        )
        user.validate()

        user = User(email='new-tld@example.technology')
        user = User(email="new-tld@example.technology")
        user.validate()

        user = User(email='ross@example.com.')
        self.assertRaises(ValidationError, user.validate)
        user = User(email="ross@example.com.")
        with pytest.raises(ValidationError):
            user.validate()

        # unicode domain
        user = User(email=u'user@пример.рф')
        user = User(email=u"user@пример.рф")
        user.validate()

        # invalid unicode domain
        user = User(email=u'user@пример')
        self.assertRaises(ValidationError, user.validate)
        user = User(email=u"user@пример")
        with pytest.raises(ValidationError):
            user.validate()

        # invalid data type
        user = User(email=123)
        self.assertRaises(ValidationError, user.validate)
        with pytest.raises(ValidationError):
            user.validate()

    def test_email_field_unicode_user(self):
        # Don't run this test on pypy3, which doesn't support unicode regex:
        # https://bitbucket.org/pypy/pypy/issues/1821/regular-expression-doesnt-find-unicode
        if sys.version_info[:2] == (3, 2):
            raise SkipTest('unicode email addresses are not supported on PyPy 3')

        class User(Document):
            email = EmailField()

        # unicode user shouldn't validate by default...
        user = User(email=u'Dörte@Sörensen.example.com')
        self.assertRaises(ValidationError, user.validate)
        user = User(email=u"Dörte@Sörensen.example.com")
        with pytest.raises(ValidationError):
            user.validate()

        # ...but it should be fine with allow_utf8_user set to True
        class User(Document):
            email = EmailField(allow_utf8_user=True)

        user = User(email=u'Dörte@Sörensen.example.com')
        user = User(email=u"Dörte@Sörensen.example.com")
        user.validate()

    def test_email_field_domain_whitelist(self):
@ -65,43 +65,48 @@ class TestEmailField(MongoDBTestCase):
            email = EmailField()

        # localhost domain shouldn't validate by default...
        user = User(email='me@localhost')
        self.assertRaises(ValidationError, user.validate)
        user = User(email="me@localhost")
        with pytest.raises(ValidationError):
            user.validate()

        # ...but it should be fine if it's whitelisted
        class User(Document):
            email = EmailField(domain_whitelist=['localhost'])
            email = EmailField(domain_whitelist=["localhost"])

        user = User(email='me@localhost')
        user = User(email="me@localhost")
        user.validate()

    def test_email_domain_validation_fails_if_invalid_idn(self):
        class User(Document):
            email = EmailField()

        invalid_idn = '.google.com'
        user = User(email='me@%s' % invalid_idn)
        with self.assertRaises(ValidationError) as ctx_err:
        invalid_idn = ".google.com"
        user = User(email="me@%s" % invalid_idn)

        with pytest.raises(ValidationError) as exc_info:
            user.validate()
        self.assertIn("domain failed IDN encoding", str(ctx_err.exception))
        assert "domain failed IDN encoding" in str(exc_info.value)

    def test_email_field_ip_domain(self):
        class User(Document):
            email = EmailField()

        valid_ipv4 = 'email@[127.0.0.1]'
        valid_ipv6 = 'email@[2001:dB8::1]'
        invalid_ip = 'email@[324.0.0.1]'
        valid_ipv4 = "email@[127.0.0.1]"
        valid_ipv6 = "email@[2001:dB8::1]"
        invalid_ip = "email@[324.0.0.1]"

        # IP address as a domain shouldn't validate by default...
        user = User(email=valid_ipv4)
        self.assertRaises(ValidationError, user.validate)
        with pytest.raises(ValidationError):
            user.validate()

        user = User(email=valid_ipv6)
        self.assertRaises(ValidationError, user.validate)
        with pytest.raises(ValidationError):
            user.validate()

        user = User(email=invalid_ip)
        self.assertRaises(ValidationError, user.validate)
        with pytest.raises(ValidationError):
            user.validate()

        # ...but it should be fine with allow_ip_domain set to True
        class User(Document):
@ -115,16 +120,18 @@ class TestEmailField(MongoDBTestCase):

        # invalid IP should still fail validation
        user = User(email=invalid_ip)
        self.assertRaises(ValidationError, user.validate)
        with pytest.raises(ValidationError):
            user.validate()

    def test_email_field_honors_regex(self):
        class User(Document):
            email = EmailField(regex=r'\w+@example.com')
            email = EmailField(regex=r"\w+@example.com")

        # Fails regex validation
        user = User(email='me@foo.com')
        self.assertRaises(ValidationError, user.validate)
        user = User(email="me@foo.com")
        with pytest.raises(ValidationError):
            user.validate()

        # Passes regex validation
        user = User(email='me@example.com')
        self.assertIsNone(user.validate())
        user = User(email="me@example.com")
        assert user.validate() is None
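The EmailField options exercised above (domain_whitelist, allow_utf8_user, allow_ip_domain, regex) can be combined on one field. A hedged sketch of the first three together, with illustrative names; behaviour is as the tests above suggest:

import pytest
from mongoengine import Document, EmailField, ValidationError, connect

connect("email_sketch")  # illustrative database name

class Contact(Document):
    email = EmailField(
        domain_whitelist=["localhost"],  # extra domains accepted as-is
        allow_utf8_user=True,            # permit unicode local parts
        allow_ip_domain=True,            # permit user@[127.0.0.1]
    )

Contact(email=u"Dörte@localhost").validate()
Contact(email="email@[127.0.0.1]").validate()
with pytest.raises(ValidationError):
    Contact(email="not-an-email").validate()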
@ -1,7 +1,18 @@
# -*- coding: utf-8 -*-
from mongoengine import Document, StringField, ValidationError, EmbeddedDocument, EmbeddedDocumentField, \
    InvalidQueryError, LookUpError, IntField, GenericEmbeddedDocumentField, ListField, EmbeddedDocumentListField, \
    ReferenceField
import pytest

from mongoengine import (
    Document,
    EmbeddedDocument,
    EmbeddedDocumentField,
    GenericEmbeddedDocumentField,
    IntField,
    InvalidQueryError,
    ListField,
    LookUpError,
    StringField,
    ValidationError,
)

from tests.utils import MongoDBTestCase
@ -12,37 +23,41 @@ class TestEmbeddedDocumentField(MongoDBTestCase):
            name = StringField()

        field = EmbeddedDocumentField(MyDoc)
        self.assertEqual(field.document_type_obj, MyDoc)
        assert field.document_type_obj == MyDoc

        field2 = EmbeddedDocumentField('MyDoc')
        self.assertEqual(field2.document_type_obj, 'MyDoc')
        field2 = EmbeddedDocumentField("MyDoc")
        assert field2.document_type_obj == "MyDoc"

    def test___init___throw_error_if_document_type_is_not_EmbeddedDocument(self):
        with self.assertRaises(ValidationError):
        with pytest.raises(ValidationError):
            EmbeddedDocumentField(dict)

    def test_document_type_throw_error_if_not_EmbeddedDocument_subclass(self):

        class MyDoc(Document):
            name = StringField()

        emb = EmbeddedDocumentField('MyDoc')
        with self.assertRaises(ValidationError) as ctx:
        emb = EmbeddedDocumentField("MyDoc")
        with pytest.raises(ValidationError) as exc_info:
            emb.document_type
        self.assertIn('Invalid embedded document class provided to an EmbeddedDocumentField', str(ctx.exception))
        assert (
            "Invalid embedded document class provided to an EmbeddedDocumentField"
            in str(exc_info.value)
        )

    def test_embedded_document_field_only_allow_subclasses_of_embedded_document(self):
        # Relates to #1661
        class MyDoc(Document):
            name = StringField()

        with self.assertRaises(ValidationError):
        with pytest.raises(ValidationError):

            class MyFailingDoc(Document):
                emb = EmbeddedDocumentField(MyDoc)

        with self.assertRaises(ValidationError):
        with pytest.raises(ValidationError):

            class MyFailingdoc2(Document):
                emb = EmbeddedDocumentField('MyDoc')
                emb = EmbeddedDocumentField("MyDoc")

    def test_query_embedded_document_attribute(self):
        class AdminSettings(EmbeddedDocument):
@ -55,34 +70,31 @@ class TestEmbeddedDocumentField(MongoDBTestCase):

        Person.drop_collection()

        p = Person(
            settings=AdminSettings(foo1='bar1', foo2='bar2'),
            name='John',
        ).save()
        p = Person(settings=AdminSettings(foo1="bar1", foo2="bar2"), name="John").save()

        # Test non existing attribute
        with self.assertRaises(InvalidQueryError) as ctx_err:
            Person.objects(settings__notexist='bar').first()
        self.assertEqual(unicode(ctx_err.exception), u'Cannot resolve field "notexist"')
        with pytest.raises(InvalidQueryError) as exc_info:
            Person.objects(settings__notexist="bar").first()
        assert str(exc_info.value) == u'Cannot resolve field "notexist"'

        with self.assertRaises(LookUpError):
            Person.objects.only('settings.notexist')
        with pytest.raises(LookUpError):
            Person.objects.only("settings.notexist")

        # Test existing attribute
        self.assertEqual(Person.objects(settings__foo1='bar1').first().id, p.id)
        only_p = Person.objects.only('settings.foo1').first()
        self.assertEqual(only_p.settings.foo1, p.settings.foo1)
        self.assertIsNone(only_p.settings.foo2)
        self.assertIsNone(only_p.name)
        assert Person.objects(settings__foo1="bar1").first().id == p.id
        only_p = Person.objects.only("settings.foo1").first()
        assert only_p.settings.foo1 == p.settings.foo1
        assert only_p.settings.foo2 is None
        assert only_p.name is None

        exclude_p = Person.objects.exclude('settings.foo1').first()
        self.assertIsNone(exclude_p.settings.foo1)
        self.assertEqual(exclude_p.settings.foo2, p.settings.foo2)
        self.assertEqual(exclude_p.name, p.name)
        exclude_p = Person.objects.exclude("settings.foo1").first()
        assert exclude_p.settings.foo1 is None
        assert exclude_p.settings.foo2 == p.settings.foo2
        assert exclude_p.name == p.name

    def test_query_embedded_document_attribute_with_inheritance(self):
        class BaseSettings(EmbeddedDocument):
            meta = {'allow_inheritance': True}
            meta = {"allow_inheritance": True}
            base_foo = StringField()

        class AdminSettings(BaseSettings):
@ -93,26 +105,26 @@ class TestEmbeddedDocumentField(MongoDBTestCase):

        Person.drop_collection()

        p = Person(settings=AdminSettings(base_foo='basefoo', sub_foo='subfoo'))
        p = Person(settings=AdminSettings(base_foo="basefoo", sub_foo="subfoo"))
        p.save()

        # Test non existing attribute
        with self.assertRaises(InvalidQueryError) as ctx_err:
            self.assertEqual(Person.objects(settings__notexist='bar').first().id, p.id)
        self.assertEqual(unicode(ctx_err.exception), u'Cannot resolve field "notexist"')
        with pytest.raises(InvalidQueryError) as exc_info:
            assert Person.objects(settings__notexist="bar").first().id == p.id
        assert str(exc_info.value) == u'Cannot resolve field "notexist"'

        # Test existing attribute
        self.assertEqual(Person.objects(settings__base_foo='basefoo').first().id, p.id)
        self.assertEqual(Person.objects(settings__sub_foo='subfoo').first().id, p.id)
        assert Person.objects(settings__base_foo="basefoo").first().id == p.id
        assert Person.objects(settings__sub_foo="subfoo").first().id == p.id

        only_p = Person.objects.only('settings.base_foo', 'settings._cls').first()
        self.assertEqual(only_p.settings.base_foo, 'basefoo')
        self.assertIsNone(only_p.settings.sub_foo)
        only_p = Person.objects.only("settings.base_foo", "settings._cls").first()
        assert only_p.settings.base_foo == "basefoo"
        assert only_p.settings.sub_foo is None

    def test_query_list_embedded_document_with_inheritance(self):
        class Post(EmbeddedDocument):
            title = StringField(max_length=120, required=True)
            meta = {'allow_inheritance': True}
            meta = {"allow_inheritance": True}

        class TextPost(Post):
            content = StringField()
@ -123,22 +135,21 @@ class TestEmbeddedDocumentField(MongoDBTestCase):
        class Record(Document):
            posts = ListField(EmbeddedDocumentField(Post))

        record_movie = Record(posts=[MoviePost(author='John', title='foo')]).save()
        record_text = Record(posts=[TextPost(content='a', title='foo')]).save()
        record_movie = Record(posts=[MoviePost(author="John", title="foo")]).save()
        record_text = Record(posts=[TextPost(content="a", title="foo")]).save()

        records = list(Record.objects(posts__author=record_movie.posts[0].author))
        self.assertEqual(len(records), 1)
        self.assertEqual(records[0].id, record_movie.id)
        assert len(records) == 1
        assert records[0].id == record_movie.id

        records = list(Record.objects(posts__content=record_text.posts[0].content))
        self.assertEqual(len(records), 1)
        self.assertEqual(records[0].id, record_text.id)
        assert len(records) == 1
        assert records[0].id == record_text.id

        self.assertEqual(Record.objects(posts__title='foo').count(), 2)
        assert Record.objects(posts__title="foo").count() == 2
class TestGenericEmbeddedDocumentField(MongoDBTestCase):

    def test_generic_embedded_document(self):
        class Car(EmbeddedDocument):
            name = StringField()
@ -153,21 +164,22 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase):

        Person.drop_collection()

        person = Person(name='Test User')
        person.like = Car(name='Fiat')
        person = Person(name="Test User")
        person.like = Car(name="Fiat")
        person.save()

        person = Person.objects.first()
        self.assertIsInstance(person.like, Car)
        assert isinstance(person.like, Car)

        person.like = Dish(food="arroz", number=15)
        person.save()

        person = Person.objects.first()
        self.assertIsInstance(person.like, Dish)
        assert isinstance(person.like, Dish)

    def test_generic_embedded_document_choices(self):
        """Ensure you can limit GenericEmbeddedDocument choices."""

        class Car(EmbeddedDocument):
            name = StringField()

@ -181,20 +193,22 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase):

        Person.drop_collection()

        person = Person(name='Test User')
        person.like = Car(name='Fiat')
        self.assertRaises(ValidationError, person.validate)
        person = Person(name="Test User")
        person.like = Car(name="Fiat")
        with pytest.raises(ValidationError):
            person.validate()

        person.like = Dish(food="arroz", number=15)
        person.save()

        person = Person.objects.first()
        self.assertIsInstance(person.like, Dish)
        assert isinstance(person.like, Dish)

    def test_generic_list_embedded_document_choices(self):
        """Ensure you can limit GenericEmbeddedDocument choices inside
        a list field.
        """

        class Car(EmbeddedDocument):
            name = StringField()
@ -208,39 +222,38 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase):

        Person.drop_collection()

        person = Person(name='Test User')
        person.likes = [Car(name='Fiat')]
        self.assertRaises(ValidationError, person.validate)
        person = Person(name="Test User")
        person.likes = [Car(name="Fiat")]
        with pytest.raises(ValidationError):
            person.validate()

        person.likes = [Dish(food="arroz", number=15)]
        person.save()

        person = Person.objects.first()
        self.assertIsInstance(person.likes[0], Dish)
        assert isinstance(person.likes[0], Dish)

    def test_choices_validation_documents(self):
        """
        Ensure fields with document choices validate given a valid choice.
        """

        class UserComments(EmbeddedDocument):
            author = StringField()
            message = StringField()

        class BlogPost(Document):
            comments = ListField(
                GenericEmbeddedDocumentField(choices=(UserComments,))
            )
            comments = ListField(GenericEmbeddedDocumentField(choices=(UserComments,)))

        # Ensure Validation Passes
        BlogPost(comments=[
            UserComments(author='user2', message='message2'),
        ]).save()
        BlogPost(comments=[UserComments(author="user2", message="message2")]).save()

    def test_choices_validation_documents_invalid(self):
        """
        Ensure fields with document choices validate given an invalid choice.
        This should throw a ValidationError exception.
        """

        class UserComments(EmbeddedDocument):
            author = StringField()
            message = StringField()
@ -250,31 +263,30 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase):
            message = StringField()

        class BlogPost(Document):
            comments = ListField(
                GenericEmbeddedDocumentField(choices=(UserComments,))
            )
            comments = ListField(GenericEmbeddedDocumentField(choices=(UserComments,)))

        # Single Entry Failure
        post = BlogPost(comments=[
            ModeratorComments(author='mod1', message='message1'),
        ])
        self.assertRaises(ValidationError, post.save)
        post = BlogPost(comments=[ModeratorComments(author="mod1", message="message1")])
        with pytest.raises(ValidationError):
            post.save()

        # Mixed Entry Failure
        post = BlogPost(comments=[
            ModeratorComments(author='mod1', message='message1'),
            UserComments(author='user2', message='message2'),
        ])
        self.assertRaises(ValidationError, post.save)
        post = BlogPost(
            comments=[
                ModeratorComments(author="mod1", message="message1"),
                UserComments(author="user2", message="message2"),
            ]
        )
        with pytest.raises(ValidationError):
            post.save()

    def test_choices_validation_documents_inheritance(self):
        """
        Ensure fields with document choices validate given subclass of choice.
        """

        class Comments(EmbeddedDocument):
            meta = {
                'abstract': True
            }
            meta = {"abstract": True}
            author = StringField()
            message = StringField()

@ -282,14 +294,10 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase):
            pass

        class BlogPost(Document):
            comments = ListField(
                GenericEmbeddedDocumentField(choices=(Comments,))
            )
            comments = ListField(GenericEmbeddedDocumentField(choices=(Comments,)))

        # Save Valid EmbeddedDocument Type
        BlogPost(comments=[
            UserComments(author='user2', message='message2'),
        ]).save()
        BlogPost(comments=[UserComments(author="user2", message="message2")]).save()

    def test_query_generic_embedded_document_attribute(self):
        class AdminSettings(EmbeddedDocument):
@ -299,28 +307,30 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase):
            foo2 = StringField()

        class Person(Document):
            settings = GenericEmbeddedDocumentField(choices=(AdminSettings, NonAdminSettings))
            settings = GenericEmbeddedDocumentField(
                choices=(AdminSettings, NonAdminSettings)
            )

        Person.drop_collection()

        p1 = Person(settings=AdminSettings(foo1='bar1')).save()
        p2 = Person(settings=NonAdminSettings(foo2='bar2')).save()
        p1 = Person(settings=AdminSettings(foo1="bar1")).save()
        p2 = Person(settings=NonAdminSettings(foo2="bar2")).save()

        # Test non existing attribute
        with self.assertRaises(InvalidQueryError) as ctx_err:
            Person.objects(settings__notexist='bar').first()
        self.assertEqual(unicode(ctx_err.exception), u'Cannot resolve field "notexist"')
        with pytest.raises(InvalidQueryError) as exc_info:
            Person.objects(settings__notexist="bar").first()
        assert str(exc_info.value) == u'Cannot resolve field "notexist"'

        with self.assertRaises(LookUpError):
            Person.objects.only('settings.notexist')
        with pytest.raises(LookUpError):
            Person.objects.only("settings.notexist")

        # Test existing attribute
        self.assertEqual(Person.objects(settings__foo1='bar1').first().id, p1.id)
        self.assertEqual(Person.objects(settings__foo2='bar2').first().id, p2.id)
        assert Person.objects(settings__foo1="bar1").first().id == p1.id
        assert Person.objects(settings__foo2="bar2").first().id == p2.id

    def test_query_generic_embedded_document_attribute_with_inheritance(self):
        class BaseSettings(EmbeddedDocument):
            meta = {'allow_inheritance': True}
            meta = {"allow_inheritance": True}
            base_foo = StringField()

        class AdminSettings(BaseSettings):
@ -331,14 +341,14 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase):

        Person.drop_collection()

        p = Person(settings=AdminSettings(base_foo='basefoo', sub_foo='subfoo'))
        p = Person(settings=AdminSettings(base_foo="basefoo", sub_foo="subfoo"))
        p.save()

        # Test non existing attribute
        with self.assertRaises(InvalidQueryError) as ctx_err:
            self.assertEqual(Person.objects(settings__notexist='bar').first().id, p.id)
        self.assertEqual(unicode(ctx_err.exception), u'Cannot resolve field "notexist"')
        with pytest.raises(InvalidQueryError) as exc_info:
            assert Person.objects(settings__notexist="bar").first().id == p.id
        assert str(exc_info.value) == u'Cannot resolve field "notexist"'

        # Test existing attribute
        self.assertEqual(Person.objects(settings__base_foo='basefoo').first().id, p.id)
        self.assertEqual(Person.objects(settings__sub_foo='subfoo').first().id, p.id)
        assert Person.objects(settings__base_foo="basefoo").first().id == p.id
        assert Person.objects(settings__sub_foo="subfoo").first().id == p.id
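The choices argument used throughout this file restricts which embedded document types a GenericEmbeddedDocumentField will accept. A minimal sketch under the same assumptions (names are illustrative, not from this commit):

import pytest
from mongoengine import (
    Document,
    EmbeddedDocument,
    GenericEmbeddedDocumentField,
    StringField,
    ValidationError,
    connect,
)

connect("generic_embed_sketch")  # illustrative database name

class Car(EmbeddedDocument):
    name = StringField()

class Dish(EmbeddedDocument):
    food = StringField()

class Person(Document):
    # Only the listed embedded document types may be stored here.
    like = GenericEmbeddedDocumentField(choices=(Dish,))

person = Person(like=Dish(food="arroz"))
person.validate()  # Dish is an allowed choice

person.like = Car(name="Fiat")
with pytest.raises(ValidationError):
    person.validate()  # Car is rejected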
File diff suppressed because it is too large
@ -1,49 +1,51 @@
# -*- coding: utf-8 -*-
import copy
import os
import unittest
import tempfile
import unittest
from io import BytesIO

import gridfs
import six
import pytest

from nose.plugins.skip import SkipTest
from mongoengine import *
from mongoengine.connection import get_db
from mongoengine.python_support import StringIO

try:
    from PIL import Image

    HAS_PIL = True
except ImportError:
    HAS_PIL = False

from tests.utils import MongoDBTestCase

TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png')
TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), 'mongodb_leaf.png')
require_pil = pytest.mark.skipif(not HAS_PIL, reason="PIL not installed")

TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), "mongoengine.png")
TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), "mongodb_leaf.png")


def get_file(path):
    """Use a BytesIO instead of a file so this stays a one-liner
    and the file does not remain open"""
    bytes_io = StringIO()
    with open(path, 'rb') as f:
    bytes_io = BytesIO()
    with open(path, "rb") as f:
        bytes_io.write(f.read())
    bytes_io.seek(0)
    return bytes_io


class FileTest(MongoDBTestCase):

class TestFileField(MongoDBTestCase):
    def tearDown(self):
        self.db.drop_collection('fs.files')
        self.db.drop_collection('fs.chunks')
        self.db.drop_collection("fs.files")
        self.db.drop_collection("fs.chunks")

    def test_file_field_optional(self):
        # Make sure FileField is optional and not required
        class DemoFile(Document):
            the_file = FileField()

        DemoFile.objects.create()

    def test_file_fields(self):
@ -55,18 +57,21 @@ class FileTest(MongoDBTestCase):

        PutFile.drop_collection()

        text = six.b('Hello, World!')
        content_type = 'text/plain'
        text = "Hello, World!".encode("latin-1")
        content_type = "text/plain"

        putfile = PutFile()
        putfile.the_file.put(text, content_type=content_type, filename="hello")
        putfile.save()

        result = PutFile.objects.first()
        self.assertEqual(putfile, result)
        self.assertEqual("%s" % result.the_file, "<GridFSProxy: hello (%s)>" % result.the_file.grid_id)
        self.assertEqual(result.the_file.read(), text)
        self.assertEqual(result.the_file.content_type, content_type)
        assert putfile == result
        assert (
            "%s" % result.the_file
            == "<GridFSProxy: hello (%s)>" % result.the_file.grid_id
        )
        assert result.the_file.read() == text
        assert result.the_file.content_type == content_type
        result.the_file.delete()  # Remove file from GridFS
        PutFile.objects.delete()

@ -74,29 +79,30 @@ class FileTest(MongoDBTestCase):
        PutFile.drop_collection()

        putfile = PutFile()
        putstring = StringIO()
        putstring = BytesIO()
        putstring.write(text)
        putstring.seek(0)
        putfile.the_file.put(putstring, content_type=content_type)
        putfile.save()

        result = PutFile.objects.first()
        self.assertEqual(putfile, result)
        self.assertEqual(result.the_file.read(), text)
        self.assertEqual(result.the_file.content_type, content_type)
        assert putfile == result
        assert result.the_file.read() == text
        assert result.the_file.content_type == content_type
        result.the_file.delete()

    def test_file_fields_stream(self):
        """Ensure that file fields can be written to and their data retrieved
        """

        class StreamFile(Document):
            the_file = FileField()

        StreamFile.drop_collection()

        text = six.b('Hello, World!')
        more_text = six.b('Foo Bar')
        content_type = 'text/plain'
        text = "Hello, World!".encode("latin-1")
        more_text = "Foo Bar".encode("latin-1")
        content_type = "text/plain"

        streamfile = StreamFile()
        streamfile.the_file.new_file(content_type=content_type)
@ -106,32 +112,32 @@ class FileTest(MongoDBTestCase):
        streamfile.save()

        result = StreamFile.objects.first()
        self.assertEqual(streamfile, result)
        self.assertEqual(result.the_file.read(), text + more_text)
        self.assertEqual(result.the_file.content_type, content_type)
        assert streamfile == result
        assert result.the_file.read() == text + more_text
        assert result.the_file.content_type == content_type
        result.the_file.seek(0)
        self.assertEqual(result.the_file.tell(), 0)
        self.assertEqual(result.the_file.read(len(text)), text)
        self.assertEqual(result.the_file.tell(), len(text))
        self.assertEqual(result.the_file.read(len(more_text)), more_text)
        self.assertEqual(result.the_file.tell(), len(text + more_text))
        assert result.the_file.tell() == 0
        assert result.the_file.read(len(text)) == text
        assert result.the_file.tell() == len(text)
        assert result.the_file.read(len(more_text)) == more_text
        assert result.the_file.tell() == len(text + more_text)
        result.the_file.delete()

        # Ensure deleted file returns None
        self.assertTrue(result.the_file.read() is None)
        assert result.the_file.read() is None

    def test_file_fields_stream_after_none(self):
        """Ensure that a file field can be written to after it has been saved as
        None
        """

        class StreamFile(Document):
            the_file = FileField()

        StreamFile.drop_collection()

        text = six.b('Hello, World!')
        more_text = six.b('Foo Bar')
        content_type = 'text/plain'
        text = "Hello, World!".encode("latin-1")
        more_text = "Foo Bar".encode("latin-1")

        streamfile = StreamFile()
        streamfile.save()
@ -142,27 +148,26 @@ class FileTest(MongoDBTestCase):
        streamfile.save()

        result = StreamFile.objects.first()
        self.assertEqual(streamfile, result)
        self.assertEqual(result.the_file.read(), text + more_text)
        # self.assertEqual(result.the_file.content_type, content_type)
        assert streamfile == result
        assert result.the_file.read() == text + more_text
        # assert result.the_file.content_type == content_type
        result.the_file.seek(0)
        self.assertEqual(result.the_file.tell(), 0)
        self.assertEqual(result.the_file.read(len(text)), text)
        self.assertEqual(result.the_file.tell(), len(text))
        self.assertEqual(result.the_file.read(len(more_text)), more_text)
        self.assertEqual(result.the_file.tell(), len(text + more_text))
        assert result.the_file.tell() == 0
        assert result.the_file.read(len(text)) == text
        assert result.the_file.tell() == len(text)
        assert result.the_file.read(len(more_text)) == more_text
        assert result.the_file.tell() == len(text + more_text)
        result.the_file.delete()

        # Ensure deleted file returns None
        self.assertTrue(result.the_file.read() is None)
        assert result.the_file.read() is None

    def test_file_fields_set(self):

        class SetFile(Document):
            the_file = FileField()

        text = six.b('Hello, World!')
        more_text = six.b('Foo Bar')
        text = "Hello, World!".encode("latin-1")
        more_text = "Foo Bar".encode("latin-1")

        SetFile.drop_collection()
@ -171,27 +176,26 @@ class FileTest(MongoDBTestCase):
        setfile.save()

        result = SetFile.objects.first()
        self.assertEqual(setfile, result)
        self.assertEqual(result.the_file.read(), text)
        assert setfile == result
        assert result.the_file.read() == text

        # Try replacing file with new one
        result.the_file.replace(more_text)
        result.save()

        result = SetFile.objects.first()
        self.assertEqual(setfile, result)
        self.assertEqual(result.the_file.read(), more_text)
        assert setfile == result
        assert result.the_file.read() == more_text
        result.the_file.delete()

    def test_file_field_no_default(self):

        class GridDocument(Document):
            the_file = FileField()

        GridDocument.drop_collection()

        with tempfile.TemporaryFile() as f:
            f.write(six.b("Hello World!"))
            f.write("Hello World!".encode("latin-1"))
            f.flush()

            # Test without default
@ -199,34 +203,35 @@ class FileTest(MongoDBTestCase):
            doc_a.save()

            doc_b = GridDocument.objects.with_id(doc_a.id)
            doc_b.the_file.replace(f, filename='doc_b')
            doc_b.the_file.replace(f, filename="doc_b")
            doc_b.save()
            self.assertNotEqual(doc_b.the_file.grid_id, None)
            assert doc_b.the_file.grid_id is not None

            # Test it matches
            doc_c = GridDocument.objects.with_id(doc_b.id)
            self.assertEqual(doc_b.the_file.grid_id, doc_c.the_file.grid_id)
            assert doc_b.the_file.grid_id == doc_c.the_file.grid_id

            # Test with default
            doc_d = GridDocument(the_file=six.b(''))
            doc_d = GridDocument(the_file="".encode("latin-1"))
            doc_d.save()

            doc_e = GridDocument.objects.with_id(doc_d.id)
            self.assertEqual(doc_d.the_file.grid_id, doc_e.the_file.grid_id)
            assert doc_d.the_file.grid_id == doc_e.the_file.grid_id

            doc_e.the_file.replace(f, filename='doc_e')
            doc_e.the_file.replace(f, filename="doc_e")
            doc_e.save()

            doc_f = GridDocument.objects.with_id(doc_e.id)
            self.assertEqual(doc_e.the_file.grid_id, doc_f.the_file.grid_id)
            assert doc_e.the_file.grid_id == doc_f.the_file.grid_id

        db = GridDocument._get_db()
        grid_fs = gridfs.GridFS(db)
        self.assertEqual(['doc_b', 'doc_e'], grid_fs.list())
        assert ["doc_b", "doc_e"] == grid_fs.list()

    def test_file_uniqueness(self):
        """Ensure that each instance of a FileField is unique
        """

        class TestFile(Document):
            name = StringField()
            the_file = FileField()
@ -234,15 +239,15 @@ class FileTest(MongoDBTestCase):
        # First instance
        test_file = TestFile()
        test_file.name = "Hello, World!"
        test_file.the_file.put(six.b('Hello, World!'))
        test_file.the_file.put("Hello, World!".encode("latin-1"))
        test_file.save()

        # Second instance
        test_file_dupe = TestFile()
        data = test_file_dupe.the_file.read()  # Should be None

        self.assertNotEqual(test_file.name, test_file_dupe.name)
        self.assertNotEqual(test_file.the_file.read(), data)
        assert test_file.name != test_file_dupe.name
        assert test_file.the_file.read() != data

        TestFile.drop_collection()
@ -255,61 +260,68 @@ class FileTest(MongoDBTestCase):
            photo = FileField()

        Animal.drop_collection()
        marmot = Animal(genus='Marmota', family='Sciuridae')
        marmot = Animal(genus="Marmota", family="Sciuridae")

        marmot_photo_content = get_file(TEST_IMAGE_PATH)  # Retrieve a photo from disk
        marmot.photo.put(marmot_photo_content, content_type='image/jpeg', foo='bar')
        marmot.photo.put(marmot_photo_content, content_type="image/jpeg", foo="bar")
        marmot.photo.close()
        marmot.save()

        marmot = Animal.objects.get()
        self.assertEqual(marmot.photo.content_type, 'image/jpeg')
        self.assertEqual(marmot.photo.foo, 'bar')
        assert marmot.photo.content_type == "image/jpeg"
        assert marmot.photo.foo == "bar"

    def test_file_reassigning(self):
        class TestFile(Document):
            the_file = FileField()

        TestFile.drop_collection()

        test_file = TestFile(the_file=get_file(TEST_IMAGE_PATH)).save()
        self.assertEqual(test_file.the_file.get().length, 8313)
        assert test_file.the_file.get().length == 8313

        test_file = TestFile.objects.first()
        test_file.the_file = get_file(TEST_IMAGE2_PATH)
        test_file.save()
        self.assertEqual(test_file.the_file.get().length, 4971)
        assert test_file.the_file.get().length == 4971

    def test_file_boolean(self):
        """Ensure that a boolean test of a FileField indicates its presence
        """

        class TestFile(Document):
            the_file = FileField()

        TestFile.drop_collection()

        test_file = TestFile()
        self.assertFalse(bool(test_file.the_file))
        test_file.the_file.put(six.b('Hello, World!'), content_type='text/plain')
        assert not bool(test_file.the_file)
        test_file.the_file.put(
            "Hello, World!".encode("latin-1"), content_type="text/plain"
        )
        test_file.save()
        self.assertTrue(bool(test_file.the_file))
        assert bool(test_file.the_file)

        test_file = TestFile.objects.first()
        self.assertEqual(test_file.the_file.content_type, "text/plain")
        assert test_file.the_file.content_type == "text/plain"

    def test_file_cmp(self):
        """Test comparing against other types"""

        class TestFile(Document):
            the_file = FileField()

        test_file = TestFile()
        self.assertNotIn(test_file.the_file, [{"test": 1}])
        assert test_file.the_file not in [{"test": 1}]

    def test_file_disk_space(self):
        """Test disk space usage when we delete/replace a file"""

        class TestFile(Document):
            the_file = FileField()

        text = six.b('Hello, World!')
        content_type = 'text/plain'
        text = "Hello, World!".encode("latin-1")
        content_type = "text/plain"

        testfile = TestFile()
        testfile.the_file.put(text, content_type=content_type, filename="hello")
@ -320,16 +332,16 @@ class FileTest(MongoDBTestCase):

        files = db.fs.files.find()
        chunks = db.fs.chunks.find()
        self.assertEqual(len(list(files)), 1)
        self.assertEqual(len(list(chunks)), 1)
        assert len(list(files)) == 1
        assert len(list(chunks)) == 1

        # Deleting the document should delete the files
        testfile.delete()

        files = db.fs.files.find()
        chunks = db.fs.chunks.find()
        self.assertEqual(len(list(files)), 0)
        self.assertEqual(len(list(chunks)), 0)
        assert len(list(files)) == 0
        assert len(list(chunks)) == 0

        # Test case where we don't store a file in the first place
        testfile = TestFile()
@ -337,48 +349,46 @@ class FileTest(MongoDBTestCase):

        files = db.fs.files.find()
        chunks = db.fs.chunks.find()
        self.assertEqual(len(list(files)), 0)
        self.assertEqual(len(list(chunks)), 0)
        assert len(list(files)) == 0
        assert len(list(chunks)) == 0

        testfile.delete()

        files = db.fs.files.find()
        chunks = db.fs.chunks.find()
        self.assertEqual(len(list(files)), 0)
        self.assertEqual(len(list(chunks)), 0)
        assert len(list(files)) == 0
        assert len(list(chunks)) == 0

        # Test case where we overwrite the file
        testfile = TestFile()
        testfile.the_file.put(text, content_type=content_type, filename="hello")
        testfile.save()

        text = six.b('Bonjour, World!')
        text = "Bonjour, World!".encode("latin-1")
        testfile.the_file.replace(text, content_type=content_type, filename="hello")
        testfile.save()

        files = db.fs.files.find()
        chunks = db.fs.chunks.find()
        self.assertEqual(len(list(files)), 1)
        self.assertEqual(len(list(chunks)), 1)
        assert len(list(files)) == 1
        assert len(list(chunks)) == 1

        testfile.delete()

        files = db.fs.files.find()
        chunks = db.fs.chunks.find()
        self.assertEqual(len(list(files)), 0)
        self.assertEqual(len(list(chunks)), 0)
        assert len(list(files)) == 0
        assert len(list(chunks)) == 0
    @require_pil
    def test_image_field(self):
        if not HAS_PIL:
            raise SkipTest('PIL not installed')

        class TestImage(Document):
            image = ImageField()

        TestImage.drop_collection()

        with tempfile.TemporaryFile() as f:
            f.write(six.b("Hello World!"))
            f.write("Hello World!".encode("latin-1"))
            f.flush()

            t = TestImage()
@ -386,7 +396,7 @@ class FileTest(MongoDBTestCase):
                t.image.put(f)
                self.fail("Should have raised an invalidation error")
            except ValidationError as e:
                self.assertEqual("%s" % e, "Invalid image: cannot identify image file %s" % f)
                assert "%s" % e == "Invalid image: cannot identify image file %s" % f

        t = TestImage()
        t.image.put(get_file(TEST_IMAGE_PATH))
@ -394,34 +404,31 @@ class FileTest(MongoDBTestCase):

        t = TestImage.objects.first()

        self.assertEqual(t.image.format, 'PNG')
        assert t.image.format == "PNG"

        w, h = t.image.size
        self.assertEqual(w, 371)
        self.assertEqual(h, 76)
        assert w == 371
        assert h == 76

        t.image.delete()

    @require_pil
    def test_image_field_reassigning(self):
        if not HAS_PIL:
            raise SkipTest('PIL not installed')

        class TestFile(Document):
            the_file = ImageField()

        TestFile.drop_collection()

        test_file = TestFile(the_file=get_file(TEST_IMAGE_PATH)).save()
        self.assertEqual(test_file.the_file.size, (371, 76))
        assert test_file.the_file.size == (371, 76)

        test_file = TestFile.objects.first()
        test_file.the_file = get_file(TEST_IMAGE2_PATH)
        test_file.save()
        self.assertEqual(test_file.the_file.size, (45, 101))
        assert test_file.the_file.size == (45, 101)

    @require_pil
    def test_image_field_resize(self):
        if not HAS_PIL:
            raise SkipTest('PIL not installed')

        class TestImage(Document):
            image = ImageField(size=(185, 37))
@ -433,18 +440,16 @@ class FileTest(MongoDBTestCase):

        t = TestImage.objects.first()

        self.assertEqual(t.image.format, 'PNG')
        assert t.image.format == "PNG"
        w, h = t.image.size

        self.assertEqual(w, 185)
        self.assertEqual(h, 37)
        assert w == 185
        assert h == 37

        t.image.delete()

    @require_pil
    def test_image_field_resize_force(self):
        if not HAS_PIL:
            raise SkipTest('PIL not installed')

        class TestImage(Document):
            image = ImageField(size=(185, 37, True))

@ -456,18 +461,16 @@ class FileTest(MongoDBTestCase):

        t = TestImage.objects.first()

        self.assertEqual(t.image.format, 'PNG')
        assert t.image.format == "PNG"
        w, h = t.image.size

        self.assertEqual(w, 185)
        self.assertEqual(h, 37)
        assert w == 185
        assert h == 37

        t.image.delete()

    @require_pil
    def test_image_field_thumbnail(self):
        if not HAS_PIL:
            raise SkipTest('PIL not installed')

        class TestImage(Document):
            image = ImageField(thumbnail_size=(92, 18))

@ -479,19 +482,18 @@ class FileTest(MongoDBTestCase):

        t = TestImage.objects.first()

        self.assertEqual(t.image.thumbnail.format, 'PNG')
        self.assertEqual(t.image.thumbnail.width, 92)
        self.assertEqual(t.image.thumbnail.height, 18)
        assert t.image.thumbnail.format == "PNG"
        assert t.image.thumbnail.width == 92
        assert t.image.thumbnail.height == 18

        t.image.delete()

    def test_file_multidb(self):
        register_connection('test_files', 'test_files')
        register_connection("test_files", "test_files")

        class TestFile(Document):
            name = StringField()
            the_file = FileField(db_alias="test_files",
                                 collection_name="macumba")
            the_file = FileField(db_alias="test_files", collection_name="macumba")

        TestFile.drop_collection()
@ -502,23 +504,21 @@ class FileTest(MongoDBTestCase):
        # First instance
        test_file = TestFile()
        test_file.name = "Hello, World!"
        test_file.the_file.put(six.b('Hello, World!'),
                               name="hello.txt")
        test_file.the_file.put("Hello, World!".encode("latin-1"), name="hello.txt")
        test_file.save()

        data = get_db("test_files").macumba.files.find_one()
        self.assertEqual(data.get('name'), 'hello.txt')
        assert data.get("name") == "hello.txt"

        test_file = TestFile.objects.first()
        self.assertEqual(test_file.the_file.read(), six.b('Hello, World!'))
        assert test_file.the_file.read() == "Hello, World!".encode("latin-1")

        test_file = TestFile.objects.first()
        test_file.the_file = six.b('HELLO, WORLD!')
        test_file.the_file = "Hello, World!".encode("latin-1")
        test_file.save()

        test_file = TestFile.objects.first()
        self.assertEqual(test_file.the_file.read(),
                         six.b('HELLO, WORLD!'))
        assert test_file.the_file.read() == "Hello, World!".encode("latin-1")

    def test_copyable(self):
        class PutFile(Document):
@ -526,8 +526,8 @@ class FileTest(MongoDBTestCase):

        PutFile.drop_collection()

        text = six.b('Hello, World!')
        content_type = 'text/plain'
        text = "Hello, World!".encode("latin-1")
        content_type = "text/plain"

        putfile = PutFile()
        putfile.the_file.put(text, content_type=content_type)
@ -536,14 +536,11 @@ class FileTest(MongoDBTestCase):
        class TestFile(Document):
            name = StringField()

        self.assertEqual(putfile, copy.copy(putfile))
        self.assertEqual(putfile, copy.deepcopy(putfile))
        assert putfile == copy.copy(putfile)
        assert putfile == copy.deepcopy(putfile)

    @require_pil
    def test_get_image_by_grid_id(self):

        if not HAS_PIL:
            raise SkipTest('PIL not installed')

        class TestImage(Document):

            image1 = ImageField()
@ -559,8 +556,7 @@ class FileTest(MongoDBTestCase):
        test = TestImage.objects.first()
        grid_id = test.image1.grid_id

        self.assertEqual(1, TestImage.objects(Q(image1=grid_id)
                                              or Q(image2=grid_id)).count())
        assert 1 == TestImage.objects(Q(image1=grid_id) or Q(image2=grid_id)).count()

    def test_complex_field_filefield(self):
        """Ensure you can add meta data to file"""
@ -571,21 +567,21 @@ class FileTest(MongoDBTestCase):
        photos = ListField(FileField())

        Animal.drop_collection()
        marmot = Animal(genus='Marmota', family='Sciuridae')
        marmot = Animal(genus="Marmota", family="Sciuridae")

        with open(TEST_IMAGE_PATH, 'rb') as marmot_photo:  # Retrieve a photo from disk
            photos_field = marmot._fields['photos'].field
            new_proxy = photos_field.get_proxy_obj('photos', marmot)
            new_proxy.put(marmot_photo, content_type='image/jpeg', foo='bar')
        with open(TEST_IMAGE_PATH, "rb") as marmot_photo:  # Retrieve a photo from disk
            photos_field = marmot._fields["photos"].field
            new_proxy = photos_field.get_proxy_obj("photos", marmot)
            new_proxy.put(marmot_photo, content_type="image/jpeg", foo="bar")

        marmot.photos.append(new_proxy)
        marmot.save()

        marmot = Animal.objects.get()
        self.assertEqual(marmot.photos[0].content_type, 'image/jpeg')
        self.assertEqual(marmot.photos[0].foo, 'bar')
        self.assertEqual(marmot.photos[0].get().length, 8313)
        assert marmot.photos[0].content_type == "image/jpeg"
        assert marmot.photos[0].foo == "bar"
        assert marmot.photos[0].get().length == 8313


if __name__ == '__main__':
if __name__ == "__main__":
    unittest.main()
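The FileField round-trips above all follow the same GridFS put/read/delete cycle. A minimal standalone sketch, assuming a local MongoDB (names are illustrative, not from this commit):

from mongoengine import Document, FileField, connect

connect("filefield_sketch")  # illustrative database name

class Attachment(Document):
    # Backed by GridFS: put() streams bytes into fs.files / fs.chunks.
    data = FileField()

Attachment.drop_collection()
att = Attachment()
att.data.put(b"Hello, World!", content_type="text/plain", filename="hello")
att.save()

stored = Attachment.objects.first()
assert stored.data.read() == b"Hello, World!"
assert stored.data.content_type == "text/plain"
stored.data.delete()  # removes the underlying GridFS file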
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
import six
import pytest

from mongoengine import *

@@ -7,7 +7,6 @@ from tests.utils import MongoDBTestCase


class TestFloatField(MongoDBTestCase):

def test_float_ne_operator(self):
class TestDocument(Document):
float_fld = FloatField()
@@ -17,12 +16,13 @@ class TestFloatField(MongoDBTestCase):
TestDocument(float_fld=None).save()
TestDocument(float_fld=1).save()

self.assertEqual(1, TestDocument.objects(float_fld__ne=None).count())
self.assertEqual(1, TestDocument.objects(float_fld__ne=1).count())
assert 1 == TestDocument.objects(float_fld__ne=None).count()
assert 1 == TestDocument.objects(float_fld__ne=1).count()

def test_validation(self):
"""Ensure that invalid values cannot be assigned to float fields.
"""

class Person(Document):
height = FloatField(min_value=0.1, max_value=3.5)

@@ -33,26 +33,30 @@ class TestFloatField(MongoDBTestCase):
person.height = 1.89
person.validate()

person.height = '2.0'
self.assertRaises(ValidationError, person.validate)
person.height = "2.0"
with pytest.raises(ValidationError):
person.validate()

person.height = 0.01
self.assertRaises(ValidationError, person.validate)
with pytest.raises(ValidationError):
person.validate()

person.height = 4.0
self.assertRaises(ValidationError, person.validate)
with pytest.raises(ValidationError):
person.validate()

person_2 = Person(height='something invalid')
self.assertRaises(ValidationError, person_2.validate)
person_2 = Person(height="something invalid")
with pytest.raises(ValidationError):
person_2.validate()

big_person = BigPerson()

for value, value_type in enumerate(six.integer_types):
big_person.height = value_type(value)
big_person.validate()
big_person.height = int(0)
big_person.validate()

big_person.height = 2 ** 500
big_person.validate()

big_person.height = 2 ** 100000  # Too big for a float value
self.assertRaises(ValidationError, big_person.validate)
with pytest.raises(ValidationError):
big_person.validate()
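The mechanical rewrite running through this file — self.assertRaises(ValidationError, person.validate) becoming a with pytest.raises(ValidationError): block — is behavior-preserving. A small illustration of the equivalence, assuming only mongoengine and pytest (validate() needs no database access):

    import pytest

    from mongoengine import Document, FloatField, ValidationError

    class Person(Document):
        height = FloatField(min_value=0.1, max_value=3.5)

    person = Person(height=4.0)  # above max_value

    # Old style: self.assertRaises(ValidationError, person.validate)
    # New style: the context manager also exposes the raised exception.
    with pytest.raises(ValidationError) as exc_info:
        person.validate()
    assert "height" in exc_info.value.to_dict()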
@@ -2,30 +2,23 @@
import unittest

from mongoengine import *
from mongoengine.connection import get_db

__all__ = ("GeoFieldTest", )
from tests.utils import MongoDBTestCase


class GeoFieldTest(unittest.TestCase):

def setUp(self):
connect(db='mongoenginetest')
self.db = get_db()

class TestGeoField(MongoDBTestCase):
def _test_for_expected_error(self, Cls, loc, expected):
try:
Cls(loc=loc).validate()
self.fail('Should not validate the location {0}'.format(loc))
self.fail("Should not validate the location {0}".format(loc))
except ValidationError as e:
self.assertEqual(expected, e.to_dict()['loc'])
assert expected == e.to_dict()["loc"]

def test_geopoint_validation(self):
class Location(Document):
loc = GeoPointField()

invalid_coords = [{"x": 1, "y": 2}, 5, "a"]
expected = 'GeoPointField can only accept tuples or lists of (x, y)'
expected = "GeoPointField can only accept tuples or lists of (x, y)"

for coord in invalid_coords:
self._test_for_expected_error(Location, coord, expected)
@@ -40,7 +33,7 @@ class GeoFieldTest(unittest.TestCase):
expected = "Both values (%s) in point must be float or int" % repr(coord)
self._test_for_expected_error(Location, coord, expected)

invalid_coords = [21, 4, 'a']
invalid_coords = [21, 4, "a"]
for coord in invalid_coords:
expected = "GeoPointField can only accept tuples or lists of (x, y)"
self._test_for_expected_error(Location, coord, expected)
@@ -50,7 +43,9 @@ class GeoFieldTest(unittest.TestCase):
loc = PointField()

invalid_coords = {"x": 1, "y": 2}
expected = 'PointField can only accept a valid GeoJson dictionary or lists of (x, y)'
expected = (
"PointField can only accept a valid GeoJson dictionary or lists of (x, y)"
)
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = {"type": "MadeUp", "coordinates": []}
@@ -77,19 +72,16 @@ class GeoFieldTest(unittest.TestCase):
self._test_for_expected_error(Location, coord, expected)

Location(loc=[1, 2]).validate()
Location(loc={
"type": "Point",
"coordinates": [
81.4471435546875,
23.61432859499169
]}).validate()
Location(
loc={"type": "Point", "coordinates": [81.4471435546875, 23.61432859499169]}
).validate()

def test_linestring_validation(self):
class Location(Document):
loc = LineStringField()

invalid_coords = {"x": 1, "y": 2}
expected = 'LineStringField can only accept a valid GeoJson dictionary or lists of (x, y)'
expected = "LineStringField can only accept a valid GeoJson dictionary or lists of (x, y)"
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
@@ -97,7 +89,9 @@ class GeoFieldTest(unittest.TestCase):
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = {"type": "LineString", "coordinates": [[1, 2, 3]]}
expected = "Invalid LineString:\nValue ([1, 2, 3]) must be a two-dimensional point"
expected = (
"Invalid LineString:\nValue ([1, 2, 3]) must be a two-dimensional point"
)
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = [5, "a"]
@@ -105,16 +99,25 @@ class GeoFieldTest(unittest.TestCase):
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = [[1]]
expected = "Invalid LineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0])
expected = (
"Invalid LineString:\nValue (%s) must be a two-dimensional point"
% repr(invalid_coords[0])
)
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = [[1, 2, 3]]
expected = "Invalid LineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0])
expected = (
"Invalid LineString:\nValue (%s) must be a two-dimensional point"
% repr(invalid_coords[0])
)
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = [[[{}, {}]], [("a", "b")]]
for coord in invalid_coords:
expected = "Invalid LineString:\nBoth values (%s) in point must be float or int" % repr(coord[0])
expected = (
"Invalid LineString:\nBoth values (%s) in point must be float or int"
% repr(coord[0])
)
self._test_for_expected_error(Location, coord, expected)

Location(loc=[[1, 2], [3, 4], [5, 6], [1, 2]]).validate()
@@ -124,7 +127,9 @@ class GeoFieldTest(unittest.TestCase):
loc = PolygonField()

invalid_coords = {"x": 1, "y": 2}
expected = 'PolygonField can only accept a valid GeoJson dictionary or lists of (x, y)'
expected = (
"PolygonField can only accept a valid GeoJson dictionary or lists of (x, y)"
)
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
@@ -136,7 +141,9 @@ class GeoFieldTest(unittest.TestCase):
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = [[[5, "a"]]]
expected = "Invalid Polygon:\nBoth values ([5, 'a']) in point must be float or int"
expected = (
"Invalid Polygon:\nBoth values ([5, 'a']) in point must be float or int"
)
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = [[[]]]
@@ -162,7 +169,7 @@ class GeoFieldTest(unittest.TestCase):
loc = MultiPointField()

invalid_coords = {"x": 1, "y": 2}
expected = 'MultiPointField can only accept a valid GeoJson dictionary or lists of (x, y)'
expected = "MultiPointField can only accept a valid GeoJson dictionary or lists of (x, y)"
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
@@ -188,19 +195,19 @@ class GeoFieldTest(unittest.TestCase):
self._test_for_expected_error(Location, coord, expected)

Location(loc=[[1, 2]]).validate()
Location(loc={
"type": "MultiPoint",
"coordinates": [
[1, 2],
[81.4471435546875, 23.61432859499169]
]}).validate()
Location(
loc={
"type": "MultiPoint",
"coordinates": [[1, 2], [81.4471435546875, 23.61432859499169]],
}
).validate()

def test_multilinestring_validation(self):
class Location(Document):
loc = MultiLineStringField()

invalid_coords = {"x": 1, "y": 2}
expected = 'MultiLineStringField can only accept a valid GeoJson dictionary or lists of (x, y)'
expected = "MultiLineStringField can only accept a valid GeoJson dictionary or lists of (x, y)"
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
@@ -216,16 +223,25 @@ class GeoFieldTest(unittest.TestCase):
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = [[[1]]]
expected = "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0][0])
expected = (
"Invalid MultiLineString:\nValue (%s) must be a two-dimensional point"
% repr(invalid_coords[0][0])
)
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = [[[1, 2, 3]]]
expected = "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0][0])
expected = (
"Invalid MultiLineString:\nValue (%s) must be a two-dimensional point"
% repr(invalid_coords[0][0])
)
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = [[[[{}, {}]]], [[("a", "b")]]]
for coord in invalid_coords:
expected = "Invalid MultiLineString:\nBoth values (%s) in point must be float or int" % repr(coord[0][0])
expected = (
"Invalid MultiLineString:\nBoth values (%s) in point must be float or int"
% repr(coord[0][0])
)
self._test_for_expected_error(Location, coord, expected)

Location(loc=[[[1, 2], [3, 4], [5, 6], [1, 2]]]).validate()
@@ -235,7 +251,7 @@ class GeoFieldTest(unittest.TestCase):
loc = MultiPolygonField()

invalid_coords = {"x": 1, "y": 2}
expected = 'MultiPolygonField can only accept a valid GeoJson dictionary or lists of (x, y)'
expected = "MultiPolygonField can only accept a valid GeoJson dictionary or lists of (x, y)"
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
@@ -243,7 +259,9 @@ class GeoFieldTest(unittest.TestCase):
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = {"type": "MultiPolygon", "coordinates": [[[[1, 2, 3]]]]}
expected = "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point"
expected = (
"Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point"
)
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = [[[[5, "a"]]]]
@@ -255,7 +273,9 @@ class GeoFieldTest(unittest.TestCase):
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = [[[[1, 2, 3]]]]
expected = "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point"
expected = (
"Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point"
)
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = [[[[{}, {}]]], [[("a", "b")]]]
@@ -263,7 +283,9 @@ class GeoFieldTest(unittest.TestCase):
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = [[[[1, 2], [3, 4]]]]
expected = "Invalid MultiPolygon:\nLineStrings must start and end at the same point"
expected = (
"Invalid MultiPolygon:\nLineStrings must start and end at the same point"
)
self._test_for_expected_error(Location, invalid_coords, expected)

Location(loc=[[[[1, 2], [3, 4], [5, 6], [1, 2]]]]).validate()
@@ -271,17 +293,19 @@ class GeoFieldTest(unittest.TestCase):
def test_indexes_geopoint(self):
"""Ensure that indexes are created automatically for GeoPointFields.
"""

class Event(Document):
title = StringField()
location = GeoPointField()

geo_indicies = Event._geo_indices()
self.assertEqual(geo_indicies, [{'fields': [('location', '2d')]}])
assert geo_indicies == [{"fields": [("location", "2d")]}]

def test_geopoint_embedded_indexes(self):
"""Ensure that indexes are created automatically for GeoPointFields on
embedded documents.
"""

class Venue(EmbeddedDocument):
location = GeoPointField()
name = StringField()
@@ -291,11 +315,12 @@ class GeoFieldTest(unittest.TestCase):
venue = EmbeddedDocumentField(Venue)

geo_indicies = Event._geo_indices()
self.assertEqual(geo_indicies, [{'fields': [('venue.location', '2d')]}])
assert geo_indicies == [{"fields": [("venue.location", "2d")]}]

def test_indexes_2dsphere(self):
"""Ensure that indexes are created automatically for GeoPointFields.
"""

class Event(Document):
title = StringField()
point = PointField()
@@ -303,13 +328,14 @@ class GeoFieldTest(unittest.TestCase):
polygon = PolygonField()

geo_indicies = Event._geo_indices()
self.assertIn({'fields': [('line', '2dsphere')]}, geo_indicies)
self.assertIn({'fields': [('polygon', '2dsphere')]}, geo_indicies)
self.assertIn({'fields': [('point', '2dsphere')]}, geo_indicies)
assert {"fields": [("line", "2dsphere")]} in geo_indicies
assert {"fields": [("polygon", "2dsphere")]} in geo_indicies
assert {"fields": [("point", "2dsphere")]} in geo_indicies

def test_indexes_2dsphere_embedded(self):
"""Ensure that indexes are created automatically for GeoPointFields.
"""

class Venue(EmbeddedDocument):
name = StringField()
point = PointField()
@@ -321,12 +347,11 @@ class GeoFieldTest(unittest.TestCase):
venue = EmbeddedDocumentField(Venue)

geo_indicies = Event._geo_indices()
self.assertIn({'fields': [('venue.line', '2dsphere')]}, geo_indicies)
self.assertIn({'fields': [('venue.polygon', '2dsphere')]}, geo_indicies)
self.assertIn({'fields': [('venue.point', '2dsphere')]}, geo_indicies)
assert {"fields": [("venue.line", "2dsphere")]} in geo_indicies
assert {"fields": [("venue.polygon", "2dsphere")]} in geo_indicies
assert {"fields": [("venue.point", "2dsphere")]} in geo_indicies

def test_geo_indexes_recursion(self):

class Location(Document):
name = StringField()
location = GeoPointField()
@@ -338,14 +363,14 @@ class GeoFieldTest(unittest.TestCase):
Location.drop_collection()
Parent.drop_collection()

Parent(name='Berlin').save()
Parent(name="Berlin").save()
info = Parent._get_collection().index_information()
self.assertNotIn('location_2d', info)
assert "location_2d" not in info
info = Location._get_collection().index_information()
self.assertIn('location_2d', info)
assert "location_2d" in info

self.assertEqual(len(Parent._geo_indices()), 0)
self.assertEqual(len(Location._geo_indices()), 1)
assert len(Parent._geo_indices()) == 0
assert len(Location._geo_indices()) == 1

def test_geo_indexes_auto_index(self):

@@ -354,18 +379,18 @@ class GeoFieldTest(unittest.TestCase):
location = PointField(auto_index=False)
datetime = DateTimeField()

meta = {
'indexes': [[("location", "2dsphere"), ("datetime", 1)]]
}
meta = {"indexes": [[("location", "2dsphere"), ("datetime", 1)]]}

self.assertEqual([], Log._geo_indices())
assert Log._geo_indices() == []

Log.drop_collection()
Log.ensure_indexes()

info = Log._get_collection().index_information()
self.assertEqual(info["location_2dsphere_datetime_1"]["key"],
[('location', '2dsphere'), ('datetime', 1)])
assert info["location_2dsphere_datetime_1"]["key"] == [
("location", "2dsphere"),
("datetime", 1),
]

# Test listing explicitly
class Log(Document):
@@ -373,20 +398,20 @@ class GeoFieldTest(unittest.TestCase):
datetime = DateTimeField()

meta = {
'indexes': [
{'fields': [("location", "2dsphere"), ("datetime", 1)]}
]
"indexes": [{"fields": [("location", "2dsphere"), ("datetime", 1)]}]
}

self.assertEqual([], Log._geo_indices())
assert Log._geo_indices() == []

Log.drop_collection()
Log.ensure_indexes()

info = Log._get_collection().index_information()
self.assertEqual(info["location_2dsphere_datetime_1"]["key"],
[('location', '2dsphere'), ('datetime', 1)])
assert info["location_2dsphere_datetime_1"]["key"] == [
("location", "2dsphere"),
("datetime", 1),
]


if __name__ == '__main__':
if __name__ == "__main__":
unittest.main()
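As the geo hunks above show, PointField (and its Line/Polygon siblings) accept either a bare [x, y] list or a full GeoJSON dictionary, and reject unknown GeoJSON types at validation time. A minimal sketch — illustrative only, with a made-up database name:

    from mongoengine import Document, PointField, ValidationError, connect

    connect("geo_demo")  # hypothetical database name

    class Location(Document):
        loc = PointField()

    # Both spellings validate to the same stored GeoJSON Point.
    Location(loc=[81.4471435546875, 23.61432859499169]).validate()
    Location(
        loc={"type": "Point", "coordinates": [81.4471435546875, 23.61432859499169]}
    ).validate()

    # An unknown "type" key fails validation, as the tests assert.
    try:
        Location(loc={"type": "MadeUp", "coordinates": []}).validate()
    except ValidationError as e:
        print(e.to_dict()["loc"])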
@@ -1,14 +1,16 @@
# -*- coding: utf-8 -*-
import pytest

from mongoengine import *

from tests.utils import MongoDBTestCase


class TestIntField(MongoDBTestCase):

def test_int_validation(self):
"""Ensure that invalid values cannot be assigned to int fields.
"""

class Person(Document):
age = IntField(min_value=0, max_value=110)

@@ -23,11 +25,14 @@ class TestIntField(MongoDBTestCase):
person.validate()

person.age = -1
self.assertRaises(ValidationError, person.validate)
with pytest.raises(ValidationError):
person.validate()
person.age = 120
self.assertRaises(ValidationError, person.validate)
person.age = 'ten'
self.assertRaises(ValidationError, person.validate)
with pytest.raises(ValidationError):
person.validate()
person.age = "ten"
with pytest.raises(ValidationError):
person.validate()

def test_ne_operator(self):
class TestDocument(Document):
@@ -38,5 +43,5 @@ class TestIntField(MongoDBTestCase):
TestDocument(int_fld=None).save()
TestDocument(int_fld=1).save()

self.assertEqual(1, TestDocument.objects(int_fld__ne=None).count())
self.assertEqual(1, TestDocument.objects(int_fld__ne=1).count())
assert 1 == TestDocument.objects(int_fld__ne=None).count()
assert 1 == TestDocument.objects(int_fld__ne=1).count()
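The __ne tests above hinge on how a null field is matched: int_fld__ne=None finds the document whose field holds a real value, while int_fld__ne=1 finds the one stored as null. Mirroring the test outside a TestCase (made-up database name):

    from mongoengine import Document, IntField, connect

    connect("ne_demo")  # hypothetical database name

    class TestDocument(Document):
        int_fld = IntField()

    TestDocument.drop_collection()
    TestDocument(int_fld=None).save()
    TestDocument(int_fld=1).save()

    # Exactly one document matches each __ne filter.
    assert TestDocument.objects(int_fld__ne=None).count() == 1
    assert TestDocument.objects(int_fld__ne=1).count() == 1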
@@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
from bson import DBRef, ObjectId
import pytest

from mongoengine import *
from mongoengine.base import LazyReference
@@ -11,7 +12,8 @@ class TestLazyReferenceField(MongoDBTestCase):
def test_lazy_reference_config(self):
# Make sure ReferenceField only accepts a document class or a string
# with a document class name.
self.assertRaises(ValidationError, LazyReferenceField, EmbeddedDocument)
with pytest.raises(ValidationError):
LazyReferenceField(EmbeddedDocument)

def test___repr__(self):
class Animal(Document):
@@ -25,7 +27,7 @@ class TestLazyReferenceField(MongoDBTestCase):

animal = Animal()
oc = Ocurrence(animal=animal)
self.assertIn('LazyReference', repr(oc.animal))
assert "LazyReference" in repr(oc.animal)

def test___getattr___unknown_attr_raises_attribute_error(self):
class Animal(Document):
@@ -39,7 +41,7 @@ class TestLazyReferenceField(MongoDBTestCase):

animal = Animal().save()
oc = Ocurrence(animal=animal)
with self.assertRaises(AttributeError):
with pytest.raises(AttributeError):
oc.animal.not_exist

def test_lazy_reference_simple(self):
@@ -57,19 +59,19 @@ class TestLazyReferenceField(MongoDBTestCase):
animal = Animal(name="Leopard", tag="heavy").save()
Ocurrence(person="test", animal=animal).save()
p = Ocurrence.objects.get()
self.assertIsInstance(p.animal, LazyReference)
assert isinstance(p.animal, LazyReference)
fetched_animal = p.animal.fetch()
self.assertEqual(fetched_animal, animal)
assert fetched_animal == animal
# `fetch` keep cache on referenced document by default...
animal.tag = "not so heavy"
animal.save()
double_fetch = p.animal.fetch()
self.assertIs(fetched_animal, double_fetch)
self.assertEqual(double_fetch.tag, "heavy")
assert fetched_animal is double_fetch
assert double_fetch.tag == "heavy"
# ...unless specified otherwise
fetch_force = p.animal.fetch(force=True)
self.assertIsNot(fetch_force, fetched_animal)
self.assertEqual(fetch_force.tag, "not so heavy")
assert fetch_force is not fetched_animal
assert fetch_force.tag == "not so heavy"

def test_lazy_reference_fetch_invalid_ref(self):
class Animal(Document):
@@ -87,13 +89,13 @@ class TestLazyReferenceField(MongoDBTestCase):
Ocurrence(person="test", animal=animal).save()
animal.delete()
p = Ocurrence.objects.get()
self.assertIsInstance(p.animal, LazyReference)
with self.assertRaises(DoesNotExist):
assert isinstance(p.animal, LazyReference)
with pytest.raises(DoesNotExist):
p.animal.fetch()

def test_lazy_reference_set(self):
class Animal(Document):
meta = {'allow_inheritance': True}
meta = {"allow_inheritance": True}

name = StringField()
tag = StringField()
@@ -109,21 +111,20 @@ class TestLazyReferenceField(MongoDBTestCase):
nick = StringField()

animal = Animal(name="Leopard", tag="heavy").save()
sub_animal = SubAnimal(nick='doggo', name='dog').save()
sub_animal = SubAnimal(nick="doggo", name="dog").save()
for ref in (
animal,
animal.pk,
DBRef(animal._get_collection_name(), animal.pk),
LazyReference(Animal, animal.pk),

sub_animal,
sub_animal.pk,
DBRef(sub_animal._get_collection_name(), sub_animal.pk),
LazyReference(SubAnimal, sub_animal.pk),
):
animal,
animal.pk,
DBRef(animal._get_collection_name(), animal.pk),
LazyReference(Animal, animal.pk),
sub_animal,
sub_animal.pk,
DBRef(sub_animal._get_collection_name(), sub_animal.pk),
LazyReference(SubAnimal, sub_animal.pk),
):
p = Ocurrence(person="test", animal=ref).save()
p.reload()
self.assertIsInstance(p.animal, LazyReference)
assert isinstance(p.animal, LazyReference)
p.animal.fetch()

def test_lazy_reference_bad_set(self):
@@ -144,19 +145,20 @@ class TestLazyReferenceField(MongoDBTestCase):
animal = Animal(name="Leopard", tag="heavy").save()
baddoc = BadDoc().save()
for bad in (
42,
'foo',
baddoc,
DBRef(baddoc._get_collection_name(), animal.pk),
LazyReference(BadDoc, animal.pk)
):
with self.assertRaises(ValidationError):
p = Ocurrence(person="test", animal=bad).save()
42,
"foo",
baddoc,
DBRef(baddoc._get_collection_name(), animal.pk),
LazyReference(BadDoc, animal.pk),
):
with pytest.raises(ValidationError):
Ocurrence(person="test", animal=bad).save()

def test_lazy_reference_query_conversion(self):
"""Ensure that LazyReferenceFields can be queried using objects and values
of the type of the primary key of the referenced object.
"""

class Member(Document):
user_num = IntField(primary_key=True)

@@ -172,26 +174,27 @@ class TestLazyReferenceField(MongoDBTestCase):
m2 = Member(user_num=2)
m2.save()

post1 = BlogPost(title='post 1', author=m1)
post1 = BlogPost(title="post 1", author=m1)
post1.save()

post2 = BlogPost(title='post 2', author=m2)
post2 = BlogPost(title="post 2", author=m2)
post2.save()

post = BlogPost.objects(author=m1).first()
self.assertEqual(post.id, post1.id)
assert post.id == post1.id

post = BlogPost.objects(author=m2).first()
self.assertEqual(post.id, post2.id)
assert post.id == post2.id

# Same thing by passing a LazyReference instance
post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first()
self.assertEqual(post.id, post2.id)
assert post.id == post2.id

def test_lazy_reference_query_conversion_dbref(self):
"""Ensure that LazyReferenceFields can be queried using objects and values
of the type of the primary key of the referenced object.
"""

class Member(Document):
user_num = IntField(primary_key=True)

@@ -207,21 +210,21 @@ class TestLazyReferenceField(MongoDBTestCase):
m2 = Member(user_num=2)
m2.save()

post1 = BlogPost(title='post 1', author=m1)
post1 = BlogPost(title="post 1", author=m1)
post1.save()

post2 = BlogPost(title='post 2', author=m2)
post2 = BlogPost(title="post 2", author=m2)
post2.save()

post = BlogPost.objects(author=m1).first()
self.assertEqual(post.id, post1.id)
assert post.id == post1.id

post = BlogPost.objects(author=m2).first()
self.assertEqual(post.id, post2.id)
assert post.id == post2.id

# Same thing by passing a LazyReference instance
post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first()
self.assertEqual(post.id, post2.id)
assert post.id == post2.id

def test_lazy_reference_passthrough(self):
class Animal(Document):
@@ -238,21 +241,21 @@ class TestLazyReferenceField(MongoDBTestCase):
animal = Animal(name="Leopard", tag="heavy").save()
Ocurrence(animal=animal, animal_passthrough=animal).save()
p = Ocurrence.objects.get()
self.assertIsInstance(p.animal, LazyReference)
with self.assertRaises(KeyError):
p.animal['name']
with self.assertRaises(AttributeError):
assert isinstance(p.animal, LazyReference)
with pytest.raises(KeyError):
p.animal["name"]
with pytest.raises(AttributeError):
p.animal.name
self.assertEqual(p.animal.pk, animal.pk)
assert p.animal.pk == animal.pk

self.assertEqual(p.animal_passthrough.name, "Leopard")
self.assertEqual(p.animal_passthrough['name'], "Leopard")
assert p.animal_passthrough.name == "Leopard"
assert p.animal_passthrough["name"] == "Leopard"

# Should not be able to access referenced document's methods
with self.assertRaises(AttributeError):
with pytest.raises(AttributeError):
p.animal.save
with self.assertRaises(KeyError):
p.animal['save']
with pytest.raises(KeyError):
p.animal["save"]

def test_lazy_reference_not_set(self):
class Animal(Document):
@@ -266,9 +269,9 @@ class TestLazyReferenceField(MongoDBTestCase):
Animal.drop_collection()
Ocurrence.drop_collection()

Ocurrence(person='foo').save()
Ocurrence(person="foo").save()
p = Ocurrence.objects.get()
self.assertIs(p.animal, None)
assert p.animal is None

def test_lazy_reference_equality(self):
class Animal(Document):
@@ -279,12 +282,12 @@ class TestLazyReferenceField(MongoDBTestCase):

animal = Animal(name="Leopard", tag="heavy").save()
animalref = LazyReference(Animal, animal.pk)
self.assertEqual(animal, animalref)
self.assertEqual(animalref, animal)
assert animal == animalref
assert animalref == animal

other_animalref = LazyReference(Animal, ObjectId("54495ad94c934721ede76f90"))
self.assertNotEqual(animal, other_animalref)
self.assertNotEqual(other_animalref, animal)
assert animal != other_animalref
assert other_animalref != animal

def test_lazy_reference_embedded(self):
class Animal(Document):
@@ -303,21 +306,21 @@ class TestLazyReferenceField(MongoDBTestCase):
Animal.drop_collection()
Ocurrence.drop_collection()

animal1 = Animal('doggo').save()
animal2 = Animal('cheeta').save()
animal1 = Animal(name="doggo").save()
animal2 = Animal(name="cheeta").save()

def check_fields_type(occ):
self.assertIsInstance(occ.direct, LazyReference)
assert isinstance(occ.direct, LazyReference)
for elem in occ.in_list:
self.assertIsInstance(elem, LazyReference)
self.assertIsInstance(occ.in_embedded.direct, LazyReference)
assert isinstance(elem, LazyReference)
assert isinstance(occ.in_embedded.direct, LazyReference)
for elem in occ.in_embedded.in_list:
self.assertIsInstance(elem, LazyReference)
assert isinstance(elem, LazyReference)

occ = Ocurrence(
in_list=[animal1, animal2],
in_embedded={'in_list': [animal1, animal2], 'direct': animal1},
direct=animal1
in_embedded={"in_list": [animal1, animal2], "direct": animal1},
direct=animal1,
).save()
check_fields_type(occ)
occ.reload()
@@ -345,19 +348,19 @@ class TestGenericLazyReferenceField(MongoDBTestCase):
animal = Animal(name="Leopard", tag="heavy").save()
Ocurrence(person="test", animal=animal).save()
p = Ocurrence.objects.get()
self.assertIsInstance(p.animal, LazyReference)
assert isinstance(p.animal, LazyReference)
fetched_animal = p.animal.fetch()
self.assertEqual(fetched_animal, animal)
assert fetched_animal == animal
# `fetch` keep cache on referenced document by default...
animal.tag = "not so heavy"
animal.save()
double_fetch = p.animal.fetch()
self.assertIs(fetched_animal, double_fetch)
self.assertEqual(double_fetch.tag, "heavy")
assert fetched_animal is double_fetch
assert double_fetch.tag == "heavy"
# ...unless specified otherwise
fetch_force = p.animal.fetch(force=True)
self.assertIsNot(fetch_force, fetched_animal)
self.assertEqual(fetch_force.tag, "not so heavy")
assert fetch_force is not fetched_animal
assert fetch_force.tag == "not so heavy"

def test_generic_lazy_reference_choices(self):
class Animal(Document):
@@ -383,14 +386,14 @@ class TestGenericLazyReferenceField(MongoDBTestCase):
mineral = Mineral(name="Granite").save()

occ_animal = Ocurrence(living_thing=animal, thing=animal).save()
occ_vegetal = Ocurrence(living_thing=vegetal, thing=vegetal).save()
with self.assertRaises(ValidationError):
_ = Ocurrence(living_thing=vegetal, thing=vegetal).save()
with pytest.raises(ValidationError):
Ocurrence(living_thing=mineral).save()

occ = Ocurrence.objects.get(living_thing=animal)
self.assertEqual(occ, occ_animal)
self.assertIsInstance(occ.thing, LazyReference)
self.assertIsInstance(occ.living_thing, LazyReference)
assert occ == occ_animal
assert isinstance(occ.thing, LazyReference)
assert isinstance(occ.living_thing, LazyReference)

occ.thing = vegetal
occ.living_thing = vegetal
@@ -398,12 +401,12 @@ class TestGenericLazyReferenceField(MongoDBTestCase):

occ.thing = mineral
occ.living_thing = mineral
with self.assertRaises(ValidationError):
with pytest.raises(ValidationError):
occ.save()

def test_generic_lazy_reference_set(self):
class Animal(Document):
meta = {'allow_inheritance': True}
meta = {"allow_inheritance": True}

name = StringField()
tag = StringField()
@@ -419,19 +422,21 @@ class TestGenericLazyReferenceField(MongoDBTestCase):
nick = StringField()

animal = Animal(name="Leopard", tag="heavy").save()
sub_animal = SubAnimal(nick='doggo', name='dog').save()
sub_animal = SubAnimal(nick="doggo", name="dog").save()
for ref in (
animal,
LazyReference(Animal, animal.pk),
{'_cls': 'Animal', '_ref': DBRef(animal._get_collection_name(), animal.pk)},

sub_animal,
LazyReference(SubAnimal, sub_animal.pk),
{'_cls': 'SubAnimal', '_ref': DBRef(sub_animal._get_collection_name(), sub_animal.pk)},
):
animal,
LazyReference(Animal, animal.pk),
{"_cls": "Animal", "_ref": DBRef(animal._get_collection_name(), animal.pk)},
sub_animal,
LazyReference(SubAnimal, sub_animal.pk),
{
"_cls": "SubAnimal",
"_ref": DBRef(sub_animal._get_collection_name(), sub_animal.pk),
},
):
p = Ocurrence(person="test", animal=ref).save()
p.reload()
self.assertIsInstance(p.animal, (LazyReference, Document))
assert isinstance(p.animal, (LazyReference, Document))
p.animal.fetch()

def test_generic_lazy_reference_bad_set(self):
@@ -441,7 +446,7 @@ class TestGenericLazyReferenceField(MongoDBTestCase):

class Ocurrence(Document):
person = StringField()
animal = GenericLazyReferenceField(choices=['Animal'])
animal = GenericLazyReferenceField(choices=["Animal"])

Animal.drop_collection()
Ocurrence.drop_collection()
@@ -451,14 +456,9 @@ class TestGenericLazyReferenceField(MongoDBTestCase):

animal = Animal(name="Leopard", tag="heavy").save()
baddoc = BadDoc().save()
for bad in (
42,
'foo',
baddoc,
LazyReference(BadDoc, animal.pk)
):
with self.assertRaises(ValidationError):
p = Ocurrence(person="test", animal=bad).save()
for bad in (42, "foo", baddoc, LazyReference(BadDoc, animal.pk)):
with pytest.raises(ValidationError):
Ocurrence(person="test", animal=bad).save()

def test_generic_lazy_reference_query_conversion(self):
class Member(Document):
@@ -476,21 +476,21 @@ class TestGenericLazyReferenceField(MongoDBTestCase):
m2 = Member(user_num=2)
m2.save()

post1 = BlogPost(title='post 1', author=m1)
post1 = BlogPost(title="post 1", author=m1)
post1.save()

post2 = BlogPost(title='post 2', author=m2)
post2 = BlogPost(title="post 2", author=m2)
post2.save()

post = BlogPost.objects(author=m1).first()
self.assertEqual(post.id, post1.id)
assert post.id == post1.id

post = BlogPost.objects(author=m2).first()
self.assertEqual(post.id, post2.id)
assert post.id == post2.id

# Same thing by passing a LazyReference instance
post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first()
self.assertEqual(post.id, post2.id)
assert post.id == post2.id

def test_generic_lazy_reference_not_set(self):
class Animal(Document):
@@ -504,9 +504,9 @@ class TestGenericLazyReferenceField(MongoDBTestCase):
Animal.drop_collection()
Ocurrence.drop_collection()

Ocurrence(person='foo').save()
Ocurrence(person="foo").save()
p = Ocurrence.objects.get()
self.assertIs(p.animal, None)
assert p.animal is None

def test_generic_lazy_reference_accepts_string_instead_of_class(self):
class Animal(Document):
@@ -515,7 +515,7 @@ class TestGenericLazyReferenceField(MongoDBTestCase):

class Ocurrence(Document):
person = StringField()
animal = GenericLazyReferenceField('Animal')
animal = GenericLazyReferenceField("Animal")

Animal.drop_collection()
Ocurrence.drop_collection()
@@ -523,7 +523,7 @@ class TestGenericLazyReferenceField(MongoDBTestCase):
animal = Animal().save()
Ocurrence(animal=animal).save()
p = Ocurrence.objects.get()
self.assertEqual(p.animal, animal)
assert p.animal == animal

def test_generic_lazy_reference_embedded(self):
class Animal(Document):
@@ -542,27 +542,33 @@ class TestGenericLazyReferenceField(MongoDBTestCase):
Animal.drop_collection()
Ocurrence.drop_collection()

animal1 = Animal('doggo').save()
animal2 = Animal('cheeta').save()
animal1 = Animal(name="doggo").save()
animal2 = Animal(name="cheeta").save()

def check_fields_type(occ):
self.assertIsInstance(occ.direct, LazyReference)
assert isinstance(occ.direct, LazyReference)
for elem in occ.in_list:
self.assertIsInstance(elem, LazyReference)
self.assertIsInstance(occ.in_embedded.direct, LazyReference)
assert isinstance(elem, LazyReference)
assert isinstance(occ.in_embedded.direct, LazyReference)
for elem in occ.in_embedded.in_list:
self.assertIsInstance(elem, LazyReference)
assert isinstance(elem, LazyReference)

occ = Ocurrence(
in_list=[animal1, animal2],
in_embedded={'in_list': [animal1, animal2], 'direct': animal1},
direct=animal1
in_embedded={"in_list": [animal1, animal2], "direct": animal1},
direct=animal1,
).save()
check_fields_type(occ)
occ.reload()
check_fields_type(occ)
animal1_ref = {'_cls': 'Animal', '_ref': DBRef(animal1._get_collection_name(), animal1.pk)}
animal2_ref = {'_cls': 'Animal', '_ref': DBRef(animal2._get_collection_name(), animal2.pk)}
animal1_ref = {
"_cls": "Animal",
"_ref": DBRef(animal1._get_collection_name(), animal1.pk),
}
animal2_ref = {
"_cls": "Animal",
"_ref": DBRef(animal2._get_collection_name(), animal2.pk),
}
occ.direct = animal1_ref
occ.in_list = [animal1_ref, animal2_ref]
occ.in_embedded.direct = animal1_ref
|
||||
# -*- coding: utf-8 -*-
|
||||
import six
|
||||
|
||||
try:
|
||||
from bson.int64 import Int64
|
||||
except ImportError:
|
||||
Int64 = long
|
||||
from bson.int64 import Int64
|
||||
import pytest
|
||||
|
||||
from mongoengine import *
|
||||
from mongoengine.connection import get_db
|
||||
@ -13,23 +8,26 @@ from tests.utils import MongoDBTestCase
|
||||
|
||||
|
||||
class TestLongField(MongoDBTestCase):
|
||||
|
||||
def test_long_field_is_considered_as_int64(self):
|
||||
"""
|
||||
Tests that long fields are stored as long in mongo, even if long
|
||||
value is small enough to be an int.
|
||||
"""
|
||||
|
||||
class TestLongFieldConsideredAsInt64(Document):
|
||||
some_long = LongField()
|
||||
|
||||
doc = TestLongFieldConsideredAsInt64(some_long=42).save()
|
||||
db = get_db()
|
||||
self.assertIsInstance(db.test_long_field_considered_as_int64.find()[0]['some_long'], Int64)
|
||||
self.assertIsInstance(doc.some_long, six.integer_types)
|
||||
assert isinstance(
|
||||
db.test_long_field_considered_as_int64.find()[0]["some_long"], Int64
|
||||
)
|
||||
assert isinstance(doc.some_long, int)
|
||||
|
||||
def test_long_validation(self):
|
||||
"""Ensure that invalid values cannot be assigned to long fields.
|
||||
"""
|
||||
|
||||
class TestDocument(Document):
|
||||
value = LongField(min_value=0, max_value=110)
|
||||
|
||||
@ -38,11 +36,14 @@ class TestLongField(MongoDBTestCase):
|
||||
doc.validate()
|
||||
|
||||
doc.value = -1
|
||||
self.assertRaises(ValidationError, doc.validate)
|
||||
with pytest.raises(ValidationError):
|
||||
doc.validate()
|
||||
doc.value = 120
|
||||
self.assertRaises(ValidationError, doc.validate)
|
||||
doc.value = 'ten'
|
||||
self.assertRaises(ValidationError, doc.validate)
|
||||
with pytest.raises(ValidationError):
|
||||
doc.validate()
|
||||
doc.value = "ten"
|
||||
with pytest.raises(ValidationError):
|
||||
doc.validate()
|
||||
|
||||
def test_long_ne_operator(self):
|
||||
class TestDocument(Document):
|
||||
@ -53,4 +54,4 @@ class TestLongField(MongoDBTestCase):
|
||||
TestDocument(long_fld=None).save()
|
||||
TestDocument(long_fld=1).save()
|
||||
|
||||
self.assertEqual(1, TestDocument.objects(long_fld__ne=None).count())
|
||||
assert 1 == TestDocument.objects(long_fld__ne=None).count()
|
||||
|
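With Python 2 gone, the try/except around bson's Int64 and the six.integer_types check are no longer needed; the invariant the test keeps is that a LongField is persisted as BSON Int64 even for small values. A sketch of that invariant (the Stats document and database name are made up):

    from bson.int64 import Int64

    from mongoengine import Document, LongField, connect
    from mongoengine.connection import get_db

    connect("long_demo")  # hypothetical database name

    class Stats(Document):  # hypothetical document
        some_long = LongField()

    Stats.drop_collection()
    doc = Stats(some_long=42).save()

    # Persisted as a BSON Int64, not an Int32, despite the small value...
    raw = get_db().stats.find_one()
    assert isinstance(raw["some_long"], Int64)
    # ...while in Python it is just an int (no more six.integer_types).
    assert isinstance(doc.some_long, int)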
@@ -1,29 +1,31 @@
# -*- coding: utf-8 -*-
import datetime

from mongoengine import *
import pytest

from mongoengine import *
from tests.utils import MongoDBTestCase


class TestMapField(MongoDBTestCase):

def test_mapfield(self):
"""Ensure that the MapField handles the declared type."""

class Simple(Document):
mapping = MapField(IntField())

Simple.drop_collection()

e = Simple()
e.mapping['someint'] = 1
e.mapping["someint"] = 1
e.save()

with self.assertRaises(ValidationError):
e.mapping['somestring'] = "abc"
with pytest.raises(ValidationError):
e.mapping["somestring"] = "abc"
e.save()

with self.assertRaises(ValidationError):
with pytest.raises(ValidationError):

class NoDeclaredType(Document):
mapping = MapField()

@@ -45,38 +47,37 @@ class TestMapField(MongoDBTestCase):
Extensible.drop_collection()

e = Extensible()
e.mapping['somestring'] = StringSetting(value='foo')
e.mapping['someint'] = IntegerSetting(value=42)
e.mapping["somestring"] = StringSetting(value="foo")
e.mapping["someint"] = IntegerSetting(value=42)
e.save()

e2 = Extensible.objects.get(id=e.id)
self.assertIsInstance(e2.mapping['somestring'], StringSetting)
self.assertIsInstance(e2.mapping['someint'], IntegerSetting)
assert isinstance(e2.mapping["somestring"], StringSetting)
assert isinstance(e2.mapping["someint"], IntegerSetting)

with self.assertRaises(ValidationError):
e.mapping['someint'] = 123
with pytest.raises(ValidationError):
e.mapping["someint"] = 123
e.save()

def test_embedded_mapfield_db_field(self):
class Embedded(EmbeddedDocument):
number = IntField(default=0, db_field='i')
number = IntField(default=0, db_field="i")

class Test(Document):
my_map = MapField(field=EmbeddedDocumentField(Embedded),
db_field='x')
my_map = MapField(field=EmbeddedDocumentField(Embedded), db_field="x")

Test.drop_collection()

test = Test()
test.my_map['DICTIONARY_KEY'] = Embedded(number=1)
test.my_map["DICTIONARY_KEY"] = Embedded(number=1)
test.save()

Test.objects.update_one(inc__my_map__DICTIONARY_KEY__number=1)

test = Test.objects.get()
self.assertEqual(test.my_map['DICTIONARY_KEY'].number, 2)
assert test.my_map["DICTIONARY_KEY"].number == 2
doc = self.db.test.find_one()
self.assertEqual(doc['x']['DICTIONARY_KEY']['i'], 2)
assert doc["x"]["DICTIONARY_KEY"]["i"] == 2

def test_mapfield_numerical_index(self):
"""Ensure that MapField accept numeric strings as indexes."""
@@ -90,9 +91,9 @@ class TestMapField(MongoDBTestCase):
Test.drop_collection()

test = Test()
test.my_map['1'] = Embedded(name='test')
test.my_map["1"] = Embedded(name="test")
test.save()
test.my_map['1'].name = 'test updated'
test.my_map["1"].name = "test updated"
test.save()

def test_map_field_lookup(self):
@@ -110,15 +111,20 @@ class TestMapField(MongoDBTestCase):
actions = MapField(EmbeddedDocumentField(Action))

Log.drop_collection()
Log(name="wilson", visited={'friends': datetime.datetime.now()},
actions={'friends': Action(operation='drink', object='beer')}).save()
Log(
name="wilson",
visited={"friends": datetime.datetime.now()},
actions={"friends": Action(operation="drink", object="beer")},
).save()

self.assertEqual(1, Log.objects(
visited__friends__exists=True).count())
assert 1 == Log.objects(visited__friends__exists=True).count()

self.assertEqual(1, Log.objects(
actions__friends__operation='drink',
actions__friends__object='beer').count())
assert (
1
== Log.objects(
actions__friends__operation="drink", actions__friends__object="beer"
).count()
)

def test_map_field_unicode(self):
class Info(EmbeddedDocument):
@@ -130,15 +136,11 @@ class TestMapField(MongoDBTestCase):

BlogPost.drop_collection()

tree = BlogPost(info_dict={
u"éééé": {
'description': u"VALUE: éééé"
}
})
tree = BlogPost(info_dict={u"éééé": {"description": u"VALUE: éééé"}})

tree.save()

self.assertEqual(
BlogPost.objects.get(id=tree.id).info_dict[u"éééé"].description,
u"VALUE: éééé"
assert (
BlogPost.objects.get(id=tree.id).info_dict[u"éééé"].description
== u"VALUE: éééé"
)
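The MapField hunks above all exercise one rule: values must match the declared field type, and the check fires on save()/validate(), not on assignment. Mirroring the first test (made-up database name):

    import pytest

    from mongoengine import Document, IntField, MapField, ValidationError, connect

    connect("map_demo")  # hypothetical database name

    class Simple(Document):
        mapping = MapField(IntField())

    Simple.drop_collection()

    e = Simple()
    e.mapping["someint"] = 1
    e.save()

    # Assigning a non-int is harmless; saving it is what raises.
    with pytest.raises(ValidationError):
        e.mapping["somestring"] = "abc"
        e.save()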
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
from bson import SON, DBRef
from bson import DBRef, SON
import pytest

from mongoengine import *

from tests.utils import MongoDBTestCase


@@ -24,19 +24,22 @@ class TestReferenceField(MongoDBTestCase):

# Make sure ReferenceField only accepts a document class or a string
# with a document class name.
self.assertRaises(ValidationError, ReferenceField, EmbeddedDocument)
with pytest.raises(ValidationError):
ReferenceField(EmbeddedDocument)

user = User(name='Test User')
user = User(name="Test User")

# Ensure that the referenced object must have been saved
post1 = BlogPost(content='Chips and gravy taste good.')
post1 = BlogPost(content="Chips and gravy taste good.")
post1.author = user
self.assertRaises(ValidationError, post1.save)
with pytest.raises(ValidationError):
post1.save()

# Check that an invalid object type cannot be used
post2 = BlogPost(content='Chips and chilli taste good.')
post2 = BlogPost(content="Chips and chilli taste good.")
post1.author = post2
self.assertRaises(ValidationError, post1.validate)
with pytest.raises(ValidationError):
post1.validate()

# Ensure ObjectID's are accepted as references
user_object_id = user.pk
@@ -52,42 +55,27 @@ class TestReferenceField(MongoDBTestCase):
# Make sure referencing a saved document of the *wrong* type fails
post2.save()
post1.author = post2
self.assertRaises(ValidationError, post1.validate)

def test_objectid_reference_fields(self):
"""Make sure storing Object ID references works."""

class Person(Document):
name = StringField()
parent = ReferenceField('self')

Person.drop_collection()

p1 = Person(name="John").save()
Person(name="Ross", parent=p1.pk).save()

p = Person.objects.get(name="Ross")
self.assertEqual(p.parent, p1)
with pytest.raises(ValidationError):
post1.validate()

def test_dbref_reference_fields(self):
"""Make sure storing references as bson.dbref.DBRef works."""

class Person(Document):
name = StringField()
parent = ReferenceField('self', dbref=True)
parent = ReferenceField("self", dbref=True)

Person.drop_collection()

p1 = Person(name="John").save()
Person(name="Ross", parent=p1).save()

self.assertEqual(
Person._get_collection().find_one({'name': 'Ross'})['parent'],
DBRef('person', p1.pk)
assert Person._get_collection().find_one({"name": "Ross"})["parent"] == DBRef(
"person", p1.pk
)

p = Person.objects.get(name="Ross")
self.assertEqual(p.parent, p1)
assert p.parent == p1

def test_dbref_to_mongo(self):
"""Make sure that calling to_mongo on a ReferenceField which
@@ -97,21 +85,15 @@ class TestReferenceField(MongoDBTestCase):

class Person(Document):
name = StringField()
parent = ReferenceField('self', dbref=False)
parent = ReferenceField("self", dbref=False)

p = Person(
name='Steve',
parent=DBRef('person', 'abcdefghijklmnop')
)
self.assertEqual(p.to_mongo(), SON([
('name', u'Steve'),
('parent', 'abcdefghijklmnop')
]))
p = Person(name="Steve", parent=DBRef("person", "abcdefghijklmnop"))
assert p.to_mongo() == SON([("name", u"Steve"), ("parent", "abcdefghijklmnop")])

def test_objectid_reference_fields(self):
class Person(Document):
name = StringField()
parent = ReferenceField('self', dbref=False)
parent = ReferenceField("self", dbref=False)

Person.drop_collection()

@@ -119,18 +101,19 @@ class TestReferenceField(MongoDBTestCase):
Person(name="Ross", parent=p1).save()

col = Person._get_collection()
data = col.find_one({'name': 'Ross'})
self.assertEqual(data['parent'], p1.pk)
data = col.find_one({"name": "Ross"})
assert data["parent"] == p1.pk

p = Person.objects.get(name="Ross")
self.assertEqual(p.parent, p1)
assert p.parent == p1

def test_undefined_reference(self):
"""Ensure that ReferenceFields may reference undefined Documents.
"""

class Product(Document):
name = StringField()
company = ReferenceField('Company')
company = ReferenceField("Company")

class Company(Document):
name = StringField()
@@ -138,28 +121,29 @@ class TestReferenceField(MongoDBTestCase):
Product.drop_collection()
Company.drop_collection()

ten_gen = Company(name='10gen')
ten_gen = Company(name="10gen")
ten_gen.save()
mongodb = Product(name='MongoDB', company=ten_gen)
mongodb = Product(name="MongoDB", company=ten_gen)
mongodb.save()

me = Product(name='MongoEngine')
me = Product(name="MongoEngine")
me.save()

obj = Product.objects(company=ten_gen).first()
self.assertEqual(obj, mongodb)
self.assertEqual(obj.company, ten_gen)
assert obj == mongodb
assert obj.company == ten_gen

obj = Product.objects(company=None).first()
self.assertEqual(obj, me)
assert obj == me

obj = Product.objects.get(company=None)
self.assertEqual(obj, me)
assert obj == me

def test_reference_query_conversion(self):
"""Ensure that ReferenceFields can be queried using objects and values
of the type of the primary key of the referenced object.
"""

class Member(Document):
user_num = IntField(primary_key=True)

@@ -175,22 +159,23 @@ class TestReferenceField(MongoDBTestCase):
m2 = Member(user_num=2)
m2.save()

post1 = BlogPost(title='post 1', author=m1)
post1 = BlogPost(title="post 1", author=m1)
post1.save()

post2 = BlogPost(title='post 2', author=m2)
post2 = BlogPost(title="post 2", author=m2)
post2.save()

post = BlogPost.objects(author=m1).first()
self.assertEqual(post.id, post1.id)
assert post.id == post1.id

post = BlogPost.objects(author=m2).first()
self.assertEqual(post.id, post2.id)
assert post.id == post2.id

def test_reference_query_conversion_dbref(self):
"""Ensure that ReferenceFields can be queried using objects and values
of the type of the primary key of the referenced object.
"""

class Member(Document):
user_num = IntField(primary_key=True)

@@ -206,14 +191,14 @@ class TestReferenceField(MongoDBTestCase):
m2 = Member(user_num=2)
m2.save()

post1 = BlogPost(title='post 1', author=m1)
post1 = BlogPost(title="post 1", author=m1)
post1.save()

post2 = BlogPost(title='post 2', author=m2)
post2 = BlogPost(title="post 2", author=m2)
post2.save()

post = BlogPost.objects(author=m1).first()
self.assertEqual(post.id, post1.id)
assert post.id == post1.id

post = BlogPost.objects(author=m2).first()
self.assertEqual(post.id, post2.id)
assert post.id == post2.id
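The dbref tests above contrast the two storage modes of ReferenceField: dbref=True persists a full DBRef, while dbref=False stores only the referenced document's primary key. A sketch of the dbref=True case (made-up database name):

    from bson import DBRef

    from mongoengine import Document, ReferenceField, StringField, connect

    connect("ref_demo")  # hypothetical database name

    class Person(Document):
        name = StringField()
        parent = ReferenceField("self", dbref=True)

    Person.drop_collection()
    p1 = Person(name="John").save()
    Person(name="Ross", parent=p1).save()

    # The raw document holds a DBRef; with dbref=False it would hold p1.pk.
    raw = Person._get_collection().find_one({"name": "Ross"})
    assert raw["parent"] == DBRef("person", p1.pk)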
@ -11,79 +11,79 @@ class TestSequenceField(MongoDBTestCase):
id = SequenceField(primary_key=True)
name = StringField()

self.db['mongoengine.counters'].drop()
self.db["mongoengine.counters"].drop()
Person.drop_collection()

for x in range(10):
Person(name="Person %s" % x).save()

c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
self.assertEqual(c['next'], 10)
c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
assert c["next"] == 10

ids = [i.id for i in Person.objects]
self.assertEqual(ids, range(1, 11))
assert ids == list(range(1, 11))

c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
self.assertEqual(c['next'], 10)
c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
assert c["next"] == 10

Person.id.set_next_value(1000)
c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
self.assertEqual(c['next'], 1000)
c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
assert c["next"] == 1000

def test_sequence_field_get_next_value(self):
class Person(Document):
id = SequenceField(primary_key=True)
name = StringField()

self.db['mongoengine.counters'].drop()
self.db["mongoengine.counters"].drop()
Person.drop_collection()

for x in range(10):
Person(name="Person %s" % x).save()

self.assertEqual(Person.id.get_next_value(), 11)
self.db['mongoengine.counters'].drop()
assert Person.id.get_next_value() == 11
self.db["mongoengine.counters"].drop()

self.assertEqual(Person.id.get_next_value(), 1)
assert Person.id.get_next_value() == 1

class Person(Document):
id = SequenceField(primary_key=True, value_decorator=str)
name = StringField()

self.db['mongoengine.counters'].drop()
self.db["mongoengine.counters"].drop()
Person.drop_collection()

for x in range(10):
Person(name="Person %s" % x).save()

self.assertEqual(Person.id.get_next_value(), '11')
self.db['mongoengine.counters'].drop()
assert Person.id.get_next_value() == "11"
self.db["mongoengine.counters"].drop()

self.assertEqual(Person.id.get_next_value(), '1')
assert Person.id.get_next_value() == "1"

def test_sequence_field_sequence_name(self):
class Person(Document):
id = SequenceField(primary_key=True, sequence_name='jelly')
id = SequenceField(primary_key=True, sequence_name="jelly")
name = StringField()

self.db['mongoengine.counters'].drop()
self.db["mongoengine.counters"].drop()
Person.drop_collection()

for x in range(10):
Person(name="Person %s" % x).save()

c = self.db['mongoengine.counters'].find_one({'_id': 'jelly.id'})
self.assertEqual(c['next'], 10)
c = self.db["mongoengine.counters"].find_one({"_id": "jelly.id"})
assert c["next"] == 10

ids = [i.id for i in Person.objects]
self.assertEqual(ids, range(1, 11))
assert ids == list(range(1, 11))

c = self.db['mongoengine.counters'].find_one({'_id': 'jelly.id'})
self.assertEqual(c['next'], 10)
c = self.db["mongoengine.counters"].find_one({"_id": "jelly.id"})
assert c["next"] == 10

Person.id.set_next_value(1000)
c = self.db['mongoengine.counters'].find_one({'_id': 'jelly.id'})
self.assertEqual(c['next'], 1000)
c = self.db["mongoengine.counters"].find_one({"_id": "jelly.id"})
assert c["next"] == 1000

def test_multiple_sequence_fields(self):
class Person(Document):
@ -91,56 +91,56 @@ class TestSequenceField(MongoDBTestCase):
counter = SequenceField()
name = StringField()

self.db['mongoengine.counters'].drop()
self.db["mongoengine.counters"].drop()
Person.drop_collection()

for x in range(10):
Person(name="Person %s" % x).save()

c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
self.assertEqual(c['next'], 10)
c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
assert c["next"] == 10

ids = [i.id for i in Person.objects]
self.assertEqual(ids, range(1, 11))
assert ids == list(range(1, 11))

counters = [i.counter for i in Person.objects]
self.assertEqual(counters, range(1, 11))
assert counters == list(range(1, 11))

c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
self.assertEqual(c['next'], 10)
c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
assert c["next"] == 10

Person.id.set_next_value(1000)
c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
self.assertEqual(c['next'], 1000)
c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
assert c["next"] == 1000

Person.counter.set_next_value(999)
c = self.db['mongoengine.counters'].find_one({'_id': 'person.counter'})
self.assertEqual(c['next'], 999)
c = self.db["mongoengine.counters"].find_one({"_id": "person.counter"})
assert c["next"] == 999

def test_sequence_fields_reload(self):
class Animal(Document):
counter = SequenceField()
name = StringField()

self.db['mongoengine.counters'].drop()
self.db["mongoengine.counters"].drop()
Animal.drop_collection()

a = Animal(name="Boi").save()

self.assertEqual(a.counter, 1)
assert a.counter == 1
a.reload()
self.assertEqual(a.counter, 1)
assert a.counter == 1

a.counter = None
self.assertEqual(a.counter, 2)
assert a.counter == 2
a.save()

self.assertEqual(a.counter, 2)
assert a.counter == 2

a = Animal.objects.first()
self.assertEqual(a.counter, 2)
assert a.counter == 2
a.reload()
self.assertEqual(a.counter, 2)
assert a.counter == 2

def test_multiple_sequence_fields_on_docs(self):
class Animal(Document):
@ -151,7 +151,7 @@ class TestSequenceField(MongoDBTestCase):
id = SequenceField(primary_key=True)
name = StringField()

self.db['mongoengine.counters'].drop()
self.db["mongoengine.counters"].drop()
Animal.drop_collection()
Person.drop_collection()

@ -159,44 +159,44 @@ class TestSequenceField(MongoDBTestCase):
Animal(name="Animal %s" % x).save()
Person(name="Person %s" % x).save()

c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
self.assertEqual(c['next'], 10)
c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
assert c["next"] == 10

c = self.db['mongoengine.counters'].find_one({'_id': 'animal.id'})
self.assertEqual(c['next'], 10)
c = self.db["mongoengine.counters"].find_one({"_id": "animal.id"})
assert c["next"] == 10

ids = [i.id for i in Person.objects]
self.assertEqual(ids, range(1, 11))
assert ids == list(range(1, 11))

id = [i.id for i in Animal.objects]
self.assertEqual(id, range(1, 11))
assert id == list(range(1, 11))

c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
self.assertEqual(c['next'], 10)
c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
assert c["next"] == 10

c = self.db['mongoengine.counters'].find_one({'_id': 'animal.id'})
self.assertEqual(c['next'], 10)
c = self.db["mongoengine.counters"].find_one({"_id": "animal.id"})
assert c["next"] == 10

def test_sequence_field_value_decorator(self):
class Person(Document):
id = SequenceField(primary_key=True, value_decorator=str)
name = StringField()

self.db['mongoengine.counters'].drop()
self.db["mongoengine.counters"].drop()
Person.drop_collection()

for x in range(10):
p = Person(name="Person %s" % x)
p.save()

c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
self.assertEqual(c['next'], 10)
c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
assert c["next"] == 10

ids = [i.id for i in Person.objects]
self.assertEqual(ids, map(str, range(1, 11)))
assert ids == [str(i) for i in range(1, 11)]

c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
self.assertEqual(c['next'], 10)
c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
assert c["next"] == 10

def test_embedded_sequence_field(self):
class Comment(EmbeddedDocument):
@ -207,23 +207,27 @@ class TestSequenceField(MongoDBTestCase):
title = StringField(required=True)
comments = ListField(EmbeddedDocumentField(Comment))

self.db['mongoengine.counters'].drop()
self.db["mongoengine.counters"].drop()
Post.drop_collection()

Post(title="MongoEngine",
comments=[Comment(content="NoSQL Rocks"),
Comment(content="MongoEngine Rocks")]).save()
c = self.db['mongoengine.counters'].find_one({'_id': 'comment.id'})
self.assertEqual(c['next'], 2)
Post(
title="MongoEngine",
comments=[
Comment(content="NoSQL Rocks"),
Comment(content="MongoEngine Rocks"),
],
).save()
c = self.db["mongoengine.counters"].find_one({"_id": "comment.id"})
assert c["next"] == 2
post = Post.objects.first()
self.assertEqual(1, post.comments[0].id)
self.assertEqual(2, post.comments[1].id)
assert 1 == post.comments[0].id
assert 2 == post.comments[1].id

def test_inherited_sequencefield(self):
class Base(Document):
name = StringField()
counter = SequenceField()
meta = {'abstract': True}
meta = {"abstract": True}

class Foo(Base):
pass
@ -231,24 +235,25 @@ class TestSequenceField(MongoDBTestCase):
class Bar(Base):
pass

bar = Bar(name='Bar')
bar = Bar(name="Bar")
bar.save()

foo = Foo(name='Foo')
foo = Foo(name="Foo")
foo.save()

self.assertTrue('base.counter' in
self.db['mongoengine.counters'].find().distinct('_id'))
self.assertFalse(('foo.counter' or 'bar.counter') in
self.db['mongoengine.counters'].find().distinct('_id'))
self.assertNotEqual(foo.counter, bar.counter)
self.assertEqual(foo._fields['counter'].owner_document, Base)
self.assertEqual(bar._fields['counter'].owner_document, Base)
assert "base.counter" in self.db["mongoengine.counters"].find().distinct("_id")
assert not (
("foo.counter" or "bar.counter")
in self.db["mongoengine.counters"].find().distinct("_id")
)
assert foo.counter != bar.counter
assert foo._fields["counter"].owner_document == Base
assert bar._fields["counter"].owner_document == Base

def test_no_inherited_sequencefield(self):
class Base(Document):
name = StringField()
meta = {'abstract': True}
meta = {"abstract": True}

class Foo(Base):
counter = SequenceField()
@ -256,16 +261,18 @@ class TestSequenceField(MongoDBTestCase):
class Bar(Base):
counter = SequenceField()

bar = Bar(name='Bar')
bar = Bar(name="Bar")
bar.save()

foo = Foo(name='Foo')
foo = Foo(name="Foo")
foo.save()

self.assertFalse('base.counter' in
self.db['mongoengine.counters'].find().distinct('_id'))
self.assertTrue(('foo.counter' and 'bar.counter') in
self.db['mongoengine.counters'].find().distinct('_id'))
self.assertEqual(foo.counter, bar.counter)
self.assertEqual(foo._fields['counter'].owner_document, Foo)
self.assertEqual(bar._fields['counter'].owner_document, Bar)
assert "base.counter" not in self.db["mongoengine.counters"].find().distinct(
"_id"
)
existing_counters = self.db["mongoengine.counters"].find().distinct("_id")
assert "foo.counter" in existing_counters
assert "bar.counter" in existing_counters
assert foo.counter == bar.counter
assert foo._fields["counter"].owner_document == Foo
assert bar._fields["counter"].owner_document == Bar

@ -1,53 +1,60 @@
# -*- coding: utf-8 -*-
from mongoengine import *
import pytest

from mongoengine import *
from tests.utils import MongoDBTestCase


class TestURLField(MongoDBTestCase):

def test_validation(self):
"""Ensure that URLFields validate urls properly."""

class Link(Document):
url = URLField()

link = Link()
link.url = 'google'
self.assertRaises(ValidationError, link.validate)
link.url = "google"
with pytest.raises(ValidationError):
link.validate()

link.url = 'http://www.google.com:8080'
link.url = "http://www.google.com:8080"
link.validate()

def test_unicode_url_validation(self):
"""Ensure unicode URLs are validated properly."""

class Link(Document):
url = URLField()

link = Link()
link.url = u'http://привет.com'
link.url = u"http://привет.com"

# TODO fix URL validation - this *IS* a valid URL
# For now we just want to make sure that the error message is correct
with self.assertRaises(ValidationError) as ctx_err:
with pytest.raises(ValidationError) as exc_info:
link.validate()
self.assertEqual(unicode(ctx_err.exception),
u"ValidationError (Link:None) (Invalid URL: http://\u043f\u0440\u0438\u0432\u0435\u0442.com: ['url'])")
assert (
str(exc_info.value)
== u"ValidationError (Link:None) (Invalid URL: http://\u043f\u0440\u0438\u0432\u0435\u0442.com: ['url'])"
)

def test_url_scheme_validation(self):
"""Ensure that URLFields validate urls with specific schemes properly.
"""

class Link(Document):
url = URLField()

class SchemeLink(Document):
url = URLField(schemes=['ws', 'irc'])
url = URLField(schemes=["ws", "irc"])

link = Link()
link.url = 'ws://google.com'
self.assertRaises(ValidationError, link.validate)
link.url = "ws://google.com"
with pytest.raises(ValidationError):
link.validate()

scheme_link = SchemeLink()
scheme_link.url = 'ws://google.com'
scheme_link.url = "ws://google.com"
scheme_link.validate()

def test_underscore_allowed_in_domains_names(self):
@ -55,5 +62,5 @@ class TestURLField(MongoDBTestCase):
url = URLField()

link = Link()
link.url = 'https://san_leandro-ca.geebo.com'
link.url = "https://san_leandro-ca.geebo.com"
link.validate()

@ -1,8 +1,9 @@
# -*- coding: utf-8 -*-
import uuid

from mongoengine import *
import pytest

from mongoengine import *
from tests.utils import MongoDBTestCase, get_as_pymongo


@ -14,12 +15,7 @@ class TestUUIDField(MongoDBTestCase):
def test_storage(self):
uid = uuid.uuid4()
person = Person(api_key=uid).save()
self.assertEqual(
get_as_pymongo(person),
{'_id': person.id,
'api_key': str(uid)
}
)
assert get_as_pymongo(person) == {"_id": person.id, "api_key": str(uid)}

def test_field_string(self):
"""Test UUID fields storing as String
@ -28,8 +24,8 @@ class TestUUIDField(MongoDBTestCase):

uu = uuid.uuid4()
Person(api_key=uu).save()
self.assertEqual(1, Person.objects(api_key=uu).count())
self.assertEqual(uu, Person.objects.first().api_key)
assert 1 == Person.objects(api_key=uu).count()
assert uu == Person.objects.first().api_key

person = Person()
valid = (uuid.uuid4(), uuid.uuid1())
@ -37,11 +33,14 @@ class TestUUIDField(MongoDBTestCase):
person.api_key = api_key
person.validate()

invalid = ('9d159858-549b-4975-9f98-dd2f987c113g',
'9d159858-549b-4975-9f98-dd2f987c113')
invalid = (
"9d159858-549b-4975-9f98-dd2f987c113g",
"9d159858-549b-4975-9f98-dd2f987c113",
)
for api_key in invalid:
person.api_key = api_key
self.assertRaises(ValidationError, person.validate)
with pytest.raises(ValidationError):
person.validate()

def test_field_binary(self):
"""Test UUID fields storing as Binary object."""
@ -49,8 +48,8 @@ class TestUUIDField(MongoDBTestCase):

uu = uuid.uuid4()
Person(api_key=uu).save()
self.assertEqual(1, Person.objects(api_key=uu).count())
self.assertEqual(uu, Person.objects.first().api_key)
assert 1 == Person.objects(api_key=uu).count()
assert uu == Person.objects.first().api_key

person = Person()
valid = (uuid.uuid4(), uuid.uuid1())
@ -58,8 +57,11 @@ class TestUUIDField(MongoDBTestCase):
person.api_key = api_key
person.validate()

invalid = ('9d159858-549b-4975-9f98-dd2f987c113g',
'9d159858-549b-4975-9f98-dd2f987c113')
invalid = (
"9d159858-549b-4975-9f98-dd2f987c113g",
"9d159858-549b-4975-9f98-dd2f987c113",
)
for api_key in invalid:
person.api_key = api_key
self.assertRaises(ValidationError, person.validate)
with pytest.raises(ValidationError):
person.validate()

@ -11,7 +11,7 @@ class PickleEmbedded(EmbeddedDocument):

class PickleTest(Document):
number = IntField()
string = StringField(choices=(('One', '1'), ('Two', '2')))
string = StringField(choices=(("One", "1"), ("Two", "2")))
embedded = EmbeddedDocumentField(PickleEmbedded)
lists = ListField(StringField())
photo = FileField()
@ -19,7 +19,7 @@ class PickleTest(Document):

class NewDocumentPickleTest(Document):
number = IntField()
string = StringField(choices=(('One', '1'), ('Two', '2')))
string = StringField(choices=(("One", "1"), ("Two", "2")))
embedded = EmbeddedDocumentField(PickleEmbedded)
lists = ListField(StringField())
photo = FileField()
@ -36,17 +36,17 @@ class PickleDynamicTest(DynamicDocument):

class PickleSignalsTest(Document):
number = IntField()
string = StringField(choices=(('One', '1'), ('Two', '2')))
string = StringField(choices=(("One", "1"), ("Two", "2")))
embedded = EmbeddedDocumentField(PickleEmbedded)
lists = ListField(StringField())

@classmethod
def post_save(self, sender, document, created, **kwargs):
pickled = pickle.dumps(document)
pickle.dumps(document)

@classmethod
def post_delete(self, sender, document, **kwargs):
pickled = pickle.dumps(document)
pickle.dumps(document)


signals.post_save.connect(PickleSignalsTest.post_save, sender=PickleSignalsTest)
@ -58,4 +58,4 @@ class Mixin(object):


class Base(Document):
meta = {'allow_inheritance': True}
meta = {"allow_inheritance": True}

@ -1,6 +0,0 @@
from .transform import *
from .field_list import *
from .queryset import *
from .visitor import *
from .geo import *
from .modify import *

@ -1,440 +0,0 @@
|
||||
import unittest
|
||||
|
||||
from mongoengine import *
|
||||
from mongoengine.queryset import QueryFieldList
|
||||
|
||||
__all__ = ("QueryFieldListTest", "OnlyExcludeAllTest")
|
||||
|
||||
|
||||
class QueryFieldListTest(unittest.TestCase):
|
||||
|
||||
def test_empty(self):
|
||||
q = QueryFieldList()
|
||||
self.assertFalse(q)
|
||||
|
||||
q = QueryFieldList(always_include=['_cls'])
|
||||
self.assertFalse(q)
|
||||
|
||||
def test_include_include(self):
|
||||
q = QueryFieldList()
|
||||
q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.ONLY, _only_called=True)
|
||||
self.assertEqual(q.as_dict(), {'a': 1, 'b': 1})
|
||||
q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY)
|
||||
self.assertEqual(q.as_dict(), {'a': 1, 'b': 1, 'c': 1})
|
||||
|
||||
def test_include_exclude(self):
|
||||
q = QueryFieldList()
|
||||
q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.ONLY)
|
||||
self.assertEqual(q.as_dict(), {'a': 1, 'b': 1})
|
||||
q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.EXCLUDE)
|
||||
self.assertEqual(q.as_dict(), {'a': 1})
|
||||
|
||||
def test_exclude_exclude(self):
|
||||
q = QueryFieldList()
|
||||
q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.EXCLUDE)
|
||||
self.assertEqual(q.as_dict(), {'a': 0, 'b': 0})
|
||||
q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.EXCLUDE)
|
||||
self.assertEqual(q.as_dict(), {'a': 0, 'b': 0, 'c': 0})
|
||||
|
||||
def test_exclude_include(self):
|
||||
q = QueryFieldList()
|
||||
q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.EXCLUDE)
|
||||
self.assertEqual(q.as_dict(), {'a': 0, 'b': 0})
|
||||
q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY)
|
||||
self.assertEqual(q.as_dict(), {'c': 1})
|
||||
|
||||
def test_always_include(self):
|
||||
q = QueryFieldList(always_include=['x', 'y'])
|
||||
q += QueryFieldList(fields=['a', 'b', 'x'], value=QueryFieldList.EXCLUDE)
|
||||
q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY)
|
||||
self.assertEqual(q.as_dict(), {'x': 1, 'y': 1, 'c': 1})
|
||||
|
||||
def test_reset(self):
|
||||
q = QueryFieldList(always_include=['x', 'y'])
|
||||
q += QueryFieldList(fields=['a', 'b', 'x'], value=QueryFieldList.EXCLUDE)
|
||||
q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY)
|
||||
self.assertEqual(q.as_dict(), {'x': 1, 'y': 1, 'c': 1})
|
||||
q.reset()
|
||||
self.assertFalse(q)
|
||||
q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY)
|
||||
self.assertEqual(q.as_dict(), {'x': 1, 'y': 1, 'b': 1, 'c': 1})
|
||||
|
||||
def test_using_a_slice(self):
|
||||
q = QueryFieldList()
|
||||
q += QueryFieldList(fields=['a'], value={"$slice": 5})
|
||||
self.assertEqual(q.as_dict(), {'a': {"$slice": 5}})
|
||||
|
||||
|
||||
class OnlyExcludeAllTest(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
connect(db='mongoenginetest')
|
||||
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
meta = {'allow_inheritance': True}
|
||||
|
||||
Person.drop_collection()
|
||||
self.Person = Person
|
||||
|
||||
def test_mixing_only_exclude(self):
|
||||
|
||||
class MyDoc(Document):
|
||||
a = StringField()
|
||||
b = StringField()
|
||||
c = StringField()
|
||||
d = StringField()
|
||||
e = StringField()
|
||||
f = StringField()
|
||||
|
||||
include = ['a', 'b', 'c', 'd', 'e']
|
||||
exclude = ['d', 'e']
|
||||
only = ['b', 'c']
|
||||
|
||||
qs = MyDoc.objects.fields(**{i: 1 for i in include})
|
||||
self.assertEqual(qs._loaded_fields.as_dict(),
|
||||
{'a': 1, 'b': 1, 'c': 1, 'd': 1, 'e': 1})
|
||||
qs = qs.only(*only)
|
||||
self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1})
|
||||
qs = qs.exclude(*exclude)
|
||||
self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1})
|
||||
|
||||
qs = MyDoc.objects.fields(**{i: 1 for i in include})
|
||||
qs = qs.exclude(*exclude)
|
||||
self.assertEqual(qs._loaded_fields.as_dict(), {'a': 1, 'b': 1, 'c': 1})
|
||||
qs = qs.only(*only)
|
||||
self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1})
|
||||
|
||||
qs = MyDoc.objects.exclude(*exclude)
|
||||
qs = qs.fields(**{i: 1 for i in include})
|
||||
self.assertEqual(qs._loaded_fields.as_dict(), {'a': 1, 'b': 1, 'c': 1})
|
||||
qs = qs.only(*only)
|
||||
self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1})
|
||||
|
||||
def test_slicing(self):
|
||||
|
||||
class MyDoc(Document):
|
||||
a = ListField()
|
||||
b = ListField()
|
||||
c = ListField()
|
||||
d = ListField()
|
||||
e = ListField()
|
||||
f = ListField()
|
||||
|
||||
include = ['a', 'b', 'c', 'd', 'e']
|
||||
exclude = ['d', 'e']
|
||||
only = ['b', 'c']
|
||||
|
||||
qs = MyDoc.objects.fields(**{i: 1 for i in include})
|
||||
qs = qs.exclude(*exclude)
|
||||
qs = qs.only(*only)
|
||||
qs = qs.fields(slice__b=5)
|
||||
self.assertEqual(qs._loaded_fields.as_dict(),
|
||||
{'b': {'$slice': 5}, 'c': 1})
|
||||
|
||||
qs = qs.fields(slice__c=[5, 1])
|
||||
self.assertEqual(qs._loaded_fields.as_dict(),
|
||||
{'b': {'$slice': 5}, 'c': {'$slice': [5, 1]}})
|
||||
|
||||
qs = qs.exclude('c')
|
||||
self.assertEqual(qs._loaded_fields.as_dict(),
|
||||
{'b': {'$slice': 5}})
|
||||
|
||||
def test_mix_slice_with_other_fields(self):
|
||||
class MyDoc(Document):
|
||||
a = ListField()
|
||||
b = ListField()
|
||||
c = ListField()
|
||||
|
||||
qs = MyDoc.objects.fields(a=1, b=0, slice__c=2)
|
||||
self.assertEqual(qs._loaded_fields.as_dict(),
|
||||
{'c': {'$slice': 2}, 'a': 1})
|
||||
|
||||
def test_only(self):
"""Ensure that QuerySet.only only returns the requested fields.
"""
person = self.Person(name='test', age=25)
person.save()

obj = self.Person.objects.only('name').get()
self.assertEqual(obj.name, person.name)
self.assertEqual(obj.age, None)

obj = self.Person.objects.only('age').get()
self.assertEqual(obj.name, None)
self.assertEqual(obj.age, person.age)

obj = self.Person.objects.only('name', 'age').get()
self.assertEqual(obj.name, person.name)
self.assertEqual(obj.age, person.age)

obj = self.Person.objects.only(*('id', 'name',)).get()
self.assertEqual(obj.name, person.name)
self.assertEqual(obj.age, None)

# Check polymorphism still works
class Employee(self.Person):
salary = IntField(db_field='wage')

employee = Employee(name='test employee', age=40, salary=30000)
employee.save()

obj = self.Person.objects(id=employee.id).only('age').get()
self.assertIsInstance(obj, Employee)

# Check field names are looked up properly
obj = Employee.objects(id=employee.id).only('salary').get()
self.assertEqual(obj.salary, employee.salary)
self.assertEqual(obj.name, None)

def test_only_with_subfields(self):
class User(EmbeddedDocument):
name = StringField()
email = StringField()

class Comment(EmbeddedDocument):
title = StringField()
text = StringField()

class VariousData(EmbeddedDocument):
some = BooleanField()

class BlogPost(Document):
content = StringField()
author = EmbeddedDocumentField(User)
comments = ListField(EmbeddedDocumentField(Comment))
various = MapField(field=EmbeddedDocumentField(VariousData))

BlogPost.drop_collection()

post = BlogPost(content='Had a good coffee today...', various={'test_dynamic': {'some': True}})
post.author = User(name='Test User')
post.comments = [Comment(title='I aggree', text='Great post!'), Comment(title='Coffee', text='I hate coffee')]
post.save()

obj = BlogPost.objects.only('author.name',).get()
self.assertEqual(obj.content, None)
self.assertEqual(obj.author.email, None)
self.assertEqual(obj.author.name, 'Test User')
self.assertEqual(obj.comments, [])

obj = BlogPost.objects.only('various.test_dynamic.some').get()
self.assertEqual(obj.various["test_dynamic"].some, True)

obj = BlogPost.objects.only('content', 'comments.title',).get()
self.assertEqual(obj.content, 'Had a good coffee today...')
self.assertEqual(obj.author, None)
self.assertEqual(obj.comments[0].title, 'I aggree')
self.assertEqual(obj.comments[1].title, 'Coffee')
self.assertEqual(obj.comments[0].text, None)
self.assertEqual(obj.comments[1].text, None)

obj = BlogPost.objects.only('comments',).get()
self.assertEqual(obj.content, None)
self.assertEqual(obj.author, None)
self.assertEqual(obj.comments[0].title, 'I aggree')
self.assertEqual(obj.comments[1].title, 'Coffee')
self.assertEqual(obj.comments[0].text, 'Great post!')
self.assertEqual(obj.comments[1].text, 'I hate coffee')

BlogPost.drop_collection()

def test_exclude(self):
class User(EmbeddedDocument):
name = StringField()
email = StringField()

class Comment(EmbeddedDocument):
title = StringField()
text = StringField()

class BlogPost(Document):
content = StringField()
author = EmbeddedDocumentField(User)
comments = ListField(EmbeddedDocumentField(Comment))

BlogPost.drop_collection()

post = BlogPost(content='Had a good coffee today...')
post.author = User(name='Test User')
post.comments = [Comment(title='I aggree', text='Great post!'), Comment(title='Coffee', text='I hate coffee')]
post.save()

obj = BlogPost.objects.exclude('author', 'comments.text').get()
self.assertEqual(obj.author, None)
self.assertEqual(obj.content, 'Had a good coffee today...')
self.assertEqual(obj.comments[0].title, 'I aggree')
self.assertEqual(obj.comments[0].text, None)

BlogPost.drop_collection()

def test_exclude_only_combining(self):
class Attachment(EmbeddedDocument):
name = StringField()
content = StringField()

class Email(Document):
sender = StringField()
to = StringField()
subject = StringField()
body = StringField()
content_type = StringField()
attachments = ListField(EmbeddedDocumentField(Attachment))

Email.drop_collection()
email = Email(sender='me', to='you', subject='From Russia with Love', body='Hello!', content_type='text/plain')
email.attachments = [
Attachment(name='file1.doc', content='ABC'),
Attachment(name='file2.doc', content='XYZ'),
]
email.save()

obj = Email.objects.exclude('content_type').exclude('body').get()
self.assertEqual(obj.sender, 'me')
self.assertEqual(obj.to, 'you')
self.assertEqual(obj.subject, 'From Russia with Love')
self.assertEqual(obj.body, None)
self.assertEqual(obj.content_type, None)

obj = Email.objects.only('sender', 'to').exclude('body', 'sender').get()
self.assertEqual(obj.sender, None)
self.assertEqual(obj.to, 'you')
self.assertEqual(obj.subject, None)
self.assertEqual(obj.body, None)
self.assertEqual(obj.content_type, None)

obj = Email.objects.exclude('attachments.content').exclude('body').only('to', 'attachments.name').get()
self.assertEqual(obj.attachments[0].name, 'file1.doc')
self.assertEqual(obj.attachments[0].content, None)
self.assertEqual(obj.sender, None)
self.assertEqual(obj.to, 'you')
self.assertEqual(obj.subject, None)
self.assertEqual(obj.body, None)
self.assertEqual(obj.content_type, None)

Email.drop_collection()

def test_all_fields(self):

class Email(Document):
sender = StringField()
to = StringField()
subject = StringField()
body = StringField()
content_type = StringField()

Email.drop_collection()

email = Email(sender='me', to='you', subject='From Russia with Love', body='Hello!', content_type='text/plain')
email.save()

obj = Email.objects.exclude('content_type', 'body').only('to', 'body').all_fields().get()
self.assertEqual(obj.sender, 'me')
self.assertEqual(obj.to, 'you')
self.assertEqual(obj.subject, 'From Russia with Love')
self.assertEqual(obj.body, 'Hello!')
self.assertEqual(obj.content_type, 'text/plain')

Email.drop_collection()

def test_slicing_fields(self):
"""Ensure that query slicing an array works.
"""
class Numbers(Document):
n = ListField(IntField())

Numbers.drop_collection()

numbers = Numbers(n=[0, 1, 2, 3, 4, 5, -5, -4, -3, -2, -1])
numbers.save()

# first three
numbers = Numbers.objects.fields(slice__n=3).get()
self.assertEqual(numbers.n, [0, 1, 2])

# last three
numbers = Numbers.objects.fields(slice__n=-3).get()
self.assertEqual(numbers.n, [-3, -2, -1])

# skip 2, limit 3
numbers = Numbers.objects.fields(slice__n=[2, 3]).get()
self.assertEqual(numbers.n, [2, 3, 4])

# skip to fifth from last, limit 4
numbers = Numbers.objects.fields(slice__n=[-5, 4]).get()
self.assertEqual(numbers.n, [-5, -4, -3, -2])

# skip to fifth from last, limit 10
numbers = Numbers.objects.fields(slice__n=[-5, 10]).get()
self.assertEqual(numbers.n, [-5, -4, -3, -2, -1])

# skip to fifth from last, limit 10 dict method
numbers = Numbers.objects.fields(n={"$slice": [-5, 10]}).get()
self.assertEqual(numbers.n, [-5, -4, -3, -2, -1])

def test_slicing_nested_fields(self):
"""Ensure that query slicing an embedded array works.
"""

class EmbeddedNumber(EmbeddedDocument):
n = ListField(IntField())

class Numbers(Document):
embedded = EmbeddedDocumentField(EmbeddedNumber)

Numbers.drop_collection()

numbers = Numbers()
numbers.embedded = EmbeddedNumber(n=[0, 1, 2, 3, 4, 5, -5, -4, -3, -2, -1])
numbers.save()

# first three
numbers = Numbers.objects.fields(slice__embedded__n=3).get()
self.assertEqual(numbers.embedded.n, [0, 1, 2])

# last three
numbers = Numbers.objects.fields(slice__embedded__n=-3).get()
self.assertEqual(numbers.embedded.n, [-3, -2, -1])

# skip 2, limit 3
numbers = Numbers.objects.fields(slice__embedded__n=[2, 3]).get()
self.assertEqual(numbers.embedded.n, [2, 3, 4])

# skip to fifth from last, limit 4
numbers = Numbers.objects.fields(slice__embedded__n=[-5, 4]).get()
self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2])

# skip to fifth from last, limit 10
numbers = Numbers.objects.fields(slice__embedded__n=[-5, 10]).get()
self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1])

# skip to fifth from last, limit 10 dict method
numbers = Numbers.objects.fields(embedded__n={"$slice": [-5, 10]}).get()
self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1])

def test_exclude_from_subclasses_docs(self):

class Base(Document):
username = StringField()

meta = {'allow_inheritance': True}

class Anon(Base):
anon = BooleanField()

class User(Base):
password = StringField()
wibble = StringField()

Base.drop_collection()
User(username="mongodb", password="secret").save()

user = Base.objects().exclude("password", "wibble").first()
self.assertEqual(user.password, None)

self.assertRaises(LookUpError, Base.objects.exclude, "made_up")


if __name__ == '__main__':
unittest.main()

467
tests/queryset/test_field_list.py
Normal file
@ -0,0 +1,467 @@
|
||||
import unittest
|
||||
|
||||
import pytest
|
||||
|
||||
from mongoengine import *
|
||||
from mongoengine.queryset import QueryFieldList
|
||||
|
||||
|
||||
class TestQueryFieldList:
|
||||
def test_empty(self):
|
||||
q = QueryFieldList()
|
||||
assert not q
|
||||
|
||||
q = QueryFieldList(always_include=["_cls"])
|
||||
assert not q
|
||||
|
||||
def test_include_include(self):
|
||||
q = QueryFieldList()
|
||||
q += QueryFieldList(
|
||||
fields=["a", "b"], value=QueryFieldList.ONLY, _only_called=True
|
||||
)
|
||||
assert q.as_dict() == {"a": 1, "b": 1}
|
||||
q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY)
|
||||
assert q.as_dict() == {"a": 1, "b": 1, "c": 1}
|
||||
|
||||
def test_include_exclude(self):
|
||||
q = QueryFieldList()
|
||||
q += QueryFieldList(fields=["a", "b"], value=QueryFieldList.ONLY)
|
||||
assert q.as_dict() == {"a": 1, "b": 1}
|
||||
q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.EXCLUDE)
|
||||
assert q.as_dict() == {"a": 1}
|
||||
|
||||
def test_exclude_exclude(self):
|
||||
q = QueryFieldList()
|
||||
q += QueryFieldList(fields=["a", "b"], value=QueryFieldList.EXCLUDE)
|
||||
assert q.as_dict() == {"a": 0, "b": 0}
|
||||
q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.EXCLUDE)
|
||||
assert q.as_dict() == {"a": 0, "b": 0, "c": 0}
|
||||
|
||||
def test_exclude_include(self):
|
||||
q = QueryFieldList()
|
||||
q += QueryFieldList(fields=["a", "b"], value=QueryFieldList.EXCLUDE)
|
||||
assert q.as_dict() == {"a": 0, "b": 0}
|
||||
q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY)
|
||||
assert q.as_dict() == {"c": 1}
|
||||
|
||||
def test_always_include(self):
|
||||
q = QueryFieldList(always_include=["x", "y"])
|
||||
q += QueryFieldList(fields=["a", "b", "x"], value=QueryFieldList.EXCLUDE)
|
||||
q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY)
|
||||
assert q.as_dict() == {"x": 1, "y": 1, "c": 1}
|
||||
|
||||
def test_reset(self):
|
||||
q = QueryFieldList(always_include=["x", "y"])
|
||||
q += QueryFieldList(fields=["a", "b", "x"], value=QueryFieldList.EXCLUDE)
|
||||
q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY)
|
||||
assert q.as_dict() == {"x": 1, "y": 1, "c": 1}
|
||||
q.reset()
|
||||
assert not q
|
||||
q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY)
|
||||
assert q.as_dict() == {"x": 1, "y": 1, "b": 1, "c": 1}
|
||||
|
||||
def test_using_a_slice(self):
|
||||
q = QueryFieldList()
|
||||
q += QueryFieldList(fields=["a"], value={"$slice": 5})
|
||||
assert q.as_dict() == {"a": {"$slice": 5}}
|
||||
|
||||
|
||||
class TestOnlyExcludeAll(unittest.TestCase):
|
||||
def setUp(self):
|
||||
connect(db="mongoenginetest")
|
||||
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
Person.drop_collection()
|
||||
self.Person = Person
|
||||
|
||||
def test_mixing_only_exclude(self):
|
||||
class MyDoc(Document):
|
||||
a = StringField()
|
||||
b = StringField()
|
||||
c = StringField()
|
||||
d = StringField()
|
||||
e = StringField()
|
||||
f = StringField()
|
||||
|
||||
include = ["a", "b", "c", "d", "e"]
|
||||
exclude = ["d", "e"]
|
||||
only = ["b", "c"]
|
||||
|
||||
qs = MyDoc.objects.fields(**{i: 1 for i in include})
|
||||
assert qs._loaded_fields.as_dict() == {"a": 1, "b": 1, "c": 1, "d": 1, "e": 1}
|
||||
qs = qs.only(*only)
|
||||
assert qs._loaded_fields.as_dict() == {"b": 1, "c": 1}
|
||||
qs = qs.exclude(*exclude)
|
||||
assert qs._loaded_fields.as_dict() == {"b": 1, "c": 1}
|
||||
|
||||
qs = MyDoc.objects.fields(**{i: 1 for i in include})
|
||||
qs = qs.exclude(*exclude)
|
||||
assert qs._loaded_fields.as_dict() == {"a": 1, "b": 1, "c": 1}
|
||||
qs = qs.only(*only)
|
||||
assert qs._loaded_fields.as_dict() == {"b": 1, "c": 1}
|
||||
|
||||
qs = MyDoc.objects.exclude(*exclude)
|
||||
qs = qs.fields(**{i: 1 for i in include})
|
||||
assert qs._loaded_fields.as_dict() == {"a": 1, "b": 1, "c": 1}
|
||||
qs = qs.only(*only)
|
||||
assert qs._loaded_fields.as_dict() == {"b": 1, "c": 1}
|
||||
|
||||
def test_slicing(self):
|
||||
class MyDoc(Document):
|
||||
a = ListField()
|
||||
b = ListField()
|
||||
c = ListField()
|
||||
d = ListField()
|
||||
e = ListField()
|
||||
f = ListField()
|
||||
|
||||
include = ["a", "b", "c", "d", "e"]
|
||||
exclude = ["d", "e"]
|
||||
only = ["b", "c"]
|
||||
|
||||
qs = MyDoc.objects.fields(**{i: 1 for i in include})
|
||||
qs = qs.exclude(*exclude)
|
||||
qs = qs.only(*only)
|
||||
qs = qs.fields(slice__b=5)
|
||||
assert qs._loaded_fields.as_dict() == {"b": {"$slice": 5}, "c": 1}
|
||||
|
||||
qs = qs.fields(slice__c=[5, 1])
|
||||
assert qs._loaded_fields.as_dict() == {
|
||||
"b": {"$slice": 5},
|
||||
"c": {"$slice": [5, 1]},
|
||||
}
|
||||
|
||||
qs = qs.exclude("c")
|
||||
assert qs._loaded_fields.as_dict() == {"b": {"$slice": 5}}
|
||||
|
||||
def test_mix_slice_with_other_fields(self):
|
||||
class MyDoc(Document):
|
||||
a = ListField()
|
||||
b = ListField()
|
||||
c = ListField()
|
||||
|
||||
qs = MyDoc.objects.fields(a=1, b=0, slice__c=2)
|
||||
assert qs._loaded_fields.as_dict() == {"c": {"$slice": 2}, "a": 1}
|
||||
|
||||
def test_only(self):
"""Ensure that QuerySet.only only returns the requested fields.
"""
person = self.Person(name="test", age=25)
person.save()

obj = self.Person.objects.only("name").get()
assert obj.name == person.name
assert obj.age is None

obj = self.Person.objects.only("age").get()
assert obj.name is None
assert obj.age == person.age

obj = self.Person.objects.only("name", "age").get()
assert obj.name == person.name
assert obj.age == person.age

obj = self.Person.objects.only(*("id", "name")).get()
assert obj.name == person.name
assert obj.age is None

# Check polymorphism still works
class Employee(self.Person):
salary = IntField(db_field="wage")

employee = Employee(name="test employee", age=40, salary=30000)
employee.save()

obj = self.Person.objects(id=employee.id).only("age").get()
assert isinstance(obj, Employee)

# Check field names are looked up properly
obj = Employee.objects(id=employee.id).only("salary").get()
assert obj.salary == employee.salary
assert obj.name is None

def test_only_with_subfields(self):
class User(EmbeddedDocument):
name = StringField()
email = StringField()

class Comment(EmbeddedDocument):
title = StringField()
text = StringField()

class VariousData(EmbeddedDocument):
some = BooleanField()

class BlogPost(Document):
content = StringField()
author = EmbeddedDocumentField(User)
comments = ListField(EmbeddedDocumentField(Comment))
various = MapField(field=EmbeddedDocumentField(VariousData))

BlogPost.drop_collection()

post = BlogPost(
content="Had a good coffee today...",
various={"test_dynamic": {"some": True}},
)
post.author = User(name="Test User")
post.comments = [
Comment(title="I aggree", text="Great post!"),
Comment(title="Coffee", text="I hate coffee"),
]
post.save()

obj = BlogPost.objects.only("author.name").get()
assert obj.content is None
assert obj.author.email is None
assert obj.author.name == "Test User"
assert obj.comments == []

obj = BlogPost.objects.only("various.test_dynamic.some").get()
assert obj.various["test_dynamic"].some is True

obj = BlogPost.objects.only("content", "comments.title").get()
assert obj.content == "Had a good coffee today..."
assert obj.author is None
assert obj.comments[0].title == "I aggree"
assert obj.comments[1].title == "Coffee"
assert obj.comments[0].text is None
assert obj.comments[1].text is None

obj = BlogPost.objects.only("comments").get()
assert obj.content is None
assert obj.author is None
assert obj.comments[0].title == "I aggree"
assert obj.comments[1].title == "Coffee"
assert obj.comments[0].text == "Great post!"
assert obj.comments[1].text == "I hate coffee"

BlogPost.drop_collection()

def test_exclude(self):
class User(EmbeddedDocument):
name = StringField()
email = StringField()

class Comment(EmbeddedDocument):
title = StringField()
text = StringField()

class BlogPost(Document):
content = StringField()
author = EmbeddedDocumentField(User)
comments = ListField(EmbeddedDocumentField(Comment))

BlogPost.drop_collection()

post = BlogPost(content="Had a good coffee today...")
post.author = User(name="Test User")
post.comments = [
Comment(title="I aggree", text="Great post!"),
Comment(title="Coffee", text="I hate coffee"),
]
post.save()

obj = BlogPost.objects.exclude("author", "comments.text").get()
assert obj.author is None
assert obj.content == "Had a good coffee today..."
assert obj.comments[0].title == "I aggree"
assert obj.comments[0].text is None

BlogPost.drop_collection()

def test_exclude_only_combining(self):
class Attachment(EmbeddedDocument):
name = StringField()
content = StringField()

class Email(Document):
sender = StringField()
to = StringField()
subject = StringField()
body = StringField()
content_type = StringField()
attachments = ListField(EmbeddedDocumentField(Attachment))

Email.drop_collection()
email = Email(
sender="me",
to="you",
subject="From Russia with Love",
body="Hello!",
content_type="text/plain",
)
email.attachments = [
Attachment(name="file1.doc", content="ABC"),
Attachment(name="file2.doc", content="XYZ"),
]
email.save()

obj = Email.objects.exclude("content_type").exclude("body").get()
assert obj.sender == "me"
assert obj.to == "you"
assert obj.subject == "From Russia with Love"
assert obj.body is None
assert obj.content_type is None

obj = Email.objects.only("sender", "to").exclude("body", "sender").get()
assert obj.sender is None
assert obj.to == "you"
assert obj.subject is None
assert obj.body is None
assert obj.content_type is None

obj = (
Email.objects.exclude("attachments.content")
.exclude("body")
.only("to", "attachments.name")
.get()
)
assert obj.attachments[0].name == "file1.doc"
assert obj.attachments[0].content is None
assert obj.sender is None
assert obj.to == "you"
assert obj.subject is None
assert obj.body is None
assert obj.content_type is None

Email.drop_collection()

def test_all_fields(self):
class Email(Document):
sender = StringField()
to = StringField()
subject = StringField()
body = StringField()
content_type = StringField()

Email.drop_collection()

email = Email(
sender="me",
to="you",
subject="From Russia with Love",
body="Hello!",
content_type="text/plain",
)
email.save()

obj = (
Email.objects.exclude("content_type", "body")
.only("to", "body")
.all_fields()
.get()
)
assert obj.sender == "me"
assert obj.to == "you"
assert obj.subject == "From Russia with Love"
assert obj.body == "Hello!"
assert obj.content_type == "text/plain"

Email.drop_collection()

def test_slicing_fields(self):
"""Ensure that query slicing an array works.
"""

class Numbers(Document):
n = ListField(IntField())

Numbers.drop_collection()

numbers = Numbers(n=[0, 1, 2, 3, 4, 5, -5, -4, -3, -2, -1])
numbers.save()

# first three
numbers = Numbers.objects.fields(slice__n=3).get()
assert numbers.n == [0, 1, 2]

# last three
numbers = Numbers.objects.fields(slice__n=-3).get()
assert numbers.n == [-3, -2, -1]

# skip 2, limit 3
numbers = Numbers.objects.fields(slice__n=[2, 3]).get()
assert numbers.n == [2, 3, 4]

# skip to fifth from last, limit 4
numbers = Numbers.objects.fields(slice__n=[-5, 4]).get()
assert numbers.n == [-5, -4, -3, -2]

# skip to fifth from last, limit 10
numbers = Numbers.objects.fields(slice__n=[-5, 10]).get()
assert numbers.n == [-5, -4, -3, -2, -1]

# skip to fifth from last, limit 10 dict method
numbers = Numbers.objects.fields(n={"$slice": [-5, 10]}).get()
assert numbers.n == [-5, -4, -3, -2, -1]

def test_slicing_nested_fields(self):
"""Ensure that query slicing an embedded array works.
"""

class EmbeddedNumber(EmbeddedDocument):
n = ListField(IntField())

class Numbers(Document):
embedded = EmbeddedDocumentField(EmbeddedNumber)

Numbers.drop_collection()

numbers = Numbers()
numbers.embedded = EmbeddedNumber(n=[0, 1, 2, 3, 4, 5, -5, -4, -3, -2, -1])
numbers.save()

# first three
numbers = Numbers.objects.fields(slice__embedded__n=3).get()
assert numbers.embedded.n == [0, 1, 2]

# last three
numbers = Numbers.objects.fields(slice__embedded__n=-3).get()
assert numbers.embedded.n == [-3, -2, -1]

# skip 2, limit 3
numbers = Numbers.objects.fields(slice__embedded__n=[2, 3]).get()
assert numbers.embedded.n == [2, 3, 4]

# skip to fifth from last, limit 4
numbers = Numbers.objects.fields(slice__embedded__n=[-5, 4]).get()
assert numbers.embedded.n == [-5, -4, -3, -2]

# skip to fifth from last, limit 10
numbers = Numbers.objects.fields(slice__embedded__n=[-5, 10]).get()
assert numbers.embedded.n == [-5, -4, -3, -2, -1]

# skip to fifth from last, limit 10 dict method
numbers = Numbers.objects.fields(embedded__n={"$slice": [-5, 10]}).get()
assert numbers.embedded.n == [-5, -4, -3, -2, -1]

def test_exclude_from_subclasses_docs(self):
class Base(Document):
username = StringField()

meta = {"allow_inheritance": True}

class Anon(Base):
anon = BooleanField()

class User(Base):
password = StringField()
wibble = StringField()

Base.drop_collection()
User(username="mongodb", password="secret").save()

user = Base.objects().exclude("password", "wibble").first()
assert user.password is None

with pytest.raises(LookUpError):
Base.objects.exclude("made_up")


if __name__ == "__main__":
unittest.main()

@ -6,13 +6,10 @@ from mongoengine import *
from tests.utils import MongoDBTestCase


__all__ = ("GeoQueriesTest",)


class GeoQueriesTest(MongoDBTestCase):

class TestGeoQueries(MongoDBTestCase):
def _create_event_data(self, point_field_class=GeoPointField):
"""Create some sample data re-used in many of the tests below."""

class Event(Document):
title = StringField()
date = DateTimeField()
@ -28,15 +25,18 @@ class GeoQueriesTest(MongoDBTestCase):
event1 = Event.objects.create(
title="Coltrane Motion @ Double Door",
date=datetime.datetime.now() - datetime.timedelta(days=1),
location=[-87.677137, 41.909889])
location=[-87.677137, 41.909889],
)
event2 = Event.objects.create(
title="Coltrane Motion @ Bottom of the Hill",
date=datetime.datetime.now() - datetime.timedelta(days=10),
location=[-122.4194155, 37.7749295])
location=[-122.4194155, 37.7749295],
)
event3 = Event.objects.create(
title="Coltrane Motion @ Empty Bottle",
date=datetime.datetime.now(),
location=[-87.686638, 41.900474])
location=[-87.686638, 41.900474],
)

return event1, event2, event3

@ -48,14 +48,14 @@ class GeoQueriesTest(MongoDBTestCase):
# note that "near" will show the san francisco event, too,
# although it sorts to last.
events = self.Event.objects(location__near=[-87.67892, 41.9120459])
self.assertEqual(events.count(), 3)
self.assertEqual(list(events), [event1, event3, event2])
assert events.count() == 3
assert list(events) == [event1, event3, event2]

# ensure ordering is respected by "near"
events = self.Event.objects(location__near=[-87.67892, 41.9120459])
events = events.order_by("-date")
self.assertEqual(events.count(), 3)
self.assertEqual(list(events), [event3, event1, event2])
assert events.count() == 3
assert list(events) == [event3, event1, event2]

def test_near_and_max_distance(self):
"""Ensure the "max_distance" operator works alongside the "near"
@ -65,10 +65,9 @@ class GeoQueriesTest(MongoDBTestCase):

# find events within 10 degrees of san francisco
point = [-122.415579, 37.7566023]
events = self.Event.objects(location__near=point,
location__max_distance=10)
self.assertEqual(events.count(), 1)
self.assertEqual(events[0], event2)
events = self.Event.objects(location__near=point, location__max_distance=10)
assert events.count() == 1
assert events[0] == event2

def test_near_and_min_distance(self):
"""Ensure the "min_distance" operator works alongside the "near"
@ -78,9 +77,8 @@ class GeoQueriesTest(MongoDBTestCase):

# find events at least 10 degrees away of san francisco
point = [-122.415579, 37.7566023]
events = self.Event.objects(location__near=point,
location__min_distance=10)
self.assertEqual(events.count(), 2)
events = self.Event.objects(location__near=point, location__min_distance=10)
assert events.count() == 2

def test_within_distance(self):
"""Make sure the "within_distance" operator works."""
@ -88,34 +86,30 @@ class GeoQueriesTest(MongoDBTestCase):

# find events within 5 degrees of pitchfork office, chicago
point_and_distance = [[-87.67892, 41.9120459], 5]
events = self.Event.objects(
location__within_distance=point_and_distance)
self.assertEqual(events.count(), 2)
events = self.Event.objects(location__within_distance=point_and_distance)
assert events.count() == 2
events = list(events)
self.assertNotIn(event2, events)
self.assertIn(event1, events)
self.assertIn(event3, events)
assert event2 not in events
assert event1 in events
assert event3 in events

# find events within 10 degrees of san francisco
point_and_distance = [[-122.415579, 37.7566023], 10]
events = self.Event.objects(
location__within_distance=point_and_distance)
self.assertEqual(events.count(), 1)
self.assertEqual(events[0], event2)
events = self.Event.objects(location__within_distance=point_and_distance)
assert events.count() == 1
assert events[0] == event2

# find events within 1 degree of greenpoint, broolyn, nyc, ny
point_and_distance = [[-73.9509714, 40.7237134], 1]
events = self.Event.objects(
location__within_distance=point_and_distance)
self.assertEqual(events.count(), 0)
events = self.Event.objects(location__within_distance=point_and_distance)
assert events.count() == 0

# ensure ordering is respected by "within_distance"
point_and_distance = [[-87.67892, 41.9120459], 10]
events = self.Event.objects(
location__within_distance=point_and_distance)
events = self.Event.objects(location__within_distance=point_and_distance)
events = events.order_by("-date")
self.assertEqual(events.count(), 2)
self.assertEqual(events[0], event3)
assert events.count() == 2
assert events[0] == event3

def test_within_box(self):
"""Ensure the "within_box" operator works."""
@ -124,8 +118,8 @@ class GeoQueriesTest(MongoDBTestCase):
# check that within_box works
box = [(-125.0, 35.0), (-100.0, 40.0)]
events = self.Event.objects(location__within_box=box)
self.assertEqual(events.count(), 1)
self.assertEqual(events[0].id, event2.id)
assert events.count() == 1
assert events[0].id == event2.id

def test_within_polygon(self):
"""Ensure the "within_polygon" operator works."""
@ -139,87 +133,78 @@ class GeoQueriesTest(MongoDBTestCase):
(-87.656164, 41.898061),
]
events = self.Event.objects(location__within_polygon=polygon)
self.assertEqual(events.count(), 1)
self.assertEqual(events[0].id, event1.id)
assert events.count() == 1
assert events[0].id == event1.id

polygon2 = [
(-1.742249, 54.033586),
(-1.225891, 52.792797),
(-4.40094, 53.389881)
(-4.40094, 53.389881),
]
events = self.Event.objects(location__within_polygon=polygon2)
self.assertEqual(events.count(), 0)
assert events.count() == 0

def test_2dsphere_near(self):
|
||||
"""Make sure the "near" operator works with a PointField, which
|
||||
corresponds to a 2dsphere index.
|
||||
"""
|
||||
event1, event2, event3 = self._create_event_data(
|
||||
point_field_class=PointField
|
||||
)
|
||||
event1, event2, event3 = self._create_event_data(point_field_class=PointField)
|
||||
|
||||
# find all events "near" pitchfork office, chicago.
|
||||
# note that "near" will show the san francisco event, too,
|
||||
# although it sorts to last.
|
||||
events = self.Event.objects(location__near=[-87.67892, 41.9120459])
|
||||
self.assertEqual(events.count(), 3)
|
||||
self.assertEqual(list(events), [event1, event3, event2])
|
||||
assert events.count() == 3
|
||||
assert list(events) == [event1, event3, event2]
|
||||
|
||||
# ensure ordering is respected by "near"
|
||||
events = self.Event.objects(location__near=[-87.67892, 41.9120459])
|
||||
events = events.order_by("-date")
|
||||
self.assertEqual(events.count(), 3)
|
||||
self.assertEqual(list(events), [event3, event1, event2])
|
||||
assert events.count() == 3
|
||||
assert list(events) == [event3, event1, event2]

def test_2dsphere_near_and_max_distance(self):
"""Ensure the "max_distance" operator works alongside the "near"
operator with a 2dsphere index.
"""
event1, event2, event3 = self._create_event_data(
point_field_class=PointField
)
event1, event2, event3 = self._create_event_data(point_field_class=PointField)

# find events within 10km of san francisco
point = [-122.415579, 37.7566023]
events = self.Event.objects(location__near=point,
location__max_distance=10000)
self.assertEqual(events.count(), 1)
self.assertEqual(events[0], event2)
events = self.Event.objects(location__near=point, location__max_distance=10000)
assert events.count() == 1
assert events[0] == event2

# find events within 1km of greenpoint, brooklyn, nyc, ny
events = self.Event.objects(location__near=[-73.9509714, 40.7237134],
location__max_distance=1000)
self.assertEqual(events.count(), 0)
events = self.Event.objects(
location__near=[-73.9509714, 40.7237134], location__max_distance=1000
)
assert events.count() == 0

# ensure ordering is respected by "near"
events = self.Event.objects(
location__near=[-87.67892, 41.9120459],
location__max_distance=10000
location__near=[-87.67892, 41.9120459], location__max_distance=10000
).order_by("-date")
self.assertEqual(events.count(), 2)
self.assertEqual(events[0], event3)
assert events.count() == 2
assert events[0] == event3
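
Note on units: with a PointField backed by a 2dsphere index, MongoDB interprets near/max_distance values in meters, which is why the "10km" comment above pairs with location__max_distance=10000 and the "1km" query with 1000. A minimal sketch of that convention (the helper name is illustrative, not part of the suite):

# Illustrative helper only: 2dsphere distance operators take meters.
def km_to_meters(km):
    return km * 1000

assert km_to_meters(10) == 10000  # the san francisco query above
assert km_to_meters(1) == 1000  # the brooklyn query above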

def test_2dsphere_geo_within_box(self):
"""Ensure the "geo_within_box" operator works with a 2dsphere
index.
"""
event1, event2, event3 = self._create_event_data(
point_field_class=PointField
)
event1, event2, event3 = self._create_event_data(point_field_class=PointField)

# check that within_box works
box = [(-125.0, 35.0), (-100.0, 40.0)]
events = self.Event.objects(location__geo_within_box=box)
self.assertEqual(events.count(), 1)
self.assertEqual(events[0].id, event2.id)
assert events.count() == 1
assert events[0].id == event2.id

def test_2dsphere_geo_within_polygon(self):
"""Ensure the "geo_within_polygon" operator works with a
2dsphere index.
"""
event1, event2, event3 = self._create_event_data(
point_field_class=PointField
)
event1, event2, event3 = self._create_event_data(point_field_class=PointField)

polygon = [
(-87.694445, 41.912114),
@ -229,64 +214,59 @@ class GeoQueriesTest(MongoDBTestCase):
(-87.656164, 41.898061),
]
events = self.Event.objects(location__geo_within_polygon=polygon)
self.assertEqual(events.count(), 1)
self.assertEqual(events[0].id, event1.id)
assert events.count() == 1
assert events[0].id == event1.id

polygon2 = [
(-1.742249, 54.033586),
(-1.225891, 52.792797),
(-4.40094, 53.389881)
(-4.40094, 53.389881),
]
events = self.Event.objects(location__geo_within_polygon=polygon2)
self.assertEqual(events.count(), 0)
assert events.count() == 0

def test_2dsphere_near_and_min_max_distance(self):
"""Ensure "min_distance" and "max_distance" operators work well
together with the "near" operator in a 2dsphere index.
"""
event1, event2, event3 = self._create_event_data(
point_field_class=PointField
)
event1, event2, event3 = self._create_event_data(point_field_class=PointField)

# ensure min_distance and max_distance combine well
events = self.Event.objects(
location__near=[-87.67892, 41.9120459],
location__min_distance=1000,
location__max_distance=10000
location__max_distance=10000,
).order_by("-date")
self.assertEqual(events.count(), 1)
self.assertEqual(events[0], event3)
assert events.count() == 1
assert events[0] == event3

# ensure ordering is respected by "near" with "min_distance"
events = self.Event.objects(
location__near=[-87.67892, 41.9120459],
location__min_distance=10000
location__near=[-87.67892, 41.9120459], location__min_distance=10000
).order_by("-date")
self.assertEqual(events.count(), 1)
self.assertEqual(events[0], event2)
assert events.count() == 1
assert events[0] == event2

def test_2dsphere_geo_within_center(self):
"""Make sure the "geo_within_center" operator works with a
2dsphere index.
"""
event1, event2, event3 = self._create_event_data(
point_field_class=PointField
)
event1, event2, event3 = self._create_event_data(point_field_class=PointField)

# find events within 2 degrees of pitchfork office, chicago
point_and_distance = [[-87.67892, 41.9120459], 2]
events = self.Event.objects(
location__geo_within_center=point_and_distance)
self.assertEqual(events.count(), 2)
events = self.Event.objects(location__geo_within_center=point_and_distance)
assert events.count() == 2
events = list(events)
self.assertNotIn(event2, events)
self.assertIn(event1, events)
self.assertIn(event3, events)
assert event2 not in events
assert event1 in events
assert event3 in events

def _test_embedded(self, point_field_class):
"""Helper test method ensuring given point field class works
well in an embedded document.
"""

class Venue(EmbeddedDocument):
location = point_field_class()
name = StringField()
@ -300,19 +280,18 @@ class GeoQueriesTest(MongoDBTestCase):
venue1 = Venue(name="The Rock", location=[-87.677137, 41.909889])
venue2 = Venue(name="The Bridge", location=[-122.4194155, 37.7749295])

event1 = Event(title="Coltrane Motion @ Double Door",
venue=venue1).save()
event2 = Event(title="Coltrane Motion @ Bottom of the Hill",
venue=venue2).save()
event3 = Event(title="Coltrane Motion @ Empty Bottle",
venue=venue1).save()
event1 = Event(title="Coltrane Motion @ Double Door", venue=venue1).save()
event2 = Event(
title="Coltrane Motion @ Bottom of the Hill", venue=venue2
).save()
event3 = Event(title="Coltrane Motion @ Empty Bottle", venue=venue1).save()

# find all events "near" pitchfork office, chicago.
# note that "near" will show the san francisco event, too,
# although it sorts to last.
events = Event.objects(venue__location__near=[-87.67892, 41.9120459])
self.assertEqual(events.count(), 3)
self.assertEqual(list(events), [event1, event3, event2])
assert events.count() == 3
assert list(events) == [event1, event3, event2]

def test_geo_spatial_embedded(self):
"""Make sure GeoPointField works properly in an embedded document."""
@ -324,6 +303,7 @@ class GeoQueriesTest(MongoDBTestCase):

def test_spherical_geospatial_operators(self):
"""Ensure that spherical geospatial queries are working."""

class Point(Document):
location = GeoPointField()

@ -339,58 +319,55 @@ class GeoQueriesTest(MongoDBTestCase):
# Finds both points because they are within 60 km of the reference
# point equidistant between them.
points = Point.objects(location__near_sphere=[-122, 37.5])
self.assertEqual(points.count(), 2)
assert points.count() == 2

# Same behavior for _within_spherical_distance
points = Point.objects(
location__within_spherical_distance=[
[-122, 37.5],
60 / earth_radius
]
location__within_spherical_distance=[[-122, 37.5], 60 / earth_radius]
)
self.assertEqual(points.count(), 2)
assert points.count() == 2

points = Point.objects(location__near_sphere=[-122, 37.5],
location__max_distance=60 / earth_radius)
self.assertEqual(points.count(), 2)
points = Point.objects(
location__near_sphere=[-122, 37.5], location__max_distance=60 / earth_radius
)
assert points.count() == 2

# Test query works with max_distance, being farther from one point
points = Point.objects(location__near_sphere=[-122, 37.8],
location__max_distance=60 / earth_radius)
points = Point.objects(
location__near_sphere=[-122, 37.8], location__max_distance=60 / earth_radius
)
close_point = points.first()
self.assertEqual(points.count(), 1)
assert points.count() == 1

# Test query works with min_distance, being farther from one point
points = Point.objects(location__near_sphere=[-122, 37.8],
location__min_distance=60 / earth_radius)
self.assertEqual(points.count(), 1)
points = Point.objects(
location__near_sphere=[-122, 37.8], location__min_distance=60 / earth_radius
)
assert points.count() == 1
far_point = points.first()
self.assertNotEqual(close_point, far_point)
assert close_point != far_point

# Finds both points, but orders the north point first because it's
# closer to the reference point to the north.
points = Point.objects(location__near_sphere=[-122, 38.5])
self.assertEqual(points.count(), 2)
self.assertEqual(points[0].id, north_point.id)
self.assertEqual(points[1].id, south_point.id)
assert points.count() == 2
assert points[0].id == north_point.id
assert points[1].id == south_point.id

# Finds both points, but orders the south point first because it's
# closer to the reference point to the south.
points = Point.objects(location__near_sphere=[-122, 36.5])
self.assertEqual(points.count(), 2)
self.assertEqual(points[0].id, south_point.id)
self.assertEqual(points[1].id, north_point.id)
assert points.count() == 2
assert points[0].id == south_point.id
assert points[1].id == north_point.id

# Finds only one point because only the first point is within 60km of
# the reference point to the south.
points = Point.objects(
location__within_spherical_distance=[
[-122, 36.5],
60 / earth_radius
]
location__within_spherical_distance=[[-122, 36.5], 60 / earth_radius]
)
self.assertEqual(points.count(), 1)
self.assertEqual(points[0].id, south_point.id)
assert points.count() == 1
assert points[0].id == south_point.id
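
In contrast to the meter-based 2dsphere queries above, these legacy-coordinate spherical operators (near_sphere, within_spherical_distance) take distances in radians, hence the repeated 60 / earth_radius. A minimal sketch, assuming earth_radius is the mean earth radius in kilometers (the test file defines its own constant, not shown here):

# Illustrative only: dividing a surface distance in km by the earth's
# radius in km yields the radian value these spherical operators expect.
earth_radius = 6371.0  # km; assumed value for this sketch

def km_to_radians(km):
    return km / earth_radius

print(km_to_radians(60))  # ~0.0094 rad, the "60 km" radius used above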

def test_linestring(self):
class Road(Document):
@ -404,48 +381,51 @@ class GeoQueriesTest(MongoDBTestCase):
# near
point = {"type": "Point", "coordinates": [40, 5]}
roads = Road.objects.filter(line__near=point["coordinates"]).count()
self.assertEqual(1, roads)
assert 1 == roads

roads = Road.objects.filter(line__near=point).count()
self.assertEqual(1, roads)
assert 1 == roads

roads = Road.objects.filter(line__near={"$geometry": point}).count()
self.assertEqual(1, roads)
assert 1 == roads

# Within
polygon = {"type": "Polygon",
"coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]}
polygon = {
"type": "Polygon",
"coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]],
}
roads = Road.objects.filter(line__geo_within=polygon["coordinates"]).count()
self.assertEqual(1, roads)
assert 1 == roads

roads = Road.objects.filter(line__geo_within=polygon).count()
self.assertEqual(1, roads)
assert 1 == roads

roads = Road.objects.filter(line__geo_within={"$geometry": polygon}).count()
self.assertEqual(1, roads)
assert 1 == roads

# Intersects
line = {"type": "LineString",
"coordinates": [[40, 5], [40, 6]]}
line = {"type": "LineString", "coordinates": [[40, 5], [40, 6]]}
roads = Road.objects.filter(line__geo_intersects=line["coordinates"]).count()
self.assertEqual(1, roads)
assert 1 == roads

roads = Road.objects.filter(line__geo_intersects=line).count()
self.assertEqual(1, roads)
assert 1 == roads

roads = Road.objects.filter(line__geo_intersects={"$geometry": line}).count()
self.assertEqual(1, roads)
assert 1 == roads

polygon = {"type": "Polygon",
"coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]}
polygon = {
"type": "Polygon",
"coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]],
}
roads = Road.objects.filter(line__geo_intersects=polygon["coordinates"]).count()
self.assertEqual(1, roads)
assert 1 == roads

roads = Road.objects.filter(line__geo_intersects=polygon).count()
self.assertEqual(1, roads)
assert 1 == roads

roads = Road.objects.filter(line__geo_intersects={"$geometry": polygon}).count()
self.assertEqual(1, roads)
assert 1 == roads

def test_polygon(self):
class Road(Document):
@ -459,66 +439,66 @@ class GeoQueriesTest(MongoDBTestCase):
# near
point = {"type": "Point", "coordinates": [40, 5]}
roads = Road.objects.filter(poly__near=point["coordinates"]).count()
self.assertEqual(1, roads)
assert 1 == roads

roads = Road.objects.filter(poly__near=point).count()
self.assertEqual(1, roads)
assert 1 == roads

roads = Road.objects.filter(poly__near={"$geometry": point}).count()
self.assertEqual(1, roads)
assert 1 == roads

# Within
polygon = {"type": "Polygon",
"coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]}
polygon = {
"type": "Polygon",
"coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]],
}
roads = Road.objects.filter(poly__geo_within=polygon["coordinates"]).count()
self.assertEqual(1, roads)
assert 1 == roads

roads = Road.objects.filter(poly__geo_within=polygon).count()
self.assertEqual(1, roads)
assert 1 == roads

roads = Road.objects.filter(poly__geo_within={"$geometry": polygon}).count()
self.assertEqual(1, roads)
assert 1 == roads

# Intersects
line = {"type": "LineString",
"coordinates": [[40, 5], [41, 6]]}
line = {"type": "LineString", "coordinates": [[40, 5], [41, 6]]}
roads = Road.objects.filter(poly__geo_intersects=line["coordinates"]).count()
self.assertEqual(1, roads)
assert 1 == roads

roads = Road.objects.filter(poly__geo_intersects=line).count()
self.assertEqual(1, roads)
assert 1 == roads

roads = Road.objects.filter(poly__geo_intersects={"$geometry": line}).count()
self.assertEqual(1, roads)
assert 1 == roads

polygon = {"type": "Polygon",
"coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]}
polygon = {
"type": "Polygon",
"coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]],
}
roads = Road.objects.filter(poly__geo_intersects=polygon["coordinates"]).count()
self.assertEqual(1, roads)
assert 1 == roads

roads = Road.objects.filter(poly__geo_intersects=polygon).count()
self.assertEqual(1, roads)
assert 1 == roads

roads = Road.objects.filter(poly__geo_intersects={"$geometry": polygon}).count()
self.assertEqual(1, roads)
assert 1 == roads
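
The geo_within and geo_intersects filters in the two tests above map onto MongoDB's $geoWithin and $geoIntersects operators with a $geometry document. A hedged sketch of the raw PyMongo counterpart of the last filter (client, database, and collection names are assumptions for illustration, not taken from the suite):

from pymongo import MongoClient

# Illustrative raw-query counterpart of poly__geo_intersects={"$geometry": polygon};
# assumes a local mongod and a "road" collection populated by the Road model.
client = MongoClient()
coll = client["test"]["road"]
query = {
    "poly": {
        "$geoIntersects": {
            "$geometry": {
                "type": "Polygon",
                "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]],
            }
        }
    }
}
print(coll.count_documents(query))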

def test_aspymongo_with_only(self):
"""Ensure as_pymongo works with .only()"""

class Place(Document):
location = PointField()

Place.drop_collection()
p = Place(location=[24.946861267089844, 60.16311983618494])
p.save()
qs = Place.objects().only('location')
self.assertDictEqual(
qs.as_pymongo()[0]['location'],
{u'type': u'Point',
u'coordinates': [
24.946861267089844,
60.16311983618494]
}
)
qs = Place.objects().only("location")
assert qs.as_pymongo()[0]["location"] == {
u"type": u"Point",
u"coordinates": [24.946861267089844, 60.16311983618494],
}

def test_2dsphere_point_sets_correctly(self):
class Location(Document):
@ -528,11 +508,11 @@ class GeoQueriesTest(MongoDBTestCase):

Location(loc=[1, 2]).save()
loc = Location.objects.as_pymongo()[0]
self.assertEqual(loc["loc"], {"type": "Point", "coordinates": [1, 2]})
assert loc["loc"] == {"type": "Point", "coordinates": [1, 2]}

Location.objects.update(set__loc=[2, 1])
loc = Location.objects.as_pymongo()[0]
self.assertEqual(loc["loc"], {"type": "Point", "coordinates": [2, 1]})
assert loc["loc"] == {"type": "Point", "coordinates": [2, 1]}

def test_2dsphere_linestring_sets_correctly(self):
class Location(Document):
@ -542,11 +522,11 @@ class GeoQueriesTest(MongoDBTestCase):

Location(line=[[1, 2], [2, 2]]).save()
loc = Location.objects.as_pymongo()[0]
self.assertEqual(loc["line"], {"type": "LineString", "coordinates": [[1, 2], [2, 2]]})
assert loc["line"] == {"type": "LineString", "coordinates": [[1, 2], [2, 2]]}

Location.objects.update(set__line=[[2, 1], [1, 2]])
loc = Location.objects.as_pymongo()[0]
self.assertEqual(loc["line"], {"type": "LineString", "coordinates": [[2, 1], [1, 2]]})
assert loc["line"] == {"type": "LineString", "coordinates": [[2, 1], [1, 2]]}

def test_geojson_PolygonField(self):
class Location(Document):
@ -556,12 +536,18 @@ class GeoQueriesTest(MongoDBTestCase):

Location(poly=[[[40, 5], [40, 6], [41, 6], [40, 5]]]).save()
loc = Location.objects.as_pymongo()[0]
self.assertEqual(loc["poly"], {"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]})
assert loc["poly"] == {
"type": "Polygon",
"coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]],
}

Location.objects.update(set__poly=[[[40, 4], [40, 6], [41, 6], [40, 4]]])
loc = Location.objects.as_pymongo()[0]
self.assertEqual(loc["poly"], {"type": "Polygon", "coordinates": [[[40, 4], [40, 6], [41, 6], [40, 4]]]})
assert loc["poly"] == {
"type": "Polygon",
"coordinates": [[[40, 4], [40, 6], [41, 6], [40, 4]]],
}


if __name__ == '__main__':
if __name__ == "__main__":
unittest.main()
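
Taken together, the diff converts this geo test suite from unittest assertion methods (self.assertEqual, self.assertIn, self.assertDictEqual, ...) to bare assert statements, the idiom pytest rewrites into detailed failure messages. A minimal sketch of the mapping on made-up values, not the suite's fixtures:

import unittest

class OldStyle(unittest.TestCase):
    # unittest-style: assertions are methods on TestCase.
    def test_events(self):
        events = ["event1", "event2"]
        self.assertEqual(len(events), 2)
        self.assertIn("event1", events)
        self.assertNotIn("event3", events)

def test_events():
    # pytest-style: a plain function with bare asserts, as in the diff above.
    events = ["event1", "event2"]
    assert len(events) == 2
    assert "event1" in events
    assert "event3" not in events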