Compare commits
No commits in common. "master" and "v0.19.1" have entirely different histories.
143
.github/workflows/github-actions.yml
vendored
143
.github/workflows/github-actions.yml
vendored
@ -1,143 +0,0 @@
|
|||||||
name: MongoengineCI
|
|
||||||
on:
|
|
||||||
# All PR
|
|
||||||
pull_request:
|
|
||||||
# master branch merge
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- master
|
|
||||||
# release tags
|
|
||||||
create:
|
|
||||||
tags:
|
|
||||||
- 'v[0-9]+\.[0-9]+\.[0-9]+*'
|
|
||||||
env:
|
|
||||||
MONGODB_3_6: 3.6.14
|
|
||||||
MONGODB_4_0: 4.0.23
|
|
||||||
MONGODB_4_2: 4.2
|
|
||||||
MONGODB_4_4: 4.4
|
|
||||||
|
|
||||||
PYMONGO_3_4: 3.4
|
|
||||||
PYMONGO_3_6: 3.6
|
|
||||||
PYMONGO_3_9: 3.9
|
|
||||||
PYMONGO_3_11: 3.11
|
|
||||||
|
|
||||||
MAIN_PYTHON_VERSION: 3.7
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
linting:
|
|
||||||
# Run pre-commit (https://pre-commit.com/)
|
|
||||||
# which runs pre-configured linter & autoformatter
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Set up Python 3.7
|
|
||||||
uses: actions/setup-python@v2
|
|
||||||
with:
|
|
||||||
python-version: 3.7
|
|
||||||
- run: bash .github/workflows/install_ci_python_dep.sh
|
|
||||||
- run: pre-commit run -a
|
|
||||||
|
|
||||||
test:
|
|
||||||
# Test suite run against recent python versions
|
|
||||||
# and against a few combination of MongoDB and pymongo
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
python-version: [3.6, 3.7, 3.8, 3.9, "3.10", pypy3]
|
|
||||||
MONGODB: [$MONGODB_4_0]
|
|
||||||
PYMONGO: [$PYMONGO_3_11]
|
|
||||||
include:
|
|
||||||
- python-version: 3.7
|
|
||||||
MONGODB: $MONGODB_3_6
|
|
||||||
PYMONGO: $PYMONGO_3_9
|
|
||||||
- python-version: 3.7
|
|
||||||
MONGODB: $MONGODB_4_2
|
|
||||||
PYMONGO: $PYMONGO_3_6
|
|
||||||
- python-version: 3.7
|
|
||||||
MONGODB: $MONGODB_4_4
|
|
||||||
PYMONGO: $PYMONGO_3_11
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Set up Python ${{ matrix.python-version }}
|
|
||||||
uses: actions/setup-python@v2
|
|
||||||
with:
|
|
||||||
python-version: ${{ matrix.python-version }}
|
|
||||||
- name: install mongo and ci dependencies
|
|
||||||
run: |
|
|
||||||
bash .github/workflows/install_mongo.sh ${{ matrix.MONGODB }}
|
|
||||||
bash .github/workflows/install_ci_python_dep.sh
|
|
||||||
bash .github/workflows/start_mongo.sh ${{ matrix.MONGODB }}
|
|
||||||
- name: tox dry-run (to pre-install venv)
|
|
||||||
run: tox -e $(echo py${{ matrix.python-version }}-mg${{ matrix.PYMONGO }} | tr -d . | sed -e 's/pypypy/pypy/') -- -a "-k=test_ci_placeholder"
|
|
||||||
- name: Run test suite
|
|
||||||
run: tox -e $(echo py${{ matrix.python-version }}-mg${{ matrix.PYMONGO }} | tr -d . | sed -e 's/pypypy/pypy/') -- -a "--cov=mongoengine"
|
|
||||||
- name: Send coverage to Coveralls
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
COVERALLS_SERVICE_NAME: github
|
|
||||||
if: ${{ matrix.python-version == env.MAIN_PYTHON_VERSION }}
|
|
||||||
run: coveralls
|
|
||||||
|
|
||||||
build_doc_dryrun:
|
|
||||||
# ensures that readthedocs can be built continuously
|
|
||||||
# to avoid that it breaks when new releases are being created
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Set up Python ${{ matrix.python-version }}
|
|
||||||
uses: actions/setup-python@v2
|
|
||||||
with:
|
|
||||||
python-version: 3.7
|
|
||||||
- name: install python dep
|
|
||||||
run: |
|
|
||||||
pip install -e .
|
|
||||||
pip install -r docs/requirements.txt
|
|
||||||
- name: build doc
|
|
||||||
run: |
|
|
||||||
cd docs
|
|
||||||
make html-readthedocs
|
|
||||||
|
|
||||||
build-n-publish-dummy:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: [linting, test, build_doc_dryrun]
|
|
||||||
if: github.event_name != 'pull_request'
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@master
|
|
||||||
- name: Set up Python 3.7
|
|
||||||
uses: actions/setup-python@v1
|
|
||||||
with:
|
|
||||||
python-version: 3.7
|
|
||||||
- name: build dummy wheel for test-pypi
|
|
||||||
run: |
|
|
||||||
pip install wheel
|
|
||||||
python setup.py egg_info -b ".dev`date '+%Y%m%d%H%M%S'`" build sdist bdist_wheel
|
|
||||||
# - name: publish test-pypi
|
|
||||||
# # Although working and recommended, test-pypi has a limit
|
|
||||||
# # in the size of projects so it's better to avoid publishing
|
|
||||||
# # until there is a way to garbage collect these dummy releases
|
|
||||||
# uses: pypa/gh-action-pypi-publish@master
|
|
||||||
# with:
|
|
||||||
# password: ${{ secrets.test_pypi_token }}
|
|
||||||
# repository_url: https://test.pypi.org/legacy/
|
|
||||||
|
|
||||||
build-n-publish:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: [linting, test, build_doc_dryrun, build-n-publish-dummy]
|
|
||||||
if: github.event_name == 'create' && startsWith(github.ref, 'refs/tags/v')
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@master
|
|
||||||
- name: Set up Python 3.7
|
|
||||||
uses: actions/setup-python@v1
|
|
||||||
with:
|
|
||||||
python-version: 3.7
|
|
||||||
# todo separate build from publish
|
|
||||||
# https://stackoverflow.com/questions/59349905/which-properties-does-github-event-in-a-github-workflow-have
|
|
||||||
- name: build dummy wheel for test-pypi
|
|
||||||
run: |
|
|
||||||
pip install wheel
|
|
||||||
python setup.py sdist bdist_wheel
|
|
||||||
- name: publish pypi
|
|
||||||
uses: pypa/gh-action-pypi-publish@master
|
|
||||||
with:
|
|
||||||
password: ${{ secrets.pypi_token }}
|
|
5
.github/workflows/install_ci_python_dep.sh
vendored
5
.github/workflows/install_ci_python_dep.sh
vendored
@ -1,5 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
pip install --upgrade pip
|
|
||||||
pip install coveralls
|
|
||||||
pip install pre-commit
|
|
||||||
pip install tox
|
|
18
.github/workflows/install_mongo.sh
vendored
18
.github/workflows/install_mongo.sh
vendored
@ -1,18 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
|
|
||||||
MONGODB=$1
|
|
||||||
|
|
||||||
# Mongo > 4.0 follows different name convention for download links
|
|
||||||
mongo_build=mongodb-linux-x86_64-${MONGODB}
|
|
||||||
|
|
||||||
if [[ "$MONGODB" == *"4.2"* ]]; then
|
|
||||||
mongo_build=mongodb-linux-x86_64-ubuntu1804-v${MONGODB}-latest
|
|
||||||
elif [[ "$MONGODB" == *"4.4"* ]]; then
|
|
||||||
mongo_build=mongodb-linux-x86_64-ubuntu1804-v${MONGODB}-latest
|
|
||||||
fi
|
|
||||||
|
|
||||||
wget http://fastdl.mongodb.org/linux/$mongo_build.tgz
|
|
||||||
tar xzf $mongo_build.tgz
|
|
||||||
|
|
||||||
mongodb_dir=$(find ${PWD}/ -type d -name "mongodb-linux-x86_64*")
|
|
||||||
$mongodb_dir/bin/mongod --version
|
|
9
.github/workflows/start_mongo.sh
vendored
9
.github/workflows/start_mongo.sh
vendored
@ -1,9 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
|
|
||||||
MONGODB=$1
|
|
||||||
|
|
||||||
mongodb_dir=$(find ${PWD}/ -type d -name "mongodb-linux-x86_64*")
|
|
||||||
|
|
||||||
mkdir $mongodb_dir/data
|
|
||||||
$mongodb_dir/bin/mongod --dbpath $mongodb_dir/data --logpath $mongodb_dir/mongodb.log --fork
|
|
||||||
mongo --eval 'db.version();' # Make sure mongo is awake
|
|
11
.gitignore
vendored
11
.gitignore
vendored
@ -1,15 +1,8 @@
|
|||||||
|
.*
|
||||||
!.gitignore
|
!.gitignore
|
||||||
*~
|
*~
|
||||||
*.py[co]
|
*.py[co]
|
||||||
.*.sw[po]
|
.*.sw[po]
|
||||||
.cache/
|
|
||||||
.coverage
|
|
||||||
.coveragerc
|
|
||||||
.env
|
|
||||||
.idea/
|
|
||||||
.pytest_cache/
|
|
||||||
.tox/
|
|
||||||
.eggs/
|
|
||||||
*.egg
|
*.egg
|
||||||
docs/.build
|
docs/.build
|
||||||
docs/_build
|
docs/_build
|
||||||
@ -20,6 +13,8 @@ env/
|
|||||||
.settings
|
.settings
|
||||||
.project
|
.project
|
||||||
.pydevproject
|
.pydevproject
|
||||||
|
tests/test_bugfix.py
|
||||||
htmlcov/
|
htmlcov/
|
||||||
venv
|
venv
|
||||||
venv3
|
venv3
|
||||||
|
scratchpad
|
||||||
|
22
.landscape.yml
Normal file
22
.landscape.yml
Normal file
@ -0,0 +1,22 @@
|
|||||||
|
pylint:
|
||||||
|
disable:
|
||||||
|
# We use this a lot (e.g. via document._meta)
|
||||||
|
- protected-access
|
||||||
|
|
||||||
|
options:
|
||||||
|
additional-builtins:
|
||||||
|
# add xrange and long as valid built-ins. In Python 3, xrange is
|
||||||
|
# translated into range and long is translated into int via 2to3 (see
|
||||||
|
# "use_2to3" in setup.py). This should be removed when we drop Python
|
||||||
|
# 2 support (which probably won't happen any time soon).
|
||||||
|
- xrange
|
||||||
|
- long
|
||||||
|
|
||||||
|
pyflakes:
|
||||||
|
disable:
|
||||||
|
# undefined variables are already covered by pylint (and exclude
|
||||||
|
# xrange & long)
|
||||||
|
- F821
|
||||||
|
|
||||||
|
ignore-paths:
|
||||||
|
- benchmark.py
|
@ -1,26 +0,0 @@
|
|||||||
fail_fast: false
|
|
||||||
repos:
|
|
||||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
|
||||||
rev: v4.0.1
|
|
||||||
hooks:
|
|
||||||
- id: check-merge-conflict
|
|
||||||
- id: debug-statements
|
|
||||||
- id: trailing-whitespace
|
|
||||||
- id: end-of-file-fixer
|
|
||||||
- repo: https://github.com/ambv/black
|
|
||||||
rev: 21.5b2
|
|
||||||
hooks:
|
|
||||||
- id: black
|
|
||||||
- repo: https://gitlab.com/pycqa/flake8
|
|
||||||
rev: 3.9.2
|
|
||||||
hooks:
|
|
||||||
- id: flake8
|
|
||||||
- repo: https://github.com/asottile/pyupgrade
|
|
||||||
rev: v2.19.1
|
|
||||||
hooks:
|
|
||||||
- id: pyupgrade
|
|
||||||
args: [--py36-plus]
|
|
||||||
- repo: https://github.com/pycqa/isort
|
|
||||||
rev: 5.8.0
|
|
||||||
hooks:
|
|
||||||
- id: isort
|
|
@ -1,20 +0,0 @@
|
|||||||
# .readthedocs.yml
|
|
||||||
# Read the Docs configuration file
|
|
||||||
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
|
|
||||||
|
|
||||||
# Required
|
|
||||||
version: 2
|
|
||||||
|
|
||||||
# Build documentation in the docs/ directory with Sphinx
|
|
||||||
sphinx:
|
|
||||||
configuration: docs/conf.py
|
|
||||||
|
|
||||||
# Optionally set the version of Python and requirements required to build your docs
|
|
||||||
python:
|
|
||||||
version: 3.7
|
|
||||||
install:
|
|
||||||
- requirements: docs/requirements.txt
|
|
||||||
# docs/conf.py is importing mongoengine
|
|
||||||
# so mongoengine needs to be installed as well
|
|
||||||
- method: setuptools
|
|
||||||
path: .
|
|
114
.travis.yml
Normal file
114
.travis.yml
Normal file
@ -0,0 +1,114 @@
|
|||||||
|
# For full coverage, we'd have to test all supported Python, MongoDB, and
|
||||||
|
# PyMongo combinations. However, that would result in an overly long build
|
||||||
|
# with a very large number of jobs, hence we only test a subset of all the
|
||||||
|
# combinations:
|
||||||
|
# * MongoDB v3.4 & the latest PyMongo v3.x is currently the "main" setup,
|
||||||
|
# tested against Python v2.7, v3.5, v3.6, v3.7, v3.8 and PyPy.
|
||||||
|
# * Besides that, we test the lowest actively supported Python/MongoDB/PyMongo
|
||||||
|
# combination: MongoDB v3.4, PyMongo v3.4, Python v2.7.
|
||||||
|
# * MongoDB v3.6 is tested against Python v3.6, and PyMongo v3.6, v3.7, v3.8.
|
||||||
|
#
|
||||||
|
# We should periodically check MongoDB Server versions supported by MongoDB
|
||||||
|
# Inc., add newly released versions to the test matrix, and remove versions
|
||||||
|
# which have reached their End of Life. See:
|
||||||
|
# 1. https://www.mongodb.com/support-policy.
|
||||||
|
# 2. https://docs.mongodb.com/ecosystem/drivers/driver-compatibility-reference/#python-driver-compatibility
|
||||||
|
#
|
||||||
|
# Reminder: Update README.rst if you change MongoDB versions we test.
|
||||||
|
|
||||||
|
|
||||||
|
language: python
|
||||||
|
python:
|
||||||
|
- 2.7
|
||||||
|
- 3.5
|
||||||
|
- 3.6
|
||||||
|
- 3.7
|
||||||
|
- 3.8
|
||||||
|
- pypy
|
||||||
|
- pypy3
|
||||||
|
|
||||||
|
dist: xenial
|
||||||
|
|
||||||
|
env:
|
||||||
|
global:
|
||||||
|
- MONGODB_3_4=3.4.17
|
||||||
|
- MONGODB_3_6=3.6.12
|
||||||
|
- PYMONGO_3_9=3.9
|
||||||
|
- PYMONGO_3_6=3.6
|
||||||
|
- PYMONGO_3_4=3.4
|
||||||
|
matrix:
|
||||||
|
- MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_9}
|
||||||
|
|
||||||
|
matrix:
|
||||||
|
|
||||||
|
# Finish the build as soon as one job fails
|
||||||
|
fast_finish: true
|
||||||
|
|
||||||
|
include:
|
||||||
|
- python: 2.7
|
||||||
|
env: MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_4}
|
||||||
|
- python: 3.7
|
||||||
|
env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_6}
|
||||||
|
- python: 3.7
|
||||||
|
env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_9}
|
||||||
|
|
||||||
|
install:
|
||||||
|
# Install Mongo
|
||||||
|
- wget http://fastdl.mongodb.org/linux/mongodb-linux-x86_64-${MONGODB}.tgz
|
||||||
|
- tar xzf mongodb-linux-x86_64-${MONGODB}.tgz
|
||||||
|
- ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --version
|
||||||
|
# Install Python dependencies.
|
||||||
|
- pip install --upgrade pip
|
||||||
|
- pip install coveralls
|
||||||
|
- pip install flake8 flake8-import-order
|
||||||
|
- pip install tox # tox 3.11.0 has requirement virtualenv>=14.0.0
|
||||||
|
- pip install virtualenv # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32)
|
||||||
|
# tox dryrun to setup the tox venv (we run a mock test).
|
||||||
|
- tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -a "-k=test_ci_placeholder"
|
||||||
|
# Install black for Python v3.7 only.
|
||||||
|
- if [[ $TRAVIS_PYTHON_VERSION == '3.7' ]]; then pip install black; fi
|
||||||
|
|
||||||
|
before_script:
|
||||||
|
- mkdir ${PWD}/mongodb-linux-x86_64-${MONGODB}/data
|
||||||
|
- ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --dbpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/data --logpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/mongodb.log --fork
|
||||||
|
- if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then flake8 .; else echo "flake8 only runs on py27"; fi # Run flake8 for Python 2.7 only
|
||||||
|
- if [[ $TRAVIS_PYTHON_VERSION == '3.7' ]]; then black --check .; else echo "black only runs on py37"; fi # Run black for Python 3.7 only
|
||||||
|
- mongo --eval 'db.version();' # Make sure mongo is awake
|
||||||
|
|
||||||
|
script:
|
||||||
|
- tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -a "--cov=mongoengine"
|
||||||
|
|
||||||
|
# For now only submit coveralls for Python v2.7. Python v3.x currently shows
|
||||||
|
# 0% coverage. That's caused by 'use_2to3', which builds the py3-compatible
|
||||||
|
# code in a separate dir and runs tests on that.
|
||||||
|
after_success:
|
||||||
|
- if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then coveralls --verbose; else echo "coveralls only sent for py27"; fi
|
||||||
|
|
||||||
|
notifications:
|
||||||
|
irc: irc.freenode.org#mongoengine
|
||||||
|
|
||||||
|
# Only run builds on the master branch and GitHub releases (tagged as vX.Y.Z)
|
||||||
|
branches:
|
||||||
|
only:
|
||||||
|
- master
|
||||||
|
- /^v.*$/
|
||||||
|
|
||||||
|
# Whenever a new release is created via GitHub, publish it on PyPI.
|
||||||
|
deploy:
|
||||||
|
provider: pypi
|
||||||
|
user: the_drow
|
||||||
|
password:
|
||||||
|
secure: QMyatmWBnC6ZN3XLW2+fTBDU4LQcp1m/LjR2/0uamyeUzWKdlOoh/Wx5elOgLwt/8N9ppdPeG83ose1jOz69l5G0MUMjv8n/RIcMFSpCT59tGYqn3kh55b0cIZXFT9ar+5cxlif6a5rS72IHm5li7QQyxexJIII6Uxp0kpvUmek=
|
||||||
|
|
||||||
|
# Create a source distribution and a pure python wheel for faster installs.
|
||||||
|
distributions: "sdist bdist_wheel"
|
||||||
|
|
||||||
|
# Only deploy on tagged commits (aka GitHub releases) and only for the parent
|
||||||
|
# repo's builds running Python v2.7 along with PyMongo v3.x and MongoDB v3.4.
|
||||||
|
# We run Travis against many different Python, PyMongo, and MongoDB versions
|
||||||
|
# and we don't want the deploy to occur multiple times).
|
||||||
|
on:
|
||||||
|
tags: true
|
||||||
|
repo: MongoEngine/mongoengine
|
||||||
|
condition: ($PYMONGO = ${PYMONGO_3_9}) && ($MONGODB = ${MONGODB_3_4})
|
||||||
|
python: 2.7
|
8
AUTHORS
8
AUTHORS
@ -255,11 +255,3 @@ that much better:
|
|||||||
* Filip Kucharczyk (https://github.com/Pacu2)
|
* Filip Kucharczyk (https://github.com/Pacu2)
|
||||||
* Eric Timmons (https://github.com/daewok)
|
* Eric Timmons (https://github.com/daewok)
|
||||||
* Matthew Simpson (https://github.com/mcsimps2)
|
* Matthew Simpson (https://github.com/mcsimps2)
|
||||||
* Leonardo Domingues (https://github.com/leodmgs)
|
|
||||||
* Agustin Barto (https://github.com/abarto)
|
|
||||||
* Stankiewicz Mateusz (https://github.com/mas15)
|
|
||||||
* Felix Schultheiß (https://github.com/felix-smashdocs)
|
|
||||||
* Jan Stein (https://github.com/janste63)
|
|
||||||
* Timothé Perez (https://github.com/AchilleAsh)
|
|
||||||
* oleksandr-l5 (https://github.com/oleksandr-l5)
|
|
||||||
* Ido Shraga (https://github.com/idoshr)
|
|
||||||
|
@ -20,47 +20,19 @@ post to the `user group <http://groups.google.com/group/mongoengine-users>`
|
|||||||
Supported Interpreters
|
Supported Interpreters
|
||||||
----------------------
|
----------------------
|
||||||
|
|
||||||
MongoEngine supports CPython 3.5 and newer as well as Pypy3.
|
MongoEngine supports CPython 2.7 and newer. Language
|
||||||
Language features not supported by all interpreters can not be used.
|
features not supported by all interpreters can not be used.
|
||||||
|
The codebase is written in python 2 so you must be using python 2
|
||||||
Python3 codebase
|
when developing new features. Compatibility of the library with Python 3
|
||||||
----------------------
|
relies on the 2to3 package that gets executed as part of the installation
|
||||||
|
build. You should ensure that your code is properly converted by
|
||||||
Since 0.20, the codebase is exclusively Python 3.
|
`2to3 <http://docs.python.org/library/2to3.html>`_.
|
||||||
|
|
||||||
Earlier versions were exclusively Python2, and were relying on 2to3 to support Python3 installs.
|
|
||||||
Travis runs the tests against the main Python 3.x versions.
|
|
||||||
|
|
||||||
|
|
||||||
Style Guide
|
Style Guide
|
||||||
-----------
|
-----------
|
||||||
|
|
||||||
MongoEngine's codebase is auto-formatted with `black <https://github.com/python/black>`_, imports are ordered with `isort <https://pycqa.github.io/isort/>`_
|
MongoEngine uses `black <https://github.com/python/black>`_ for code
|
||||||
and other tools like flake8 are also used. Those tools will run as part of the CI and will fail in case the code is not formatted properly.
|
formatting.
|
||||||
|
|
||||||
To install all development tools, simply run the following commands:
|
|
||||||
|
|
||||||
.. code-block:: console
|
|
||||||
|
|
||||||
$ python -m pip install -r requirements-dev.txt
|
|
||||||
|
|
||||||
|
|
||||||
You can install `pre-commit <https://pre-commit.com/>`_ into your git hooks,
|
|
||||||
to automatically check and fix any formatting issue before creating a
|
|
||||||
git commit.
|
|
||||||
|
|
||||||
To enable ``pre-commit`` simply run:
|
|
||||||
|
|
||||||
.. code-block:: console
|
|
||||||
|
|
||||||
$ pre-commit install
|
|
||||||
|
|
||||||
See the ``.pre-commit-config.yaml`` configuration file for more information
|
|
||||||
on how it works.
|
|
||||||
|
|
||||||
pre-commit will now run upon every commit and will reject anything that doesn't comply.
|
|
||||||
|
|
||||||
You can also run all the checks with ``pre-commit run -a``, this is what is used in the CI.
|
|
||||||
|
|
||||||
Testing
|
Testing
|
||||||
-------
|
-------
|
||||||
|
22
README.rst
22
README.rst
@ -12,8 +12,9 @@ MongoEngine
|
|||||||
.. image:: https://coveralls.io/repos/github/MongoEngine/mongoengine/badge.svg?branch=master
|
.. image:: https://coveralls.io/repos/github/MongoEngine/mongoengine/badge.svg?branch=master
|
||||||
:target: https://coveralls.io/github/MongoEngine/mongoengine?branch=master
|
:target: https://coveralls.io/github/MongoEngine/mongoengine?branch=master
|
||||||
|
|
||||||
.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
|
.. image:: https://landscape.io/github/MongoEngine/mongoengine/master/landscape.svg?style=flat
|
||||||
:target: https://github.com/ambv/black
|
:target: https://landscape.io/github/MongoEngine/mongoengine/master
|
||||||
|
:alt: Code Health
|
||||||
|
|
||||||
About
|
About
|
||||||
=====
|
=====
|
||||||
@ -25,15 +26,15 @@ an `API reference <https://mongoengine-odm.readthedocs.io/apireference.html>`_.
|
|||||||
|
|
||||||
Supported MongoDB Versions
|
Supported MongoDB Versions
|
||||||
==========================
|
==========================
|
||||||
MongoEngine is currently tested against MongoDB v3.4, v3.6 and v4.0. Future versions
|
MongoEngine is currently tested against MongoDB v3.4 and v3.6. Future versions
|
||||||
should be supported as well, but aren't actively tested at the moment. Make
|
should be supported as well, but aren't actively tested at the moment. Make
|
||||||
sure to open an issue or submit a pull request if you experience any problems
|
sure to open an issue or submit a pull request if you experience any problems
|
||||||
with MongoDB version > 4.0.
|
with MongoDB version > 3.6.
|
||||||
|
|
||||||
Installation
|
Installation
|
||||||
============
|
============
|
||||||
We recommend the use of `virtualenv <https://virtualenv.pypa.io/>`_ and of
|
We recommend the use of `virtualenv <https://virtualenv.pypa.io/>`_ and of
|
||||||
`pip <https://pip.pypa.io/>`_. You can then use ``python -m pip install -U mongoengine``.
|
`pip <https://pip.pypa.io/>`_. You can then use ``pip install -U mongoengine``.
|
||||||
You may also have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_
|
You may also have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_
|
||||||
and thus you can use ``easy_install -U mongoengine``. Another option is
|
and thus you can use ``easy_install -U mongoengine``. Another option is
|
||||||
`pipenv <https://docs.pipenv.org/>`_. You can then use ``pipenv install mongoengine``
|
`pipenv <https://docs.pipenv.org/>`_. You can then use ``pipenv install mongoengine``
|
||||||
@ -41,14 +42,13 @@ to both create the virtual environment and install the package. Otherwise, you c
|
|||||||
download the source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and
|
download the source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and
|
||||||
run ``python setup.py install``.
|
run ``python setup.py install``.
|
||||||
|
|
||||||
The support for Python2 was dropped with MongoEngine 0.20.0
|
|
||||||
|
|
||||||
Dependencies
|
Dependencies
|
||||||
============
|
============
|
||||||
All of the dependencies can easily be installed via `python -m pip <https://pip.pypa.io/>`_.
|
All of the dependencies can easily be installed via `pip <https://pip.pypa.io/>`_.
|
||||||
At the very least, you'll need these two packages to use MongoEngine:
|
At the very least, you'll need these two packages to use MongoEngine:
|
||||||
|
|
||||||
- pymongo>=3.4
|
- pymongo>=3.4
|
||||||
|
- six>=1.10.0
|
||||||
|
|
||||||
If you utilize a ``DateTimeField``, you might also use a more flexible date parser:
|
If you utilize a ``DateTimeField``, you might also use a more flexible date parser:
|
||||||
|
|
||||||
@ -58,10 +58,6 @@ If you need to use an ``ImageField`` or ``ImageGridFsProxy``:
|
|||||||
|
|
||||||
- Pillow>=2.0.0
|
- Pillow>=2.0.0
|
||||||
|
|
||||||
If you need to use signals:
|
|
||||||
|
|
||||||
- blinker>=1.3
|
|
||||||
|
|
||||||
Examples
|
Examples
|
||||||
========
|
========
|
||||||
Some simple examples of what MongoEngine code looks like:
|
Some simple examples of what MongoEngine code looks like:
|
||||||
@ -129,7 +125,7 @@ installed in your environment and then:
|
|||||||
.. code-block:: shell
|
.. code-block:: shell
|
||||||
|
|
||||||
# Install tox
|
# Install tox
|
||||||
$ python -m pip install tox
|
$ pip install tox
|
||||||
# Run the test suites
|
# Run the test suites
|
||||||
$ tox
|
$ tox
|
||||||
|
|
||||||
|
@ -45,7 +45,7 @@ def test_basic():
|
|||||||
|
|
||||||
print(
|
print(
|
||||||
"Doc setattr: %.3fus"
|
"Doc setattr: %.3fus"
|
||||||
% (timeit(lambda: setattr(b, "name", "New name"), 10000) * 10 ** 6) # noqa B010
|
% (timeit(lambda: setattr(b, "name", "New name"), 10000) * 10 ** 6)
|
||||||
)
|
)
|
||||||
|
|
||||||
print("Doc to mongo: %.3fus" % (timeit(b.to_mongo, 1000) * 10 ** 6))
|
print("Doc to mongo: %.3fus" % (timeit(b.to_mongo, 1000) * 10 ** 6))
|
||||||
|
@ -4,14 +4,12 @@ import timeit
|
|||||||
def main():
|
def main():
|
||||||
setup = """
|
setup = """
|
||||||
from pymongo import MongoClient
|
from pymongo import MongoClient
|
||||||
|
|
||||||
connection = MongoClient()
|
connection = MongoClient()
|
||||||
connection.drop_database('mongoengine_benchmark_test')
|
connection.drop_database('mongoengine_benchmark_test')
|
||||||
"""
|
"""
|
||||||
|
|
||||||
stmt = """
|
stmt = """
|
||||||
from pymongo import MongoClient
|
from pymongo import MongoClient
|
||||||
|
|
||||||
connection = MongoClient()
|
connection = MongoClient()
|
||||||
|
|
||||||
db = connection.mongoengine_benchmark_test
|
db = connection.mongoengine_benchmark_test
|
||||||
@ -31,7 +29,7 @@ myNoddys = noddy.find()
|
|||||||
print("-" * 100)
|
print("-" * 100)
|
||||||
print("PyMongo: Creating 10000 dictionaries.")
|
print("PyMongo: Creating 10000 dictionaries.")
|
||||||
t = timeit.Timer(stmt=stmt, setup=setup)
|
t = timeit.Timer(stmt=stmt, setup=setup)
|
||||||
print(f"{t.timeit(1)}s")
|
print("{}s".format(t.timeit(1)))
|
||||||
|
|
||||||
stmt = """
|
stmt = """
|
||||||
from pymongo import MongoClient, WriteConcern
|
from pymongo import MongoClient, WriteConcern
|
||||||
@ -54,11 +52,10 @@ myNoddys = noddy.find()
|
|||||||
print("-" * 100)
|
print("-" * 100)
|
||||||
print('PyMongo: Creating 10000 dictionaries (write_concern={"w": 0}).')
|
print('PyMongo: Creating 10000 dictionaries (write_concern={"w": 0}).')
|
||||||
t = timeit.Timer(stmt=stmt, setup=setup)
|
t = timeit.Timer(stmt=stmt, setup=setup)
|
||||||
print(f"{t.timeit(1)}s")
|
print("{}s".format(t.timeit(1)))
|
||||||
|
|
||||||
setup = """
|
setup = """
|
||||||
from pymongo import MongoClient
|
from pymongo import MongoClient
|
||||||
|
|
||||||
connection = MongoClient()
|
connection = MongoClient()
|
||||||
connection.drop_database('mongoengine_benchmark_test')
|
connection.drop_database('mongoengine_benchmark_test')
|
||||||
connection.close()
|
connection.close()
|
||||||
@ -84,7 +81,7 @@ myNoddys = Noddy.objects()
|
|||||||
print("-" * 100)
|
print("-" * 100)
|
||||||
print("MongoEngine: Creating 10000 dictionaries.")
|
print("MongoEngine: Creating 10000 dictionaries.")
|
||||||
t = timeit.Timer(stmt=stmt, setup=setup)
|
t = timeit.Timer(stmt=stmt, setup=setup)
|
||||||
print(f"{t.timeit(1)}s")
|
print("{}s".format(t.timeit(1)))
|
||||||
|
|
||||||
stmt = """
|
stmt = """
|
||||||
for i in range(10000):
|
for i in range(10000):
|
||||||
@ -102,7 +99,7 @@ myNoddys = Noddy.objects()
|
|||||||
print("-" * 100)
|
print("-" * 100)
|
||||||
print("MongoEngine: Creating 10000 dictionaries (using a single field assignment).")
|
print("MongoEngine: Creating 10000 dictionaries (using a single field assignment).")
|
||||||
t = timeit.Timer(stmt=stmt, setup=setup)
|
t = timeit.Timer(stmt=stmt, setup=setup)
|
||||||
print(f"{t.timeit(1)}s")
|
print("{}s".format(t.timeit(1)))
|
||||||
|
|
||||||
stmt = """
|
stmt = """
|
||||||
for i in range(10000):
|
for i in range(10000):
|
||||||
@ -118,7 +115,7 @@ myNoddys = Noddy.objects()
|
|||||||
print("-" * 100)
|
print("-" * 100)
|
||||||
print('MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}).')
|
print('MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}).')
|
||||||
t = timeit.Timer(stmt=stmt, setup=setup)
|
t = timeit.Timer(stmt=stmt, setup=setup)
|
||||||
print(f"{t.timeit(1)}s")
|
print("{}s".format(t.timeit(1)))
|
||||||
|
|
||||||
stmt = """
|
stmt = """
|
||||||
for i in range(10000):
|
for i in range(10000):
|
||||||
@ -136,7 +133,7 @@ myNoddys = Noddy.objects()
|
|||||||
'MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}, validate=False).'
|
'MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}, validate=False).'
|
||||||
)
|
)
|
||||||
t = timeit.Timer(stmt=stmt, setup=setup)
|
t = timeit.Timer(stmt=stmt, setup=setup)
|
||||||
print(f"{t.timeit(1)}s")
|
print("{}s".format(t.timeit(1)))
|
||||||
|
|
||||||
stmt = """
|
stmt = """
|
||||||
for i in range(10000):
|
for i in range(10000):
|
||||||
@ -154,7 +151,7 @@ myNoddys = Noddy.objects()
|
|||||||
'MongoEngine: Creating 10000 dictionaries (force_insert=True, write_concern={"w": 0}, validate=False).'
|
'MongoEngine: Creating 10000 dictionaries (force_insert=True, write_concern={"w": 0}, validate=False).'
|
||||||
)
|
)
|
||||||
t = timeit.Timer(stmt=stmt, setup=setup)
|
t = timeit.Timer(stmt=stmt, setup=setup)
|
||||||
print(f"{t.timeit(1)}s")
|
print("{}s".format(t.timeit(1)))
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
@ -33,14 +33,8 @@ clean:
|
|||||||
html:
|
html:
|
||||||
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
|
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
|
||||||
@echo
|
@echo
|
||||||
@echo "Build finished. Check $(BUILDDIR)/html/index.html"
|
|
||||||
|
|
||||||
html-readthedocs:
|
|
||||||
$(SPHINXBUILD) -T -E -b readthedocs $(ALLSPHINXOPTS) $(BUILDDIR)/html
|
|
||||||
@echo
|
|
||||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
|
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
|
||||||
|
|
||||||
|
|
||||||
dirhtml:
|
dirhtml:
|
||||||
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
|
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
|
||||||
@echo
|
@echo
|
||||||
|
@ -75,7 +75,6 @@ Fields
|
|||||||
.. autoclass:: mongoengine.fields.StringField
|
.. autoclass:: mongoengine.fields.StringField
|
||||||
.. autoclass:: mongoengine.fields.URLField
|
.. autoclass:: mongoengine.fields.URLField
|
||||||
.. autoclass:: mongoengine.fields.EmailField
|
.. autoclass:: mongoengine.fields.EmailField
|
||||||
.. autoclass:: mongoengine.fields.EnumField
|
|
||||||
.. autoclass:: mongoengine.fields.IntField
|
.. autoclass:: mongoengine.fields.IntField
|
||||||
.. autoclass:: mongoengine.fields.LongField
|
.. autoclass:: mongoengine.fields.LongField
|
||||||
.. autoclass:: mongoengine.fields.FloatField
|
.. autoclass:: mongoengine.fields.FloatField
|
||||||
|
@ -1,5 +1,4 @@
|
|||||||
|
|
||||||
|
|
||||||
=========
|
=========
|
||||||
Changelog
|
Changelog
|
||||||
=========
|
=========
|
||||||
@ -7,72 +6,10 @@ Changelog
|
|||||||
Development
|
Development
|
||||||
===========
|
===========
|
||||||
- (Fill this out as you fix issues and develop your features).
|
- (Fill this out as you fix issues and develop your features).
|
||||||
- EnumField improvements: now `choices` limits the values of an enum to allow
|
|
||||||
- Fix deepcopy of EmbeddedDocument #2202
|
|
||||||
- Fix error when using precision=0 with DecimalField #2535
|
|
||||||
- Add support for regex and whole word text search query #2568
|
|
||||||
|
|
||||||
Changes in 0.23.1
|
|
||||||
===========
|
|
||||||
- Bug fix: ignore LazyReferenceFields when clearing _changed_fields #2484
|
|
||||||
- Improve connection doc #2481
|
|
||||||
|
|
||||||
Changes in 0.23.0
|
|
||||||
=================
|
|
||||||
- Bugfix: manually setting SequenceField in DynamicDocument doesn't increment the counter #2471
|
|
||||||
- Add MongoDB 4.2 and 4.4 to CI
|
|
||||||
- Add support for allowDiskUse on querysets #2468
|
|
||||||
|
|
||||||
Changes in 0.22.1
|
|
||||||
=================
|
|
||||||
- Declare that Py3.5 is not supported in package metadata #2449
|
|
||||||
- Moved CI from Travis to Github-Actions
|
|
||||||
|
|
||||||
Changes in 0.22.0
|
|
||||||
=================
|
|
||||||
- Fix LazyReferenceField dereferencing in embedded documents #2426
|
|
||||||
- Fix regarding the recent use of Cursor.__spec in .count() that was interfering with mongomock #2425
|
|
||||||
- Drop support for Python 3.5 by introducing f-strings in the codebase
|
|
||||||
|
|
||||||
Changes in 0.21.0
|
|
||||||
=================
|
|
||||||
- Bug fix in DynamicDocument which is not parsing known fields in constructor like Document do #2412
|
|
||||||
- When using pymongo >= 3.7, make use of Collection.count_documents instead of Collection.count
|
|
||||||
and Cursor.count that got deprecated in pymongo >= 3.7.
|
|
||||||
This should have a negative impact on performance of count see Issue #2219
|
|
||||||
- Fix a bug that made the queryset drop the read_preference after clone().
|
|
||||||
- Remove Py3.5 from CI as it reached EOL and add Python 3.9
|
|
||||||
- Fix some issues related with db_field/field conflict in constructor #2414
|
|
||||||
- BREAKING CHANGE: Fix the behavior of Doc.objects.limit(0) which should return all documents (similar to mongodb) #2311
|
|
||||||
- Bug fix in ListField when updating the first item, it was saving the whole list, instead of
|
|
||||||
just replacing the first item (as usually done when updating 1 item of the list) #2392
|
|
||||||
- Add EnumField: ``mongoengine.fields.EnumField``
|
|
||||||
- Refactoring - Remove useless code related to Document.__only_fields and Queryset.only_fields
|
|
||||||
- Fix query transformation regarding special operators #2365
|
|
||||||
- Bug Fix: Document.save() fails when shard_key is not _id #2154
|
|
||||||
|
|
||||||
Changes in 0.20.0
|
|
||||||
=================
|
|
||||||
- ATTENTION: Drop support for Python2
|
|
||||||
- Add Mongo 4.0 to Travis
|
|
||||||
- Fix error when setting a string as a ComplexDateTimeField #2253
|
|
||||||
- Bump development Status classifier to Production/Stable #2232
|
|
||||||
- Improve Queryset.get to avoid confusing MultipleObjectsReturned message in case multiple match are found #630
|
|
||||||
- Fixed a bug causing inaccurate query results, while combining ``__raw__`` and regular filters for the same field #2264
|
|
||||||
- Add support for the `elemMatch` projection operator in .fields() (e.g BlogPost.objects.fields(elemMatch__comments="test")) #2267
|
|
||||||
- DictField validate failed without default connection (bug introduced in 0.19.0) #2239
|
|
||||||
- Remove methods that were deprecated years ago:
|
|
||||||
- name parameter in Field constructor e.g `StringField(name="...")`, was replaced by db_field
|
|
||||||
- Queryset.slave_okay() was deprecated since pymongo3
|
|
||||||
- dropDups was dropped with MongoDB3
|
|
||||||
- ``Queryset._ensure_indexes`` and ``Queryset.ensure_indexes``, the right method to use is ``Document.ensure_indexes``
|
|
||||||
- Added pre-commit for development/CI #2212
|
|
||||||
- Renamed requirements-lint.txt to requirements-dev.txt #2212
|
|
||||||
- Support for setting ReadConcern #2255
|
|
||||||
|
|
||||||
Changes in 0.19.1
|
Changes in 0.19.1
|
||||||
=================
|
=================
|
||||||
- Tests require Pillow < 7.0.0 as it dropped Python2 support
|
- Requires Pillow < 7.0.0 as it dropped Python2 support
|
||||||
- DEPRECATION: The interface of ``QuerySet.aggregate`` method was changed, it no longer takes an unpacked list of
|
- DEPRECATION: The interface of ``QuerySet.aggregate`` method was changed, it no longer takes an unpacked list of
|
||||||
pipeline steps (*pipeline) but simply takes the pipeline list just like ``pymongo.Collection.aggregate`` does. #2079
|
pipeline steps (*pipeline) but simply takes the pipeline list just like ``pymongo.Collection.aggregate`` does. #2079
|
||||||
|
|
||||||
@ -500,6 +437,9 @@ Changes in 0.8.3
|
|||||||
- Document.select_related() now respects ``db_alias`` (#377)
|
- Document.select_related() now respects ``db_alias`` (#377)
|
||||||
- Reload uses shard_key if applicable (#384)
|
- Reload uses shard_key if applicable (#384)
|
||||||
- Dynamic fields are ordered based on creation and stored in _fields_ordered (#396)
|
- Dynamic fields are ordered based on creation and stored in _fields_ordered (#396)
|
||||||
|
|
||||||
|
**Potential breaking change:** http://docs.mongoengine.org/en/latest/upgrade.html#to-0-8-3
|
||||||
|
|
||||||
- Fixed pickling dynamic documents ``_dynamic_fields`` (#387)
|
- Fixed pickling dynamic documents ``_dynamic_fields`` (#387)
|
||||||
- Fixed ListField setslice and delslice dirty tracking (#390)
|
- Fixed ListField setslice and delslice dirty tracking (#390)
|
||||||
- Added Django 1.5 PY3 support (#392)
|
- Added Django 1.5 PY3 support (#392)
|
||||||
|
@ -1,3 +1,4 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
#
|
#
|
||||||
# MongoEngine documentation build configuration file, created by
|
# MongoEngine documentation build configuration file, created by
|
||||||
# sphinx-quickstart on Sun Nov 22 18:14:13 2009.
|
# sphinx-quickstart on Sun Nov 22 18:14:13 2009.
|
||||||
@ -26,7 +27,7 @@ sys.path.insert(0, os.path.abspath(".."))
|
|||||||
|
|
||||||
# Add any Sphinx extension module names here, as strings. They can be extensions
|
# Add any Sphinx extension module names here, as strings. They can be extensions
|
||||||
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
|
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
|
||||||
extensions = ["sphinx.ext.autodoc", "sphinx.ext.todo", "readthedocs_ext.readthedocs"]
|
extensions = ["sphinx.ext.autodoc", "sphinx.ext.todo"]
|
||||||
|
|
||||||
# Add any paths that contain templates here, relative to this directory.
|
# Add any paths that contain templates here, relative to this directory.
|
||||||
templates_path = ["_templates"]
|
templates_path = ["_templates"]
|
||||||
@ -41,8 +42,8 @@ source_suffix = ".rst"
|
|||||||
master_doc = "index"
|
master_doc = "index"
|
||||||
|
|
||||||
# General information about the project.
|
# General information about the project.
|
||||||
project = "MongoEngine"
|
project = u"MongoEngine"
|
||||||
copyright = "2009, MongoEngine Authors" # noqa: A001
|
copyright = u"2009, MongoEngine Authors"
|
||||||
|
|
||||||
# The version info for the project you're documenting, acts as replacement for
|
# The version info for the project you're documenting, acts as replacement for
|
||||||
# |version| and |release|, also used in various other places throughout the
|
# |version| and |release|, also used in various other places throughout the
|
||||||
|
@ -10,3 +10,4 @@ If this is a requirement for your project, check the alternative: `uMongo`_ and
|
|||||||
|
|
||||||
.. _uMongo: https://umongo.readthedocs.io/
|
.. _uMongo: https://umongo.readthedocs.io/
|
||||||
.. _MotorEngine: https://motorengine.readthedocs.io/
|
.. _MotorEngine: https://motorengine.readthedocs.io/
|
||||||
|
|
||||||
|
@ -5,7 +5,7 @@ Connecting to MongoDB
|
|||||||
=====================
|
=====================
|
||||||
|
|
||||||
Connections in MongoEngine are registered globally and are identified with aliases.
|
Connections in MongoEngine are registered globally and are identified with aliases.
|
||||||
If no ``alias`` is provided during the connection, it will use "default" as alias.
|
If no `alias` is provided during the connection, it will use "default" as alias.
|
||||||
|
|
||||||
To connect to a running instance of :program:`mongod`, use the :func:`~mongoengine.connect`
|
To connect to a running instance of :program:`mongod`, use the :func:`~mongoengine.connect`
|
||||||
function. The first argument is the name of the database to connect to::
|
function. The first argument is the name of the database to connect to::
|
||||||
@ -14,66 +14,25 @@ function. The first argument is the name of the database to connect to::
|
|||||||
connect('project1')
|
connect('project1')
|
||||||
|
|
||||||
By default, MongoEngine assumes that the :program:`mongod` instance is running
|
By default, MongoEngine assumes that the :program:`mongod` instance is running
|
||||||
on **localhost** on port **27017**.
|
on **localhost** on port **27017**. If MongoDB is running elsewhere, you should
|
||||||
|
provide the :attr:`host` and :attr:`port` arguments to
|
||||||
|
:func:`~mongoengine.connect`::
|
||||||
|
|
||||||
If MongoDB is running elsewhere, you need to provide details on how to connect. There are two ways of
|
connect('project1', host='192.168.1.35', port=12345)
|
||||||
doing this. Using a connection string in URI format (**this is the preferred method**) or individual attributes
|
|
||||||
provided as keyword arguments.
|
|
||||||
|
|
||||||
Connect with URI string
|
|
||||||
=======================
|
|
||||||
|
|
||||||
When using a connection string in URI format you should specify the connection details
|
|
||||||
as the :attr:`host` to :func:`~mongoengine.connect`. In a web application context for instance, the URI
|
|
||||||
is typically read from the config file::
|
|
||||||
|
|
||||||
connect(host="mongodb://127.0.0.1:27017/my_db")
|
|
||||||
|
|
||||||
If the database requires authentication, you can specify it in the
|
|
||||||
URI. As each database can have its own users configured, you need to tell MongoDB
|
|
||||||
where to look for the user you are working with, that's what the ``?authSource=admin`` bit
|
|
||||||
of the MongoDB connection string is for::
|
|
||||||
|
|
||||||
# Connects to 'my_db' database by authenticating
|
|
||||||
# with given credentials against the 'admin' database (by default as authSource isn't provided)
|
|
||||||
connect(host="mongodb://my_user:my_password@127.0.0.1:27017/my_db")
|
|
||||||
|
|
||||||
# Equivalent to previous connection but explicitly states that
|
|
||||||
# it should use admin as the authentication source database
|
|
||||||
connect(host="mongodb://my_user:my_password@hostname:port/my_db?authSource=admin")
|
|
||||||
|
|
||||||
# Connects to 'my_db' database by authenticating
|
|
||||||
# with given credentials against that same database
|
|
||||||
connect(host="mongodb://my_user:my_password@127.0.0.1:27017/my_db?authSource=my_db")
|
|
||||||
|
|
||||||
The URI string can also be used to configure advanced parameters like ssl, replicaSet, etc. For more
|
|
||||||
information or example about URI string, you can refer to the `official doc <https://docs.mongodb.com/manual/reference/connection-string/>`_::
|
|
||||||
|
|
||||||
connect(host="mongodb://my_user:my_password@127.0.0.1:27017/my_db?authSource=admin&ssl=true&replicaSet=globaldb")
|
|
||||||
|
|
||||||
.. note:: URI containing SRV records (e.g "mongodb+srv://server.example.com/") can be used as well
|
|
||||||
|
|
||||||
Connect with keyword attributes
|
|
||||||
===============================
|
|
||||||
|
|
||||||
The second option for specifying the connection details is to provide the information as keyword
|
|
||||||
attributes to :func:`~mongoengine.connect`::
|
|
||||||
|
|
||||||
connect('my_db', host='127.0.0.1', port=27017)
|
|
||||||
|
|
||||||
If the database requires authentication, :attr:`username`, :attr:`password`
|
If the database requires authentication, :attr:`username`, :attr:`password`
|
||||||
and :attr:`authentication_source` arguments should be provided::
|
and :attr:`authentication_source` arguments should be provided::
|
||||||
|
|
||||||
connect('my_db', username='my_user', password='my_password', authentication_source='admin')
|
connect('project1', username='webapp', password='pwd123', authentication_source='admin')
|
||||||
|
|
||||||
The set of attributes that :func:`~mongoengine.connect` recognizes includes but is not limited to:
|
URI style connections are also supported -- just supply the URI as
|
||||||
:attr:`host`, :attr:`port`, :attr:`read_preference`, :attr:`username`, :attr:`password`, :attr:`authentication_source`, :attr:`authentication_mechanism`,
|
the :attr:`host` to
|
||||||
:attr:`replicaset`, :attr:`tls`, etc. Most of the parameters accepted by `pymongo.MongoClient <https://pymongo.readthedocs.io/en/stable/api/pymongo/mongo_client.html#pymongo.mongo_client.MongoClient>`_
|
:func:`~mongoengine.connect`::
|
||||||
can be used with :func:`~mongoengine.connect` and will simply be forwarded when instantiating the `pymongo.MongoClient`.
|
|
||||||
|
connect('project1', host='mongodb://localhost/database_name')
|
||||||
|
|
||||||
.. note:: Database, username and password from URI string overrides
|
.. note:: Database, username and password from URI string overrides
|
||||||
corresponding parameters in :func:`~mongoengine.connect`, this should
|
corresponding parameters in :func:`~mongoengine.connect`: ::
|
||||||
obviously be avoided: ::
|
|
||||||
|
|
||||||
connect(
|
connect(
|
||||||
db='test',
|
db='test',
|
||||||
@ -82,19 +41,28 @@ can be used with :func:`~mongoengine.connect` and will simply be forwarded when
|
|||||||
host='mongodb://admin:qwerty@localhost/production'
|
host='mongodb://admin:qwerty@localhost/production'
|
||||||
)
|
)
|
||||||
|
|
||||||
will establish connection to ``production`` database using ``admin`` username and ``qwerty`` password.
|
will establish connection to ``production`` database using
|
||||||
|
``admin`` username and ``qwerty`` password.
|
||||||
|
|
||||||
.. note:: Calling :func:`~mongoengine.connect` without argument will establish
|
.. note:: Calling :func:`~mongoengine.connect` without argument will establish
|
||||||
a connection to the "test" database by default
|
a connection to the "test" database by default
|
||||||
|
|
||||||
Read Preferences
|
Replica Sets
|
||||||
================
|
============
|
||||||
|
|
||||||
As stated above, Read preferences are supported through the connection but also via individual
|
MongoEngine supports connecting to replica sets::
|
||||||
|
|
||||||
|
from mongoengine import connect
|
||||||
|
|
||||||
|
# Regular connect
|
||||||
|
connect('dbname', replicaset='rs-name')
|
||||||
|
|
||||||
|
# MongoDB URI-style connect
|
||||||
|
connect(host='mongodb://localhost/dbname?replicaSet=rs-name')
|
||||||
|
|
||||||
|
Read preferences are supported through the connection or via individual
|
||||||
queries by passing the read_preference ::
|
queries by passing the read_preference ::
|
||||||
|
|
||||||
from pymongo import ReadPreference
|
|
||||||
|
|
||||||
Bar.objects().read_preference(ReadPreference.PRIMARY)
|
Bar.objects().read_preference(ReadPreference.PRIMARY)
|
||||||
Bar.objects(read_preference=ReadPreference.PRIMARY)
|
Bar.objects(read_preference=ReadPreference.PRIMARY)
|
||||||
|
|
||||||
|
@ -27,8 +27,6 @@ objects** as class attributes to the document class::
|
|||||||
As BSON (the binary format for storing data in mongodb) is order dependent,
|
As BSON (the binary format for storing data in mongodb) is order dependent,
|
||||||
documents are serialized based on their field order.
|
documents are serialized based on their field order.
|
||||||
|
|
||||||
.. _dynamic-document-schemas:
|
|
||||||
|
|
||||||
Dynamic document schemas
|
Dynamic document schemas
|
||||||
========================
|
========================
|
||||||
One of the benefits of MongoDB is dynamic schemas for a collection, whilst data
|
One of the benefits of MongoDB is dynamic schemas for a collection, whilst data
|
||||||
@ -78,7 +76,6 @@ are as follows:
|
|||||||
* :class:`~mongoengine.fields.EmailField`
|
* :class:`~mongoengine.fields.EmailField`
|
||||||
* :class:`~mongoengine.fields.EmbeddedDocumentField`
|
* :class:`~mongoengine.fields.EmbeddedDocumentField`
|
||||||
* :class:`~mongoengine.fields.EmbeddedDocumentListField`
|
* :class:`~mongoengine.fields.EmbeddedDocumentListField`
|
||||||
* :class:`~mongoengine.fields.EnumField`
|
|
||||||
* :class:`~mongoengine.fields.FileField`
|
* :class:`~mongoengine.fields.FileField`
|
||||||
* :class:`~mongoengine.fields.FloatField`
|
* :class:`~mongoengine.fields.FloatField`
|
||||||
* :class:`~mongoengine.fields.GenericEmbeddedDocumentField`
|
* :class:`~mongoengine.fields.GenericEmbeddedDocumentField`
|
||||||
@ -233,9 +230,6 @@ document class as the first argument::
|
|||||||
comment2 = Comment(content='Nice article!')
|
comment2 = Comment(content='Nice article!')
|
||||||
page = Page(comments=[comment1, comment2])
|
page = Page(comments=[comment1, comment2])
|
||||||
|
|
||||||
Embedded documents can also leverage the flexibility of :ref:`dynamic-document-schemas:`
|
|
||||||
by inheriting :class:`~mongoengine.DynamicEmbeddedDocument`.
|
|
||||||
|
|
||||||
Dictionary Fields
|
Dictionary Fields
|
||||||
-----------------
|
-----------------
|
||||||
Often, an embedded document may be used instead of a dictionary – generally
|
Often, an embedded document may be used instead of a dictionary – generally
|
||||||
@ -295,12 +289,12 @@ as the constructor's argument::
|
|||||||
content = StringField()
|
content = StringField()
|
||||||
|
|
||||||
|
|
||||||
.. _many-to-many-with-listfields:
|
.. _one-to-many-with-listfields:
|
||||||
|
|
||||||
Many to Many with ListFields
|
One to Many with ListFields
|
||||||
'''''''''''''''''''''''''''
|
'''''''''''''''''''''''''''
|
||||||
|
|
||||||
If you are implementing a many to many relationship via a list of references,
|
If you are implementing a one to many relationship via a list of references,
|
||||||
then the references are stored as DBRefs and to query you need to pass an
|
then the references are stored as DBRefs and to query you need to pass an
|
||||||
instance of the object to the query::
|
instance of the object to the query::
|
||||||
|
|
||||||
@ -341,6 +335,7 @@ supplying the :attr:`reverse_delete_rule` attributes on the
|
|||||||
:class:`ReferenceField` definition, like this::
|
:class:`ReferenceField` definition, like this::
|
||||||
|
|
||||||
class ProfilePage(Document):
|
class ProfilePage(Document):
|
||||||
|
...
|
||||||
employee = ReferenceField('Employee', reverse_delete_rule=mongoengine.CASCADE)
|
employee = ReferenceField('Employee', reverse_delete_rule=mongoengine.CASCADE)
|
||||||
|
|
||||||
The declaration in this example means that when an :class:`Employee` object is
|
The declaration in this example means that when an :class:`Employee` object is
|
||||||
@ -431,15 +426,28 @@ either a single field name, or a list or tuple of field names::
|
|||||||
first_name = StringField()
|
first_name = StringField()
|
||||||
last_name = StringField(unique_with='first_name')
|
last_name = StringField(unique_with='first_name')
|
||||||
|
|
||||||
|
Skipping Document validation on save
|
||||||
|
------------------------------------
|
||||||
|
You can also skip the whole document validation process by setting
|
||||||
|
``validate=False`` when calling the :meth:`~mongoengine.document.Document.save`
|
||||||
|
method::
|
||||||
|
|
||||||
|
class Recipient(Document):
|
||||||
|
name = StringField()
|
||||||
|
email = EmailField()
|
||||||
|
|
||||||
|
recipient = Recipient(name='admin', email='root@localhost')
|
||||||
|
recipient.save() # will raise a ValidationError while
|
||||||
|
recipient.save(validate=False) # won't
|
||||||
|
|
||||||
Document collections
|
Document collections
|
||||||
====================
|
====================
|
||||||
Document classes that inherit **directly** from :class:`~mongoengine.Document`
|
Document classes that inherit **directly** from :class:`~mongoengine.Document`
|
||||||
will have their own **collection** in the database. The name of the collection
|
will have their own **collection** in the database. The name of the collection
|
||||||
is by default the name of the class converted to snake_case (e.g if your Document class
|
is by default the name of the class, converted to lowercase (so in the example
|
||||||
is named `CompanyUser`, the corresponding collection would be `company_user`). If you need
|
above, the collection would be called `page`). If you need to change the name
|
||||||
to change the name of the collection (e.g. to use MongoEngine with an existing database),
|
of the collection (e.g. to use MongoEngine with an existing database), then
|
||||||
then create a class dictionary attribute called :attr:`meta` on your document, and
|
create a class dictionary attribute called :attr:`meta` on your document, and
|
||||||
set :attr:`collection` to the name of the collection that you want your
|
set :attr:`collection` to the name of the collection that you want your
|
||||||
document class to use::
|
document class to use::
|
||||||
|
|
||||||
@ -477,7 +485,7 @@ dictionary containing a full index definition.
|
|||||||
|
|
||||||
A direction may be specified on fields by prefixing the field name with a
|
A direction may be specified on fields by prefixing the field name with a
|
||||||
**+** (for ascending) or a **-** sign (for descending). Note that direction
|
**+** (for ascending) or a **-** sign (for descending). Note that direction
|
||||||
only matters on compound indexes. Text indexes may be specified by prefixing
|
only matters on multi-field indexes. Text indexes may be specified by prefixing
|
||||||
the field name with a **$**. Hashed indexes may be specified by prefixing
|
the field name with a **$**. Hashed indexes may be specified by prefixing
|
||||||
the field name with a **#**::
|
the field name with a **#**::
|
||||||
|
|
||||||
@ -488,14 +496,14 @@ the field name with a **#**::
|
|||||||
created = DateTimeField()
|
created = DateTimeField()
|
||||||
meta = {
|
meta = {
|
||||||
'indexes': [
|
'indexes': [
|
||||||
'title', # single-field index
|
'title',
|
||||||
'$title', # text index
|
'$title', # text index
|
||||||
'#title', # hashed index
|
'#title', # hashed index
|
||||||
('title', '-rating'), # compound index
|
('title', '-rating'),
|
||||||
('category', '_cls'), # compound index
|
('category', '_cls'),
|
||||||
{
|
{
|
||||||
'fields': ['created'],
|
'fields': ['created'],
|
||||||
'expireAfterSeconds': 3600 # ttl index
|
'expireAfterSeconds': 3600
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
@ -547,6 +555,7 @@ There are a few top level defaults for all indexes that can be set::
|
|||||||
'index_background': True,
|
'index_background': True,
|
||||||
'index_cls': False,
|
'index_cls': False,
|
||||||
'auto_create_index': True,
|
'auto_create_index': True,
|
||||||
|
'index_drop_dups': True,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@ -565,6 +574,11 @@ There are a few top level defaults for all indexes that can be set::
|
|||||||
in systems where indexes are managed separately. Disabling this will improve
|
in systems where indexes are managed separately. Disabling this will improve
|
||||||
performance.
|
performance.
|
||||||
|
|
||||||
|
:attr:`index_drop_dups` (Optional)
|
||||||
|
Set the default value for if an index should drop duplicates
|
||||||
|
Since MongoDB 3.0 drop_dups is not supported anymore. Raises a Warning
|
||||||
|
and has no effect
|
||||||
|
|
||||||
|
|
||||||
Compound Indexes and Indexing sub documents
|
Compound Indexes and Indexing sub documents
|
||||||
-------------------------------------------
|
-------------------------------------------
|
||||||
@ -628,8 +642,8 @@ point. To create a geospatial index you must prefix the field with the
|
|||||||
],
|
],
|
||||||
}
|
}
|
||||||
|
|
||||||
Time To Live (TTL) indexes
|
Time To Live indexes
|
||||||
--------------------------
|
--------------------
|
||||||
|
|
||||||
A special index type that allows you to automatically expire data from a
|
A special index type that allows you to automatically expire data from a
|
||||||
collection after a given period. See the official
|
collection after a given period. See the official
|
||||||
|
@ -41,6 +41,35 @@ already exist, then any changes will be updated atomically. For example::
|
|||||||
.. seealso::
|
.. seealso::
|
||||||
:ref:`guide-atomic-updates`
|
:ref:`guide-atomic-updates`
|
||||||
|
|
||||||
|
Pre save data validation and cleaning
|
||||||
|
-------------------------------------
|
||||||
|
MongoEngine allows you to create custom cleaning rules for your documents when
|
||||||
|
calling :meth:`~mongoengine.Document.save`. By providing a custom
|
||||||
|
:meth:`~mongoengine.Document.clean` method you can do any pre validation / data
|
||||||
|
cleaning.
|
||||||
|
|
||||||
|
This might be useful if you want to ensure a default value based on other
|
||||||
|
document values for example::
|
||||||
|
|
||||||
|
class Essay(Document):
|
||||||
|
status = StringField(choices=('Published', 'Draft'), required=True)
|
||||||
|
pub_date = DateTimeField()
|
||||||
|
|
||||||
|
def clean(self):
|
||||||
|
"""Ensures that only published essays have a `pub_date` and
|
||||||
|
automatically sets `pub_date` if essay is published and `pub_date`
|
||||||
|
is not set"""
|
||||||
|
if self.status == 'Draft' and self.pub_date is not None:
|
||||||
|
msg = 'Draft entries should not have a publication date.'
|
||||||
|
raise ValidationError(msg)
|
||||||
|
# Set the pub_date for published items if not set.
|
||||||
|
if self.status == 'Published' and self.pub_date is None:
|
||||||
|
self.pub_date = datetime.now()
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
Cleaning is only called if validation is turned on and when calling
|
||||||
|
:meth:`~mongoengine.Document.save`.
|
||||||
|
|
||||||
Cascading Saves
|
Cascading Saves
|
||||||
---------------
|
---------------
|
||||||
If your document contains :class:`~mongoengine.fields.ReferenceField` or
|
If your document contains :class:`~mongoengine.fields.ReferenceField` or
|
||||||
|
@ -2,6 +2,8 @@
|
|||||||
GridFS
|
GridFS
|
||||||
======
|
======
|
||||||
|
|
||||||
|
.. versionadded:: 0.4
|
||||||
|
|
||||||
Writing
|
Writing
|
||||||
-------
|
-------
|
||||||
|
|
||||||
|
@ -10,10 +10,8 @@ User Guide
|
|||||||
defining-documents
|
defining-documents
|
||||||
document-instances
|
document-instances
|
||||||
querying
|
querying
|
||||||
validation
|
|
||||||
gridfs
|
gridfs
|
||||||
signals
|
signals
|
||||||
text-indexes
|
text-indexes
|
||||||
migration
|
|
||||||
logging-monitoring
|
logging-monitoring
|
||||||
mongomock
|
mongomock
|
||||||
|
@ -12,7 +12,7 @@ MongoEngine is available on PyPI, so you can use :program:`pip`:
|
|||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ python -m pip install mongoengine
|
$ pip install mongoengine
|
||||||
|
|
||||||
Alternatively, if you don't have setuptools installed, `download it from PyPi
|
Alternatively, if you don't have setuptools installed, `download it from PyPi
|
||||||
<http://pypi.python.org/pypi/mongoengine/>`_ and run
|
<http://pypi.python.org/pypi/mongoengine/>`_ and run
|
||||||
|
@ -1,308 +0,0 @@
|
|||||||
===================
|
|
||||||
Documents migration
|
|
||||||
===================
|
|
||||||
|
|
||||||
The structure of your documents and their associated mongoengine schemas are likely
|
|
||||||
to change over the lifetime of an application. This section provides guidance and
|
|
||||||
recommendations on how to deal with migrations.
|
|
||||||
|
|
||||||
Due to the very flexible nature of mongodb, migrations of models aren't trivial and
|
|
||||||
for people that know about `alembic` for `sqlalchemy`, there is unfortunately no equivalent
|
|
||||||
library that will manage the migration in an automatic fashion for mongoengine.
|
|
||||||
|
|
||||||
Example 1: Addition of a field
|
|
||||||
==============================
|
|
||||||
|
|
||||||
Let's start by taking a simple example of a model change and review the different option you
|
|
||||||
have to deal with the migration.
|
|
||||||
|
|
||||||
Let's assume we start with the following schema and save an instance:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
class User(Document):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
User(name="John Doe").save()
|
|
||||||
|
|
||||||
# print the objects as they exist in mongodb
|
|
||||||
print(User.objects().as_pymongo()) # [{u'_id': ObjectId('5d06b9c3d7c1f18db3e7c874'), u'name': u'John Doe'}]
|
|
||||||
|
|
||||||
On the next version of your application, let's now assume that a new field `enabled` gets added to the
|
|
||||||
existing ``User`` model with a `default=True`. Thus you simply update the ``User`` class to the following:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
class User(Document):
|
|
||||||
name = StringField(required=True)
|
|
||||||
enabled = BooleanField(default=True)
|
|
||||||
|
|
||||||
Without applying any migration, we now reload an object from the database into the ``User`` class
|
|
||||||
and checks its `enabled` attribute:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
assert User.objects.count() == 1
|
|
||||||
user = User.objects().first()
|
|
||||||
assert user.enabled is True
|
|
||||||
assert User.objects(enabled=True).count() == 0 # uh?
|
|
||||||
assert User.objects(enabled=False).count() == 0 # uh?
|
|
||||||
|
|
||||||
# this is consistent with what we have in the database
|
|
||||||
# in fact, 'enabled' does not exist
|
|
||||||
print(User.objects().as_pymongo().first()) # {u'_id': ObjectId('5d06b9c3d7c1f18db3e7c874'), u'name': u'John'}
|
|
||||||
assert User.objects(enabled=None).count() == 1
|
|
||||||
|
|
||||||
As you can see, even if the document wasn't updated, mongoengine applies the default value seamlessly when it
|
|
||||||
loads the pymongo dict into a ``User`` instance. At first sight it looks like you don't need to migrate the
|
|
||||||
existing documents when adding new fields but this actually leads to inconsistencies when it comes to querying.
|
|
||||||
|
|
||||||
In fact, when querying, mongoengine isn't trying to account for the default value of the new field and so
|
|
||||||
if you don't actually migrate the existing documents, you are taking a risk that querying/updating
|
|
||||||
will be missing relevant record.
|
|
||||||
|
|
||||||
When adding fields/modifying default values, you can use any of the following to do the migration
|
|
||||||
as a standalone script:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
# Use mongoengine to set a default value for a given field
|
|
||||||
User.objects().update(enabled=True)
|
|
||||||
# or use pymongo
|
|
||||||
user_coll = User._get_collection()
|
|
||||||
user_coll.update_many({}, {'$set': {'enabled': True}})
|
|
||||||
|
|
||||||
|
|
||||||
Example 2: Inheritance change
|
|
||||||
=============================
|
|
||||||
|
|
||||||
Let's consider the following example:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
class Human(Document):
|
|
||||||
name = StringField()
|
|
||||||
meta = {"allow_inheritance": True}
|
|
||||||
|
|
||||||
class Jedi(Human):
|
|
||||||
dark_side = BooleanField()
|
|
||||||
light_saber_color = StringField()
|
|
||||||
|
|
||||||
Jedi(name="Darth Vader", dark_side=True, light_saber_color="red").save()
|
|
||||||
Jedi(name="Obi Wan Kenobi", dark_side=False, light_saber_color="blue").save()
|
|
||||||
|
|
||||||
assert Human.objects.count() == 2
|
|
||||||
assert Jedi.objects.count() == 2
|
|
||||||
|
|
||||||
# Let's check how these documents got stored in mongodb
|
|
||||||
print(Jedi.objects.as_pymongo())
|
|
||||||
# [
|
|
||||||
# {'_id': ObjectId('5fac4aaaf61d7fb06046e0f9'), '_cls': 'Human.Jedi', 'name': 'Darth Vader', 'dark_side': True, 'light_saber_color': 'red'},
|
|
||||||
# {'_id': ObjectId('5fac4ac4f61d7fb06046e0fa'), '_cls': 'Human.Jedi', 'name': 'Obi Wan Kenobi', 'dark_side': False, 'light_saber_color': 'blue'}
|
|
||||||
# ]
|
|
||||||
|
|
||||||
As you can observe, when you use inheritance, MongoEngine stores a field named '_cls' behind the scene to keep
|
|
||||||
track of the Document class.
|
|
||||||
|
|
||||||
Let's now take the scenario that you want to refactor the inheritance schema and:
|
|
||||||
- Have the Jedi's with dark_side=False/True become GoodJedi's/BadSith's
|
|
||||||
- get rid of the 'dark_side' field
|
|
||||||
|
|
||||||
move to the following schemas:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
# unchanged
|
|
||||||
class Human(Document):
|
|
||||||
name = StringField()
|
|
||||||
meta = {"allow_inheritance": True}
|
|
||||||
|
|
||||||
# attribute 'dark_side' removed
|
|
||||||
class GoodJedi(Human):
|
|
||||||
light_saber_color = StringField()
|
|
||||||
|
|
||||||
# new class
|
|
||||||
class BadSith(Human):
|
|
||||||
light_saber_color = StringField()
|
|
||||||
|
|
||||||
MongoEngine doesn't know about the change or how to map them with the existing data
|
|
||||||
so if you don't apply any migration, you will observe a strange behavior, as if the collection was suddenly
|
|
||||||
empty.
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
# As a reminder, the documents that we inserted
|
|
||||||
# have the _cls field = 'Human.Jedi'
|
|
||||||
|
|
||||||
# Following has no match
|
|
||||||
# because the query that is used behind the scene is
|
|
||||||
# filtering on {'_cls': 'Human.GoodJedi'}
|
|
||||||
assert GoodJedi.objects().count() == 0
|
|
||||||
|
|
||||||
# Following has also no match
|
|
||||||
# because it is filtering on {'_cls': {'$in': ('Human', 'Human.GoodJedi', 'Human.BadSith')}}
|
|
||||||
# which has no match
|
|
||||||
assert Human.objects.count() == 0
|
|
||||||
assert Human.objects.first() is None
|
|
||||||
|
|
||||||
# If we bypass MongoEngine and make use of underlying driver (PyMongo)
|
|
||||||
# we can see that the documents are there
|
|
||||||
humans_coll = Human._get_collection()
|
|
||||||
assert humans_coll.count_documents({}) == 2
|
|
||||||
# print first document
|
|
||||||
print(humans_coll.find_one())
|
|
||||||
# {'_id': ObjectId('5fac4aaaf61d7fb06046e0f9'), '_cls': 'Human.Jedi', 'name': 'Darth Vader', 'dark_side': True, 'light_saber_color': 'red'}
|
|
||||||
|
|
||||||
As you can see, first obvious problem is that we need to modify '_cls' values based on existing values of
|
|
||||||
'dark_side' documents.
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
humans_coll = Human._get_collection()
|
|
||||||
old_class = 'Human.Jedi'
|
|
||||||
good_jedi_class = 'Human.GoodJedi'
|
|
||||||
bad_sith_class = 'Human.BadSith'
|
|
||||||
humans_coll.update_many({'_cls': old_class, 'dark_side': False}, {'$set': {'_cls': good_jedi_class}})
|
|
||||||
humans_coll.update_many({'_cls': old_class, 'dark_side': True}, {'$set': {'_cls': bad_sith_class}})
|
|
||||||
|
|
||||||
Let's now check if querying improved in MongoEngine:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
assert GoodJedi.objects().count() == 1 # Hoorah!
|
|
||||||
assert BadSith.objects().count() == 1 # Hoorah!
|
|
||||||
assert Human.objects.count() == 2 # Hoorah!
|
|
||||||
|
|
||||||
# let's now check that documents load correctly
|
|
||||||
jedi = GoodJedi.objects().first()
|
|
||||||
# raises FieldDoesNotExist: The fields "{'dark_side'}" do not exist on the document "Human.GoodJedi"
|
|
||||||
|
|
||||||
In fact we only took care of renaming the _cls values but we haven't removed the 'dark_side' fields
|
|
||||||
which do not exist anymore on the GoodJedi's and BadSith's models.
|
|
||||||
Let's remove the field from the collections:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
humans_coll = Human._get_collection()
|
|
||||||
humans_coll.update_many({}, {'$unset': {'dark_side': 1}})
|
|
||||||
|
|
||||||
.. note:: We did this migration in 2 different steps for the sake of example but it could have been combined
|
|
||||||
with the migration of the _cls fields: ::
|
|
||||||
|
|
||||||
humans_coll.update_many(
|
|
||||||
{'_cls': old_class, 'dark_side': False},
|
|
||||||
{
|
|
||||||
'$set': {'_cls': good_jedi_class},
|
|
||||||
'$unset': {'dark_side': 1}
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
And verify that the documents now load correctly:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
jedi = GoodJedi.objects().first()
|
|
||||||
assert jedi.name == "Obi Wan Kenobi"
|
|
||||||
|
|
||||||
sith = BadSith.objects().first()
|
|
||||||
assert sith.name == "Darth Vader"
|
|
||||||
|
|
||||||
|
|
||||||
Another way of dealing with this migration is to iterate over
|
|
||||||
the documents and update/replace them one by one. This is way slower but
|
|
||||||
it is often useful for complex migrations of Document models.
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
for doc in humans_coll.find():
|
|
||||||
if doc['_cls'] == 'Human.Jedi':
|
|
||||||
doc['_cls'] = 'Human.BadSith' if doc['dark_side'] else 'Human.GoodJedi'
|
|
||||||
doc.pop('dark_side')
|
|
||||||
humans_coll.replace_one({'_id': doc['_id']}, doc)
|
|
||||||
|
|
||||||
.. warning:: Be aware of this `flaw <https://groups.google.com/g/mongodb-user/c/AFC1ia7MHzk>`_ if you modify documents while iterating
|
|
||||||
|
|
||||||
Example 4: Index removal
|
|
||||||
========================
|
|
||||||
|
|
||||||
If you remove an index from your Document class, or remove an indexed Field from your Document class,
|
|
||||||
you'll need to manually drop the corresponding index. MongoEngine will not do that for you.
|
|
||||||
|
|
||||||
The way to deal with this case is to identify the name of the index to drop with `index_information()`, and then drop
|
|
||||||
it with `drop_index()`
|
|
||||||
|
|
||||||
Let's for instance assume that you start with the following Document class
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
class User(Document):
|
|
||||||
name = StringField(index=True)
|
|
||||||
|
|
||||||
meta = {"indexes": ["name"]}
|
|
||||||
|
|
||||||
User(name="John Doe").save()
|
|
||||||
|
|
||||||
As soon as you start interacting with the Document collection (when `.save()` is called in this case),
|
|
||||||
it would create the following indexes:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
print(User._get_collection().index_information())
|
|
||||||
# {
|
|
||||||
# '_id_': {'key': [('_id', 1)], 'v': 2},
|
|
||||||
# 'name_1': {'background': False, 'key': [('name', 1)], 'v': 2},
|
|
||||||
# }
|
|
||||||
|
|
||||||
Thus: '_id_' which is the default index and 'name_1' which is our custom index.
|
|
||||||
If you would remove the 'name' field or its index, you would have to call:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
User._get_collection().drop_index('name_1')
|
|
||||||
|
|
||||||
.. note:: When adding new fields or new indexes, MongoEngine will take care of creating them
|
|
||||||
(unless `auto_create_index` is disabled) ::
|
|
||||||
|
|
||||||
Recommendations
|
|
||||||
===============
|
|
||||||
|
|
||||||
- Write migration scripts whenever you do changes to the model schemas
|
|
||||||
- Using :class:`~mongoengine.DynamicDocument` or ``meta = {"strict": False}`` may help to avoid some migrations or to have the 2 versions of your application co-exist.
|
|
||||||
- Write post-processing checks to verify that migration scripts worked. See below
|
|
||||||
|
|
||||||
Post-processing checks
|
|
||||||
======================
|
|
||||||
|
|
||||||
The following recipe can be used to sanity check a Document collection after you applied migration.
|
|
||||||
It does not make any assumption on what was migrated, it will fetch 1000 objects randomly and
|
|
||||||
run some quick checks on the documents to make sure the document looks OK. As it is, it will fail
|
|
||||||
on the first occurrence of an error but this is something that can be adapted based on your needs.
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
def get_random_oids(collection, sample_size):
|
|
||||||
pipeline = [{"$project": {'_id': 1}}, {"$sample": {"size": sample_size}}]
|
|
||||||
return [s['_id'] for s in collection.aggregate(pipeline)]
|
|
||||||
|
|
||||||
def get_random_documents(DocCls, sample_size):
|
|
||||||
doc_collection = DocCls._get_collection()
|
|
||||||
random_oids = get_random_oids(doc_collection, sample_size)
|
|
||||||
return DocCls.objects(id__in=random_oids)
|
|
||||||
|
|
||||||
def check_documents(DocCls, sample_size):
|
|
||||||
for doc in get_random_documents(DocCls, sample_size):
|
|
||||||
# general validation (types and values)
|
|
||||||
doc.validate()
|
|
||||||
|
|
||||||
# load all subfields,
|
|
||||||
# this may trigger additional queries if you have ReferenceFields
|
|
||||||
# so it may be slow
|
|
||||||
for field in doc._fields:
|
|
||||||
try:
|
|
||||||
getattr(doc, field)
|
|
||||||
except Exception:
|
|
||||||
LOG.warning(f"Could not load field {field} in Document {doc.id}")
|
|
||||||
raise
|
|
||||||
|
|
||||||
check_documents(Human, sample_size=1000)
|
|
@ -86,10 +86,6 @@ expressions:
|
|||||||
* ``istartswith`` -- string field starts with value (case insensitive)
|
* ``istartswith`` -- string field starts with value (case insensitive)
|
||||||
* ``endswith`` -- string field ends with value
|
* ``endswith`` -- string field ends with value
|
||||||
* ``iendswith`` -- string field ends with value (case insensitive)
|
* ``iendswith`` -- string field ends with value (case insensitive)
|
||||||
* ``wholeword`` -- string field contains whole word
|
|
||||||
* ``iwholeword`` -- string field contains whole word (case insensitive)
|
|
||||||
* ``regex`` -- string field match by regex
|
|
||||||
* ``iregex`` -- string field match by regex (case insensitive)
|
|
||||||
* ``match`` -- performs an $elemMatch so you can match an entire document within an array
|
* ``match`` -- performs an $elemMatch so you can match an entire document within an array
|
||||||
|
|
||||||
|
|
||||||
@ -243,7 +239,7 @@ Limiting and skipping results
|
|||||||
Just as with traditional ORMs, you may limit the number of results returned or
|
Just as with traditional ORMs, you may limit the number of results returned or
|
||||||
skip a number of results in your query.
|
skip a number of results in your query.
|
||||||
:meth:`~mongoengine.queryset.QuerySet.limit` and
|
:meth:`~mongoengine.queryset.QuerySet.limit` and
|
||||||
:meth:`~mongoengine.queryset.QuerySet.skip` methods are available on
|
:meth:`~mongoengine.queryset.QuerySet.skip` and methods are available on
|
||||||
:class:`~mongoengine.queryset.QuerySet` objects, but the `array-slicing` syntax
|
:class:`~mongoengine.queryset.QuerySet` objects, but the `array-slicing` syntax
|
||||||
is preferred for achieving this::
|
is preferred for achieving this::
|
||||||
|
|
||||||
@ -547,10 +543,7 @@ Documents may be updated atomically by using the
|
|||||||
There are several different "modifiers" that you may use with these methods:
|
There are several different "modifiers" that you may use with these methods:
|
||||||
|
|
||||||
* ``set`` -- set a particular value
|
* ``set`` -- set a particular value
|
||||||
* ``set_on_insert`` -- set only if this is new document `need to add upsert=True`_
|
|
||||||
* ``unset`` -- delete a particular value (since MongoDB v1.3)
|
* ``unset`` -- delete a particular value (since MongoDB v1.3)
|
||||||
* ``max`` -- update only if value is bigger
|
|
||||||
* ``min`` -- update only if value is smaller
|
|
||||||
* ``inc`` -- increment a value by a given amount
|
* ``inc`` -- increment a value by a given amount
|
||||||
* ``dec`` -- decrement a value by a given amount
|
* ``dec`` -- decrement a value by a given amount
|
||||||
* ``push`` -- append a value to a list
|
* ``push`` -- append a value to a list
|
||||||
@ -559,7 +552,6 @@ There are several different "modifiers" that you may use with these methods:
|
|||||||
* ``pull`` -- remove a value from a list
|
* ``pull`` -- remove a value from a list
|
||||||
* ``pull_all`` -- remove several values from a list
|
* ``pull_all`` -- remove several values from a list
|
||||||
* ``add_to_set`` -- add value to a list only if its not in the list already
|
* ``add_to_set`` -- add value to a list only if its not in the list already
|
||||||
* ``rename`` -- rename the key name
|
|
||||||
|
|
||||||
.. _depending on the value: http://docs.mongodb.org/manual/reference/operator/update/pop/
|
.. _depending on the value: http://docs.mongodb.org/manual/reference/operator/update/pop/
|
||||||
|
|
||||||
@ -617,7 +609,7 @@ to push values with index::
|
|||||||
.. note::
|
.. note::
|
||||||
Currently only top level lists are handled, future versions of mongodb /
|
Currently only top level lists are handled, future versions of mongodb /
|
||||||
pymongo plan to support nested positional operators. See `The $ positional
|
pymongo plan to support nested positional operators. See `The $ positional
|
||||||
operator <https://docs.mongodb.com/manual/tutorial/update-documents/#Updating-The%24positionaloperator>`_.
|
operator <http://www.mongodb.org/display/DOCS/Updating#Updating-The%24positionaloperator>`_.
|
||||||
|
|
||||||
Server-side javascript execution
|
Server-side javascript execution
|
||||||
================================
|
================================
|
||||||
|
@ -1,122 +0,0 @@
|
|||||||
====================
|
|
||||||
Document Validation
|
|
||||||
====================
|
|
||||||
|
|
||||||
By design, MongoEngine strictly validates the documents right before they are inserted in MongoDB
|
|
||||||
and makes sure they are consistent with the fields defined in your models.
|
|
||||||
|
|
||||||
MongoEngine makes the assumption that the documents that exists in the DB are compliant with the schema.
|
|
||||||
This means that Mongoengine will not validate a document when an object is loaded from the DB into an instance
|
|
||||||
of your model but this operation may fail under some circumstances (e.g. if there is a field in
|
|
||||||
the document fetched from the database that is not defined in your model).
|
|
||||||
|
|
||||||
|
|
||||||
Built-in validation
|
|
||||||
===================
|
|
||||||
|
|
||||||
Mongoengine provides different fields that encapsulate the corresponding validation
|
|
||||||
out of the box. Validation runs when calling `.validate()` or `.save()`
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
from mongoengine import Document, EmailField
|
|
||||||
|
|
||||||
class User(Document):
|
|
||||||
email = EmailField()
|
|
||||||
age = IntField(min_value=0, max_value=99)
|
|
||||||
|
|
||||||
user = User(email='invalid@', age=24)
|
|
||||||
user.validate() # raises ValidationError (Invalid email address: ['email'])
|
|
||||||
user.save() # raises ValidationError (Invalid email address: ['email'])
|
|
||||||
|
|
||||||
user2 = User(email='john.doe@garbage.com', age=1000)
|
|
||||||
user2.save() # raises ValidationError (Integer value is too large: ['age'])
|
|
||||||
|
|
||||||
Custom validation
|
|
||||||
=================
|
|
||||||
|
|
||||||
The following feature can be used to customize the validation:
|
|
||||||
|
|
||||||
* Field `validation` parameter
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
def not_john_doe(name):
|
|
||||||
if name == 'John Doe':
|
|
||||||
raise ValidationError("John Doe is not a valid name")
|
|
||||||
|
|
||||||
class Person(Document):
|
|
||||||
full_name = StringField(validation=not_john_doe)
|
|
||||||
|
|
||||||
Person(full_name='Billy Doe').save()
|
|
||||||
Person(full_name='John Doe').save() # raises ValidationError (John Doe is not a valid name)
|
|
||||||
|
|
||||||
|
|
||||||
* Document `clean` method
|
|
||||||
|
|
||||||
This method is called as part of :meth:`~mongoengine.document.Document.save` and should be used to provide
|
|
||||||
custom model validation and/or to modify some of the field values prior to validation.
|
|
||||||
For instance, you could use it to automatically provide a value for a field, or to do validation
|
|
||||||
that requires access to more than a single field.
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
class Essay(Document):
|
|
||||||
status = StringField(choices=('Published', 'Draft'), required=True)
|
|
||||||
pub_date = DateTimeField()
|
|
||||||
|
|
||||||
def clean(self):
|
|
||||||
# Validate that only published essays have a `pub_date`
|
|
||||||
if self.status == 'Draft' and self.pub_date is not None:
|
|
||||||
raise ValidationError('Draft entries should not have a publication date.')
|
|
||||||
# Set the pub_date for published items if not set.
|
|
||||||
if self.status == 'Published' and self.pub_date is None:
|
|
||||||
self.pub_date = datetime.now()
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
Cleaning is only called if validation is turned on and when calling
|
|
||||||
:meth:`~mongoengine.Document.save`.
|
|
||||||
|
|
||||||
* Adding custom Field classes
|
|
||||||
|
|
||||||
We recommend as much as possible to use fields provided by MongoEngine. However, it is also possible
|
|
||||||
to subclass a Field and encapsulate some validation by overriding the `validate` method
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
class AgeField(IntField):
|
|
||||||
|
|
||||||
def validate(self, value):
|
|
||||||
super(AgeField, self).validate(value) # let IntField.validate run first
|
|
||||||
if value == 60:
|
|
||||||
self.error('60 is not allowed')
|
|
||||||
|
|
||||||
class Person(Document):
|
|
||||||
age = AgeField(min_value=0, max_value=99)
|
|
||||||
|
|
||||||
Person(age=20).save() # passes
|
|
||||||
Person(age=1000).save() # raises ValidationError (Integer value is too large: ['age'])
|
|
||||||
Person(age=60).save() # raises ValidationError (Person:None) (60 is not allowed: ['age'])
|
|
||||||
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
When overriding `validate`, use `self.error("your-custom-error")` instead of raising ValidationError explicitly,
|
|
||||||
it will provide a better context with the error message
|
|
||||||
|
|
||||||
Skipping validation
|
|
||||||
====================
|
|
||||||
|
|
||||||
Although discouraged as it allows to violate fields constraints, if for some reason you need to disable
|
|
||||||
the validation and cleaning of a document when you call :meth:`~mongoengine.document.Document.save`, you can use `.save(validate=False)`.
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
class Person(Document):
|
|
||||||
age = IntField(max_value=100)
|
|
||||||
|
|
||||||
Person(age=1000).save() # raises ValidationError (Integer value is too large)
|
|
||||||
|
|
||||||
Person(age=1000).save(validate=False)
|
|
||||||
person = Person.objects.first()
|
|
||||||
assert person.age == 1000
|
|
@ -7,7 +7,7 @@ MongoDB. To install it, simply run
|
|||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ python -m pip install -U mongoengine
|
$ pip install -U mongoengine
|
||||||
|
|
||||||
:doc:`tutorial`
|
:doc:`tutorial`
|
||||||
A quick tutorial building a tumblelog to get you up and running with
|
A quick tutorial building a tumblelog to get you up and running with
|
||||||
@ -91,3 +91,4 @@ Indices and tables
|
|||||||
* :ref:`genindex`
|
* :ref:`genindex`
|
||||||
* :ref:`modindex`
|
* :ref:`modindex`
|
||||||
* :ref:`search`
|
* :ref:`search`
|
||||||
|
|
||||||
|
@ -1,3 +0,0 @@
|
|||||||
Sphinx==3.3.0
|
|
||||||
sphinx-rtd-theme==0.5.0
|
|
||||||
readthedocs-sphinx-ext==2.1.1
|
|
@ -18,7 +18,7 @@ location --- running it locally will be easier, but if that is not an option
|
|||||||
then it may be run on a remote server. If you haven't installed MongoEngine,
|
then it may be run on a remote server. If you haven't installed MongoEngine,
|
||||||
simply use pip to install it like so::
|
simply use pip to install it like so::
|
||||||
|
|
||||||
$ python -m pip install mongoengine
|
$ pip install mongoengine
|
||||||
|
|
||||||
Before we can start using MongoEngine, we need to tell it how to connect to our
|
Before we can start using MongoEngine, we need to tell it how to connect to our
|
||||||
instance of :program:`mongod`. For this we use the :func:`~mongoengine.connect`
|
instance of :program:`mongod`. For this we use the :func:`~mongoengine.connect`
|
||||||
|
@ -52,7 +52,7 @@ rename its occurrences.
|
|||||||
This release includes a major rehaul of MongoEngine's code quality and
|
This release includes a major rehaul of MongoEngine's code quality and
|
||||||
introduces a few breaking changes. It also touches many different parts of
|
introduces a few breaking changes. It also touches many different parts of
|
||||||
the package and although all the changes have been tested and scrutinized,
|
the package and although all the changes have been tested and scrutinized,
|
||||||
you're encouraged to thoroughly test the upgrade.
|
you're encouraged to thorougly test the upgrade.
|
||||||
|
|
||||||
First breaking change involves renaming `ConnectionError` to `MongoEngineConnectionError`.
|
First breaking change involves renaming `ConnectionError` to `MongoEngineConnectionError`.
|
||||||
If you import or catch this exception, you'll need to rename it in your code.
|
If you import or catch this exception, you'll need to rename it in your code.
|
||||||
@ -85,10 +85,10 @@ by default from now on.
|
|||||||
|
|
||||||
The 0.8.7 package on pypi was corrupted. If upgrading from 0.8.7 to 0.9.0 please follow: ::
|
The 0.8.7 package on pypi was corrupted. If upgrading from 0.8.7 to 0.9.0 please follow: ::
|
||||||
|
|
||||||
python -m pip uninstall pymongo
|
pip uninstall pymongo
|
||||||
python -m pip uninstall mongoengine
|
pip uninstall mongoengine
|
||||||
python -m pip install pymongo==2.8
|
pip install pymongo==2.8
|
||||||
python -m pip install mongoengine
|
pip install mongoengine
|
||||||
|
|
||||||
0.8.7
|
0.8.7
|
||||||
*****
|
*****
|
||||||
@ -153,7 +153,7 @@ inherited classes like so: ::
|
|||||||
|
|
||||||
# 4. Remove indexes
|
# 4. Remove indexes
|
||||||
info = collection.index_information()
|
info = collection.index_information()
|
||||||
indexes_to_drop = [key for key, value in info.items()
|
indexes_to_drop = [key for key, value in info.iteritems()
|
||||||
if '_types' in dict(value['key'])]
|
if '_types' in dict(value['key'])]
|
||||||
for index in indexes_to_drop:
|
for index in indexes_to_drop:
|
||||||
collection.drop_index(index)
|
collection.drop_index(index)
|
||||||
|
@ -1,23 +1,22 @@
|
|||||||
# Import submodules so that we can expose their __all__
|
# Import submodules so that we can expose their __all__
|
||||||
from mongoengine import (
|
from mongoengine import connection
|
||||||
connection,
|
from mongoengine import document
|
||||||
document,
|
from mongoengine import errors
|
||||||
errors,
|
from mongoengine import fields
|
||||||
fields,
|
from mongoengine import queryset
|
||||||
queryset,
|
from mongoengine import signals
|
||||||
signals,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Import everything from each submodule so that it can be accessed via
|
# Import everything from each submodule so that it can be accessed via
|
||||||
# mongoengine, e.g. instead of `from mongoengine.connection import connect`,
|
# mongoengine, e.g. instead of `from mongoengine.connection import connect`,
|
||||||
# users can simply use `from mongoengine import connect`, or even
|
# users can simply use `from mongoengine import connect`, or even
|
||||||
# `from mongoengine import *` and then `connect('testdb')`.
|
# `from mongoengine import *` and then `connect('testdb')`.
|
||||||
from mongoengine.connection import * # noqa: F401
|
from mongoengine.connection import *
|
||||||
from mongoengine.document import * # noqa: F401
|
from mongoengine.document import *
|
||||||
from mongoengine.errors import * # noqa: F401
|
from mongoengine.errors import *
|
||||||
from mongoengine.fields import * # noqa: F401
|
from mongoengine.fields import *
|
||||||
from mongoengine.queryset import * # noqa: F401
|
from mongoengine.queryset import *
|
||||||
from mongoengine.signals import * # noqa: F401
|
from mongoengine.signals import *
|
||||||
|
|
||||||
|
|
||||||
__all__ = (
|
__all__ = (
|
||||||
list(document.__all__)
|
list(document.__all__)
|
||||||
@ -29,7 +28,7 @@ __all__ = (
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
VERSION = (0, 23, 1)
|
VERSION = (0, 19, 1)
|
||||||
|
|
||||||
|
|
||||||
def get_version():
|
def get_version():
|
||||||
|
@ -1,6 +1,8 @@
|
|||||||
import weakref
|
import weakref
|
||||||
|
|
||||||
from bson import DBRef
|
from bson import DBRef
|
||||||
|
import six
|
||||||
|
from six import iteritems
|
||||||
|
|
||||||
from mongoengine.common import _import_class
|
from mongoengine.common import _import_class
|
||||||
from mongoengine.errors import DoesNotExist, MultipleObjectsReturned
|
from mongoengine.errors import DoesNotExist, MultipleObjectsReturned
|
||||||
@ -51,7 +53,7 @@ class BaseDict(dict):
|
|||||||
if isinstance(instance, BaseDocument):
|
if isinstance(instance, BaseDocument):
|
||||||
self._instance = weakref.proxy(instance)
|
self._instance = weakref.proxy(instance)
|
||||||
self._name = name
|
self._name = name
|
||||||
super().__init__(dict_items)
|
super(BaseDict, self).__init__(dict_items)
|
||||||
|
|
||||||
def get(self, key, default=None):
|
def get(self, key, default=None):
|
||||||
# get does not use __getitem__ by default so we must override it as well
|
# get does not use __getitem__ by default so we must override it as well
|
||||||
@ -61,18 +63,18 @@ class BaseDict(dict):
|
|||||||
return default
|
return default
|
||||||
|
|
||||||
def __getitem__(self, key):
|
def __getitem__(self, key):
|
||||||
value = super().__getitem__(key)
|
value = super(BaseDict, self).__getitem__(key)
|
||||||
|
|
||||||
EmbeddedDocument = _import_class("EmbeddedDocument")
|
EmbeddedDocument = _import_class("EmbeddedDocument")
|
||||||
if isinstance(value, EmbeddedDocument) and value._instance is None:
|
if isinstance(value, EmbeddedDocument) and value._instance is None:
|
||||||
value._instance = self._instance
|
value._instance = self._instance
|
||||||
elif isinstance(value, dict) and not isinstance(value, BaseDict):
|
elif isinstance(value, dict) and not isinstance(value, BaseDict):
|
||||||
value = BaseDict(value, None, f"{self._name}.{key}")
|
value = BaseDict(value, None, "%s.%s" % (self._name, key))
|
||||||
super().__setitem__(key, value)
|
super(BaseDict, self).__setitem__(key, value)
|
||||||
value._instance = self._instance
|
value._instance = self._instance
|
||||||
elif isinstance(value, list) and not isinstance(value, BaseList):
|
elif isinstance(value, list) and not isinstance(value, BaseList):
|
||||||
value = BaseList(value, None, f"{self._name}.{key}")
|
value = BaseList(value, None, "%s.%s" % (self._name, key))
|
||||||
super().__setitem__(key, value)
|
super(BaseDict, self).__setitem__(key, value)
|
||||||
value._instance = self._instance
|
value._instance = self._instance
|
||||||
return value
|
return value
|
||||||
|
|
||||||
@ -97,7 +99,7 @@ class BaseDict(dict):
|
|||||||
def _mark_as_changed(self, key=None):
|
def _mark_as_changed(self, key=None):
|
||||||
if hasattr(self._instance, "_mark_as_changed"):
|
if hasattr(self._instance, "_mark_as_changed"):
|
||||||
if key:
|
if key:
|
||||||
self._instance._mark_as_changed(f"{self._name}.{key}")
|
self._instance._mark_as_changed("%s.%s" % (self._name, key))
|
||||||
else:
|
else:
|
||||||
self._instance._mark_as_changed(self._name)
|
self._instance._mark_as_changed(self._name)
|
||||||
|
|
||||||
@ -115,13 +117,13 @@ class BaseList(list):
|
|||||||
if isinstance(instance, BaseDocument):
|
if isinstance(instance, BaseDocument):
|
||||||
self._instance = weakref.proxy(instance)
|
self._instance = weakref.proxy(instance)
|
||||||
self._name = name
|
self._name = name
|
||||||
super().__init__(list_items)
|
super(BaseList, self).__init__(list_items)
|
||||||
|
|
||||||
def __getitem__(self, key):
|
def __getitem__(self, key):
|
||||||
# change index to positive value because MongoDB does not support negative one
|
# change index to positive value because MongoDB does not support negative one
|
||||||
if isinstance(key, int) and key < 0:
|
if isinstance(key, int) and key < 0:
|
||||||
key = len(self) + key
|
key = len(self) + key
|
||||||
value = super().__getitem__(key)
|
value = super(BaseList, self).__getitem__(key)
|
||||||
|
|
||||||
if isinstance(key, slice):
|
if isinstance(key, slice):
|
||||||
# When receiving a slice operator, we don't convert the structure and bind
|
# When receiving a slice operator, we don't convert the structure and bind
|
||||||
@ -133,18 +135,19 @@ class BaseList(list):
|
|||||||
value._instance = self._instance
|
value._instance = self._instance
|
||||||
elif isinstance(value, dict) and not isinstance(value, BaseDict):
|
elif isinstance(value, dict) and not isinstance(value, BaseDict):
|
||||||
# Replace dict by BaseDict
|
# Replace dict by BaseDict
|
||||||
value = BaseDict(value, None, f"{self._name}.{key}")
|
value = BaseDict(value, None, "%s.%s" % (self._name, key))
|
||||||
super().__setitem__(key, value)
|
super(BaseList, self).__setitem__(key, value)
|
||||||
value._instance = self._instance
|
value._instance = self._instance
|
||||||
elif isinstance(value, list) and not isinstance(value, BaseList):
|
elif isinstance(value, list) and not isinstance(value, BaseList):
|
||||||
# Replace list by BaseList
|
# Replace list by BaseList
|
||||||
value = BaseList(value, None, f"{self._name}.{key}")
|
value = BaseList(value, None, "%s.%s" % (self._name, key))
|
||||||
super().__setitem__(key, value)
|
super(BaseList, self).__setitem__(key, value)
|
||||||
value._instance = self._instance
|
value._instance = self._instance
|
||||||
return value
|
return value
|
||||||
|
|
||||||
def __iter__(self):
|
def __iter__(self):
|
||||||
yield from super().__iter__()
|
for v in super(BaseList, self).__iter__():
|
||||||
|
yield v
|
||||||
|
|
||||||
def __getstate__(self):
|
def __getstate__(self):
|
||||||
self.instance = None
|
self.instance = None
|
||||||
@ -162,7 +165,7 @@ class BaseList(list):
|
|||||||
# instead, we simply marks the whole list as changed
|
# instead, we simply marks the whole list as changed
|
||||||
changed_key = None
|
changed_key = None
|
||||||
|
|
||||||
result = super().__setitem__(key, value)
|
result = super(BaseList, self).__setitem__(key, value)
|
||||||
self._mark_as_changed(changed_key)
|
self._mark_as_changed(changed_key)
|
||||||
return result
|
return result
|
||||||
|
|
||||||
@ -177,17 +180,30 @@ class BaseList(list):
|
|||||||
__iadd__ = mark_as_changed_wrapper(list.__iadd__)
|
__iadd__ = mark_as_changed_wrapper(list.__iadd__)
|
||||||
__imul__ = mark_as_changed_wrapper(list.__imul__)
|
__imul__ = mark_as_changed_wrapper(list.__imul__)
|
||||||
|
|
||||||
|
if six.PY2:
|
||||||
|
# Under py3 __setslice__, __delslice__ and __getslice__
|
||||||
|
# are replaced by __setitem__, __delitem__ and __getitem__ with a slice as parameter
|
||||||
|
# so we mimic this under python 2
|
||||||
|
def __setslice__(self, i, j, sequence):
|
||||||
|
return self.__setitem__(slice(i, j), sequence)
|
||||||
|
|
||||||
|
def __delslice__(self, i, j):
|
||||||
|
return self.__delitem__(slice(i, j))
|
||||||
|
|
||||||
|
def __getslice__(self, i, j):
|
||||||
|
return self.__getitem__(slice(i, j))
|
||||||
|
|
||||||
def _mark_as_changed(self, key=None):
|
def _mark_as_changed(self, key=None):
|
||||||
if hasattr(self._instance, "_mark_as_changed"):
|
if hasattr(self._instance, "_mark_as_changed"):
|
||||||
if key is not None:
|
if key:
|
||||||
self._instance._mark_as_changed(f"{self._name}.{key % len(self)}")
|
self._instance._mark_as_changed("%s.%s" % (self._name, key % len(self)))
|
||||||
else:
|
else:
|
||||||
self._instance._mark_as_changed(self._name)
|
self._instance._mark_as_changed(self._name)
|
||||||
|
|
||||||
|
|
||||||
class EmbeddedDocumentList(BaseList):
|
class EmbeddedDocumentList(BaseList):
|
||||||
def __init__(self, list_items, instance, name):
|
def __init__(self, list_items, instance, name):
|
||||||
super().__init__(list_items, instance, name)
|
super(EmbeddedDocumentList, self).__init__(list_items, instance, name)
|
||||||
self._instance = instance
|
self._instance = instance
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
@ -197,7 +213,7 @@ class EmbeddedDocumentList(BaseList):
|
|||||||
"""
|
"""
|
||||||
for key, expected_value in kwargs.items():
|
for key, expected_value in kwargs.items():
|
||||||
doc_val = getattr(embedded_doc, key)
|
doc_val = getattr(embedded_doc, key)
|
||||||
if doc_val != expected_value and str(doc_val) != expected_value:
|
if doc_val != expected_value and six.text_type(doc_val) != expected_value:
|
||||||
return False
|
return False
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@ -213,7 +229,7 @@ class EmbeddedDocumentList(BaseList):
|
|||||||
Filters the list by only including embedded documents with the
|
Filters the list by only including embedded documents with the
|
||||||
given keyword arguments.
|
given keyword arguments.
|
||||||
|
|
||||||
This method only supports simple comparison (e.g. .filter(name='John Doe'))
|
This method only supports simple comparison (e.g: .filter(name='John Doe'))
|
||||||
and does not support operators like __gte, __lte, __icontains like queryset.filter does
|
and does not support operators like __gte, __lte, __icontains like queryset.filter does
|
||||||
|
|
||||||
:param kwargs: The keyword arguments corresponding to the fields to
|
:param kwargs: The keyword arguments corresponding to the fields to
|
||||||
@ -287,11 +303,11 @@ class EmbeddedDocumentList(BaseList):
|
|||||||
|
|
||||||
def create(self, **values):
|
def create(self, **values):
|
||||||
"""
|
"""
|
||||||
Creates a new instance of the EmbeddedDocument and appends it to this EmbeddedDocumentList.
|
Creates a new embedded document and saves it to the database.
|
||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
the instance of the EmbeddedDocument is not automatically saved to the database.
|
The embedded document changes are not automatically saved
|
||||||
You still need to call .save() on the parent Document.
|
to the database after calling this method.
|
||||||
|
|
||||||
:param values: A dictionary of values for the embedded document.
|
:param values: A dictionary of values for the embedded document.
|
||||||
:return: The new embedded document instance.
|
:return: The new embedded document instance.
|
||||||
@ -352,13 +368,13 @@ class EmbeddedDocumentList(BaseList):
|
|||||||
return len(values)
|
return len(values)
|
||||||
|
|
||||||
|
|
||||||
class StrictDict:
|
class StrictDict(object):
|
||||||
__slots__ = ()
|
__slots__ = ()
|
||||||
_special_fields = {"get", "pop", "iteritems", "items", "keys", "create"}
|
_special_fields = {"get", "pop", "iteritems", "items", "keys", "create"}
|
||||||
_classes = {}
|
_classes = {}
|
||||||
|
|
||||||
def __init__(self, **kwargs):
|
def __init__(self, **kwargs):
|
||||||
for k, v in kwargs.items():
|
for k, v in iteritems(kwargs):
|
||||||
setattr(self, k, v)
|
setattr(self, k, v)
|
||||||
|
|
||||||
def __getitem__(self, key):
|
def __getitem__(self, key):
|
||||||
@ -406,13 +422,13 @@ class StrictDict:
|
|||||||
return (key for key in self.__slots__ if hasattr(self, key))
|
return (key for key in self.__slots__ if hasattr(self, key))
|
||||||
|
|
||||||
def __len__(self):
|
def __len__(self):
|
||||||
return len(list(self.items()))
|
return len(list(iteritems(self)))
|
||||||
|
|
||||||
def __eq__(self, other):
|
def __eq__(self, other):
|
||||||
return list(self.items()) == list(other.items())
|
return self.items() == other.items()
|
||||||
|
|
||||||
def __ne__(self, other):
|
def __ne__(self, other):
|
||||||
return not (self == other)
|
return self.items() != other.items()
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def create(cls, allowed_keys):
|
def create(cls, allowed_keys):
|
||||||
@ -427,7 +443,7 @@ class StrictDict:
|
|||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return "{%s}" % ", ".join(
|
return "{%s}" % ", ".join(
|
||||||
f'"{k!s}": {v!r}' for k, v in self.items()
|
'"{0!s}": {1!r}'.format(k, v) for k, v in self.items()
|
||||||
)
|
)
|
||||||
|
|
||||||
cls._classes[allowed_keys] = SpecificStrictDict
|
cls._classes[allowed_keys] = SpecificStrictDict
|
||||||
@ -452,7 +468,9 @@ class LazyReference(DBRef):
|
|||||||
self.document_type = document_type
|
self.document_type = document_type
|
||||||
self._cached_doc = cached_doc
|
self._cached_doc = cached_doc
|
||||||
self.passthrough = passthrough
|
self.passthrough = passthrough
|
||||||
super().__init__(self.document_type._get_collection_name(), pk)
|
super(LazyReference, self).__init__(
|
||||||
|
self.document_type._get_collection_name(), pk
|
||||||
|
)
|
||||||
|
|
||||||
def __getitem__(self, name):
|
def __getitem__(self, name):
|
||||||
if not self.passthrough:
|
if not self.passthrough:
|
||||||
@ -470,4 +488,4 @@ class LazyReference(DBRef):
|
|||||||
raise AttributeError()
|
raise AttributeError()
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return f"<LazyReference({self.document_type}, {self.pk!r})>"
|
return "<LazyReference(%s, %r)>" % (self.document_type, self.pk)
|
||||||
|
@ -2,8 +2,10 @@ import copy
|
|||||||
import numbers
|
import numbers
|
||||||
from functools import partial
|
from functools import partial
|
||||||
|
|
||||||
|
from bson import DBRef, ObjectId, SON, json_util
|
||||||
import pymongo
|
import pymongo
|
||||||
from bson import SON, DBRef, ObjectId, json_util
|
import six
|
||||||
|
from six import iteritems
|
||||||
|
|
||||||
from mongoengine import signals
|
from mongoengine import signals
|
||||||
from mongoengine.base.common import get_document
|
from mongoengine.base.common import get_document
|
||||||
@ -23,13 +25,14 @@ from mongoengine.errors import (
|
|||||||
OperationError,
|
OperationError,
|
||||||
ValidationError,
|
ValidationError,
|
||||||
)
|
)
|
||||||
|
from mongoengine.python_support import Hashable
|
||||||
|
|
||||||
__all__ = ("BaseDocument", "NON_FIELD_ERRORS")
|
__all__ = ("BaseDocument", "NON_FIELD_ERRORS")
|
||||||
|
|
||||||
NON_FIELD_ERRORS = "__all__"
|
NON_FIELD_ERRORS = "__all__"
|
||||||
|
|
||||||
|
|
||||||
class BaseDocument:
|
class BaseDocument(object):
|
||||||
# TODO simplify how `_changed_fields` is used.
|
# TODO simplify how `_changed_fields` is used.
|
||||||
# Currently, handling of `_changed_fields` seems unnecessarily convoluted:
|
# Currently, handling of `_changed_fields` seems unnecessarily convoluted:
|
||||||
# 1. `BaseDocument` defines `_changed_fields` in its `__slots__`, yet it's
|
# 1. `BaseDocument` defines `_changed_fields` in its `__slots__`, yet it's
|
||||||
@ -63,6 +66,8 @@ class BaseDocument:
|
|||||||
It may contain additional reserved keywords, e.g. "__auto_convert".
|
It may contain additional reserved keywords, e.g. "__auto_convert".
|
||||||
:param __auto_convert: If True, supplied values will be converted
|
:param __auto_convert: If True, supplied values will be converted
|
||||||
to Python-type values via each field's `to_python` method.
|
to Python-type values via each field's `to_python` method.
|
||||||
|
:param __only_fields: A set of fields that have been loaded for
|
||||||
|
this document. Empty if all fields have been loaded.
|
||||||
:param _created: Indicates whether this is a brand new document
|
:param _created: Indicates whether this is a brand new document
|
||||||
or whether it's already been persisted before. Defaults to true.
|
or whether it's already been persisted before. Defaults to true.
|
||||||
"""
|
"""
|
||||||
@ -77,6 +82,8 @@ class BaseDocument:
|
|||||||
|
|
||||||
__auto_convert = values.pop("__auto_convert", True)
|
__auto_convert = values.pop("__auto_convert", True)
|
||||||
|
|
||||||
|
__only_fields = set(values.pop("__only_fields", values))
|
||||||
|
|
||||||
_created = values.pop("_created", True)
|
_created = values.pop("_created", True)
|
||||||
|
|
||||||
signals.pre_init.send(self.__class__, document=self, values=values)
|
signals.pre_init.send(self.__class__, document=self, values=values)
|
||||||
@ -85,10 +92,12 @@ class BaseDocument:
|
|||||||
# if so raise an Exception.
|
# if so raise an Exception.
|
||||||
if not self._dynamic and (self._meta.get("strict", True) or _created):
|
if not self._dynamic and (self._meta.get("strict", True) or _created):
|
||||||
_undefined_fields = set(values.keys()) - set(
|
_undefined_fields = set(values.keys()) - set(
|
||||||
list(self._fields.keys()) + ["id", "pk", "_cls", "_text_score"]
|
self._fields.keys() + ["id", "pk", "_cls", "_text_score"]
|
||||||
)
|
)
|
||||||
if _undefined_fields:
|
if _undefined_fields:
|
||||||
msg = f'The fields "{_undefined_fields}" do not exist on the document "{self._class_name}"'
|
msg = ('The fields "{0}" do not exist on the document "{1}"').format(
|
||||||
|
_undefined_fields, self._class_name
|
||||||
|
)
|
||||||
raise FieldDoesNotExist(msg)
|
raise FieldDoesNotExist(msg)
|
||||||
|
|
||||||
if self.STRICT and not self._dynamic:
|
if self.STRICT and not self._dynamic:
|
||||||
@ -98,32 +107,37 @@ class BaseDocument:
|
|||||||
|
|
||||||
self._dynamic_fields = SON()
|
self._dynamic_fields = SON()
|
||||||
|
|
||||||
# Assign default values for fields
|
# Assign default values to the instance.
|
||||||
# not set in the constructor
|
# We set default values only for fields loaded from DB. See
|
||||||
for field_name in self._fields:
|
# https://github.com/mongoengine/mongoengine/issues/399 for more info.
|
||||||
if field_name in values:
|
for key, field in iteritems(self._fields):
|
||||||
|
if self._db_field_map.get(key, key) in __only_fields:
|
||||||
continue
|
continue
|
||||||
value = getattr(self, field_name, None)
|
value = getattr(self, key, None)
|
||||||
setattr(self, field_name, value)
|
setattr(self, key, value)
|
||||||
|
|
||||||
if "_cls" not in values:
|
if "_cls" not in values:
|
||||||
self._cls = self._class_name
|
self._cls = self._class_name
|
||||||
|
|
||||||
# Set actual values
|
# Set passed values after initialisation
|
||||||
dynamic_data = {}
|
if self._dynamic:
|
||||||
FileField = _import_class("FileField")
|
dynamic_data = {}
|
||||||
for key, value in values.items():
|
for key, value in iteritems(values):
|
||||||
field = self._fields.get(key)
|
if key in self._fields or key == "_id":
|
||||||
if field or key in ("id", "pk", "_cls"):
|
setattr(self, key, value)
|
||||||
if __auto_convert and value is not None:
|
else:
|
||||||
if field and not isinstance(field, FileField):
|
dynamic_data[key] = value
|
||||||
value = field.to_python(value)
|
else:
|
||||||
setattr(self, key, value)
|
FileField = _import_class("FileField")
|
||||||
else:
|
for key, value in iteritems(values):
|
||||||
if self._dynamic:
|
key = self._reverse_db_field_map.get(key, key)
|
||||||
dynamic_data[key] = value
|
if key in self._fields or key in ("id", "pk", "_cls"):
|
||||||
|
if __auto_convert and value is not None:
|
||||||
|
field = self._fields.get(key)
|
||||||
|
if field and not isinstance(field, FileField):
|
||||||
|
value = field.to_python(value)
|
||||||
|
setattr(self, key, value)
|
||||||
else:
|
else:
|
||||||
# For strict Document
|
|
||||||
self._data[key] = value
|
self._data[key] = value
|
||||||
|
|
||||||
# Set any get_<field>_display methods
|
# Set any get_<field>_display methods
|
||||||
@ -131,7 +145,7 @@ class BaseDocument:
|
|||||||
|
|
||||||
if self._dynamic:
|
if self._dynamic:
|
||||||
self._dynamic_lock = False
|
self._dynamic_lock = False
|
||||||
for key, value in dynamic_data.items():
|
for key, value in iteritems(dynamic_data):
|
||||||
setattr(self, key, value)
|
setattr(self, key, value)
|
||||||
|
|
||||||
# Flag initialised
|
# Flag initialised
|
||||||
@ -149,13 +163,13 @@ class BaseDocument:
|
|||||||
default = default()
|
default = default()
|
||||||
setattr(self, field_name, default)
|
setattr(self, field_name, default)
|
||||||
else:
|
else:
|
||||||
super().__delattr__(*args, **kwargs)
|
super(BaseDocument, self).__delattr__(*args, **kwargs)
|
||||||
|
|
||||||
def __setattr__(self, name, value):
|
def __setattr__(self, name, value):
|
||||||
# Handle dynamic data only if an initialised dynamic document
|
# Handle dynamic data only if an initialised dynamic document
|
||||||
if self._dynamic and not self._dynamic_lock:
|
if self._dynamic and not self._dynamic_lock:
|
||||||
|
|
||||||
if name not in self._fields_ordered and not name.startswith("_"):
|
if not hasattr(self, name) and not name.startswith("_"):
|
||||||
DynamicField = _import_class("DynamicField")
|
DynamicField = _import_class("DynamicField")
|
||||||
field = DynamicField(db_field=name, null=True)
|
field = DynamicField(db_field=name, null=True)
|
||||||
field.name = name
|
field.name = name
|
||||||
@ -196,9 +210,9 @@ class BaseDocument:
|
|||||||
and self__created
|
and self__created
|
||||||
and name == self._meta.get("id_field")
|
and name == self._meta.get("id_field")
|
||||||
):
|
):
|
||||||
super().__setattr__("_created", False)
|
super(BaseDocument, self).__setattr__("_created", False)
|
||||||
|
|
||||||
super().__setattr__(name, value)
|
super(BaseDocument, self).__setattr__(name, value)
|
||||||
|
|
||||||
def __getstate__(self):
|
def __getstate__(self):
|
||||||
data = {}
|
data = {}
|
||||||
@ -228,10 +242,10 @@ class BaseDocument:
|
|||||||
setattr(self, k, data[k])
|
setattr(self, k, data[k])
|
||||||
if "_fields_ordered" in data:
|
if "_fields_ordered" in data:
|
||||||
if self._dynamic:
|
if self._dynamic:
|
||||||
self._fields_ordered = data["_fields_ordered"]
|
setattr(self, "_fields_ordered", data["_fields_ordered"])
|
||||||
else:
|
else:
|
||||||
_super_fields_ordered = type(self)._fields_ordered
|
_super_fields_ordered = type(self)._fields_ordered
|
||||||
self._fields_ordered = _super_fields_ordered
|
setattr(self, "_fields_ordered", _super_fields_ordered)
|
||||||
|
|
||||||
dynamic_fields = data.get("_dynamic_fields") or SON()
|
dynamic_fields = data.get("_dynamic_fields") or SON()
|
||||||
for k in dynamic_fields.keys():
|
for k in dynamic_fields.keys():
|
||||||
@ -241,7 +255,8 @@ class BaseDocument:
|
|||||||
return iter(self._fields_ordered)
|
return iter(self._fields_ordered)
|
||||||
|
|
||||||
def __getitem__(self, name):
|
def __getitem__(self, name):
|
||||||
"""Dictionary-style field access, return a field's value if present."""
|
"""Dictionary-style field access, return a field's value if present.
|
||||||
|
"""
|
||||||
try:
|
try:
|
||||||
if name in self._fields_ordered:
|
if name in self._fields_ordered:
|
||||||
return getattr(self, name)
|
return getattr(self, name)
|
||||||
@ -250,7 +265,8 @@ class BaseDocument:
|
|||||||
raise KeyError(name)
|
raise KeyError(name)
|
||||||
|
|
||||||
def __setitem__(self, name, value):
|
def __setitem__(self, name, value):
|
||||||
"""Dictionary-style field access, set a field's value."""
|
"""Dictionary-style field access, set a field's value.
|
||||||
|
"""
|
||||||
# Ensure that the field exists before settings its value
|
# Ensure that the field exists before settings its value
|
||||||
if not self._dynamic and name not in self._fields:
|
if not self._dynamic and name not in self._fields:
|
||||||
raise KeyError(name)
|
raise KeyError(name)
|
||||||
@ -272,13 +288,16 @@ class BaseDocument:
|
|||||||
except (UnicodeEncodeError, UnicodeDecodeError):
|
except (UnicodeEncodeError, UnicodeDecodeError):
|
||||||
u = "[Bad Unicode data]"
|
u = "[Bad Unicode data]"
|
||||||
repr_type = str if u is None else type(u)
|
repr_type = str if u is None else type(u)
|
||||||
return repr_type(f"<{self.__class__.__name__}: {u}>")
|
return repr_type("<%s: %s>" % (self.__class__.__name__, u))
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
# TODO this could be simpler?
|
# TODO this could be simpler?
|
||||||
if hasattr(self, "__unicode__"):
|
if hasattr(self, "__unicode__"):
|
||||||
return self.__unicode__()
|
if six.PY3:
|
||||||
return "%s object" % self.__class__.__name__
|
return self.__unicode__()
|
||||||
|
else:
|
||||||
|
return six.text_type(self).encode("utf-8")
|
||||||
|
return six.text_type("%s object" % self.__class__.__name__)
|
||||||
|
|
||||||
def __eq__(self, other):
|
def __eq__(self, other):
|
||||||
if (
|
if (
|
||||||
@ -300,8 +319,7 @@ class BaseDocument:
|
|||||||
|
|
||||||
def clean(self):
|
def clean(self):
|
||||||
"""
|
"""
|
||||||
Hook for doing document level data cleaning (usually validation or assignment)
|
Hook for doing document level data cleaning before validation is run.
|
||||||
before validation is run.
|
|
||||||
|
|
||||||
Any ValidationError raised by this method will not be associated with
|
Any ValidationError raised by this method will not be associated with
|
||||||
a particular field; it will have a special-case association with the
|
a particular field; it will have a special-case association with the
|
||||||
@ -428,7 +446,7 @@ class BaseDocument:
|
|||||||
pk = self.pk
|
pk = self.pk
|
||||||
elif self._instance and hasattr(self._instance, "pk"):
|
elif self._instance and hasattr(self._instance, "pk"):
|
||||||
pk = self._instance.pk
|
pk = self._instance.pk
|
||||||
message = f"ValidationError ({self._class_name}:{pk}) "
|
message = "ValidationError (%s:%s) " % (self._class_name, pk)
|
||||||
raise ValidationError(message, errors=errors)
|
raise ValidationError(message, errors=errors)
|
||||||
|
|
||||||
def to_json(self, *args, **kwargs):
|
def to_json(self, *args, **kwargs):
|
||||||
@ -501,7 +519,7 @@ class BaseDocument:
|
|||||||
if "." in key:
|
if "." in key:
|
||||||
key, rest = key.split(".", 1)
|
key, rest = key.split(".", 1)
|
||||||
key = self._db_field_map.get(key, key)
|
key = self._db_field_map.get(key, key)
|
||||||
key = f"{key}.{rest}"
|
key = "%s.%s" % (key, rest)
|
||||||
else:
|
else:
|
||||||
key = self._db_field_map.get(key, key)
|
key = self._db_field_map.get(key, key)
|
||||||
|
|
||||||
@ -524,9 +542,6 @@ class BaseDocument:
|
|||||||
"""Using _get_changed_fields iterate and remove any fields that
|
"""Using _get_changed_fields iterate and remove any fields that
|
||||||
are marked as changed.
|
are marked as changed.
|
||||||
"""
|
"""
|
||||||
ReferenceField = _import_class("ReferenceField")
|
|
||||||
GenericReferenceField = _import_class("GenericReferenceField")
|
|
||||||
|
|
||||||
for changed in self._get_changed_fields():
|
for changed in self._get_changed_fields():
|
||||||
parts = changed.split(".")
|
parts = changed.split(".")
|
||||||
data = self
|
data = self
|
||||||
@ -539,8 +554,7 @@ class BaseDocument:
|
|||||||
elif isinstance(data, dict):
|
elif isinstance(data, dict):
|
||||||
data = data.get(part, None)
|
data = data.get(part, None)
|
||||||
else:
|
else:
|
||||||
field_name = data._reverse_db_field_map.get(part, part)
|
data = getattr(data, part, None)
|
||||||
data = getattr(data, field_name, None)
|
|
||||||
|
|
||||||
if not isinstance(data, LazyReference) and hasattr(
|
if not isinstance(data, LazyReference) and hasattr(
|
||||||
data, "_changed_fields"
|
data, "_changed_fields"
|
||||||
@ -549,40 +563,10 @@ class BaseDocument:
|
|||||||
continue
|
continue
|
||||||
|
|
||||||
data._changed_fields = []
|
data._changed_fields = []
|
||||||
elif isinstance(data, (list, tuple, dict)):
|
|
||||||
if hasattr(data, "field") and isinstance(
|
|
||||||
data.field, (ReferenceField, GenericReferenceField)
|
|
||||||
):
|
|
||||||
continue
|
|
||||||
BaseDocument._nestable_types_clear_changed_fields(data)
|
|
||||||
|
|
||||||
self._changed_fields = []
|
self._changed_fields = []
|
||||||
|
|
||||||
@staticmethod
|
def _nestable_types_changed_fields(self, changed_fields, base_key, data):
|
||||||
def _nestable_types_clear_changed_fields(data):
|
|
||||||
"""Inspect nested data for changed fields
|
|
||||||
|
|
||||||
:param data: data to inspect for changes
|
|
||||||
"""
|
|
||||||
Document = _import_class("Document")
|
|
||||||
|
|
||||||
# Loop list / dict fields as they contain documents
|
|
||||||
# Determine the iterator to use
|
|
||||||
if not hasattr(data, "items"):
|
|
||||||
iterator = enumerate(data)
|
|
||||||
else:
|
|
||||||
iterator = data.items()
|
|
||||||
|
|
||||||
for _index_or_key, value in iterator:
|
|
||||||
if hasattr(value, "_get_changed_fields") and not isinstance(
|
|
||||||
value, Document
|
|
||||||
): # don't follow references
|
|
||||||
value._clear_changed_fields()
|
|
||||||
elif isinstance(value, (list, tuple, dict)):
|
|
||||||
BaseDocument._nestable_types_clear_changed_fields(value)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def _nestable_types_changed_fields(changed_fields, base_key, data):
|
|
||||||
"""Inspect nested data for changed fields
|
"""Inspect nested data for changed fields
|
||||||
|
|
||||||
:param changed_fields: Previously collected changed fields
|
:param changed_fields: Previously collected changed fields
|
||||||
@ -594,10 +578,10 @@ class BaseDocument:
|
|||||||
if not hasattr(data, "items"):
|
if not hasattr(data, "items"):
|
||||||
iterator = enumerate(data)
|
iterator = enumerate(data)
|
||||||
else:
|
else:
|
||||||
iterator = data.items()
|
iterator = iteritems(data)
|
||||||
|
|
||||||
for index_or_key, value in iterator:
|
for index_or_key, value in iterator:
|
||||||
item_key = f"{base_key}{index_or_key}."
|
item_key = "%s%s." % (base_key, index_or_key)
|
||||||
# don't check anything lower if this key is already marked
|
# don't check anything lower if this key is already marked
|
||||||
# as changed.
|
# as changed.
|
||||||
if item_key[:-1] in changed_fields:
|
if item_key[:-1] in changed_fields:
|
||||||
@ -605,18 +589,15 @@ class BaseDocument:
|
|||||||
|
|
||||||
if hasattr(value, "_get_changed_fields"):
|
if hasattr(value, "_get_changed_fields"):
|
||||||
changed = value._get_changed_fields()
|
changed = value._get_changed_fields()
|
||||||
changed_fields += [f"{item_key}{k}" for k in changed if k]
|
changed_fields += ["%s%s" % (item_key, k) for k in changed if k]
|
||||||
elif isinstance(value, (list, tuple, dict)):
|
elif isinstance(value, (list, tuple, dict)):
|
||||||
BaseDocument._nestable_types_changed_fields(
|
self._nestable_types_changed_fields(changed_fields, item_key, value)
|
||||||
changed_fields, item_key, value
|
|
||||||
)
|
|
||||||
|
|
||||||
def _get_changed_fields(self):
|
def _get_changed_fields(self):
|
||||||
"""Return a list of all fields that have explicitly been changed."""
|
"""Return a list of all fields that have explicitly been changed.
|
||||||
|
"""
|
||||||
EmbeddedDocument = _import_class("EmbeddedDocument")
|
EmbeddedDocument = _import_class("EmbeddedDocument")
|
||||||
LazyReferenceField = _import_class("LazyReferenceField")
|
|
||||||
ReferenceField = _import_class("ReferenceField")
|
ReferenceField = _import_class("ReferenceField")
|
||||||
GenericLazyReferenceField = _import_class("GenericLazyReferenceField")
|
|
||||||
GenericReferenceField = _import_class("GenericReferenceField")
|
GenericReferenceField = _import_class("GenericReferenceField")
|
||||||
SortedListField = _import_class("SortedListField")
|
SortedListField = _import_class("SortedListField")
|
||||||
|
|
||||||
@ -639,16 +620,10 @@ class BaseDocument:
|
|||||||
if isinstance(data, EmbeddedDocument):
|
if isinstance(data, EmbeddedDocument):
|
||||||
# Find all embedded fields that have been changed
|
# Find all embedded fields that have been changed
|
||||||
changed = data._get_changed_fields()
|
changed = data._get_changed_fields()
|
||||||
changed_fields += [f"{key}{k}" for k in changed if k]
|
changed_fields += ["%s%s" % (key, k) for k in changed if k]
|
||||||
elif isinstance(data, (list, tuple, dict)):
|
elif isinstance(data, (list, tuple, dict)):
|
||||||
if hasattr(field, "field") and isinstance(
|
if hasattr(field, "field") and isinstance(
|
||||||
field.field,
|
field.field, (ReferenceField, GenericReferenceField)
|
||||||
(
|
|
||||||
LazyReferenceField,
|
|
||||||
ReferenceField,
|
|
||||||
GenericLazyReferenceField,
|
|
||||||
GenericReferenceField,
|
|
||||||
),
|
|
||||||
):
|
):
|
||||||
continue
|
continue
|
||||||
elif isinstance(field, SortedListField) and field._ordering:
|
elif isinstance(field, SortedListField) and field._ordering:
|
||||||
@ -695,7 +670,7 @@ class BaseDocument:
|
|||||||
del set_data["_id"]
|
del set_data["_id"]
|
||||||
|
|
||||||
# Determine if any changed items were actually unset.
|
# Determine if any changed items were actually unset.
|
||||||
for path, value in list(set_data.items()):
|
for path, value in set_data.items():
|
||||||
if value or isinstance(
|
if value or isinstance(
|
||||||
value, (numbers.Number, bool)
|
value, (numbers.Number, bool)
|
||||||
): # Account for 0 and True that are truthy
|
): # Account for 0 and True that are truthy
|
||||||
@ -751,8 +726,11 @@ class BaseDocument:
|
|||||||
return cls._meta.get("collection", None)
|
return cls._meta.get("collection", None)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def _from_son(cls, son, _auto_dereference=True, created=False):
|
def _from_son(cls, son, _auto_dereference=True, only_fields=None, created=False):
|
||||||
"""Create an instance of a Document (subclass) from a PyMongo SON (dict)"""
|
"""Create an instance of a Document (subclass) from a PyMongo SON."""
|
||||||
|
if not only_fields:
|
||||||
|
only_fields = []
|
||||||
|
|
||||||
if son and not isinstance(son, dict):
|
if son and not isinstance(son, dict):
|
||||||
raise ValueError(
|
raise ValueError(
|
||||||
"The source SON object needs to be of type 'dict' but a '%s' was found"
|
"The source SON object needs to be of type 'dict' but a '%s' was found"
|
||||||
@ -765,10 +743,8 @@ class BaseDocument:
|
|||||||
|
|
||||||
# Convert SON to a data dict, making sure each key is a string and
|
# Convert SON to a data dict, making sure each key is a string and
|
||||||
# corresponds to the right db field.
|
# corresponds to the right db field.
|
||||||
# This is needed as _from_son is currently called both from BaseDocument.__init__
|
|
||||||
# and from EmbeddedDocumentField.to_python
|
|
||||||
data = {}
|
data = {}
|
||||||
for key, value in son.items():
|
for key, value in iteritems(son):
|
||||||
key = str(key)
|
key = str(key)
|
||||||
key = cls._db_field_map.get(key, key)
|
key = cls._db_field_map.get(key, key)
|
||||||
data[key] = value
|
data[key] = value
|
||||||
@ -783,7 +759,7 @@ class BaseDocument:
|
|||||||
if not _auto_dereference:
|
if not _auto_dereference:
|
||||||
fields = copy.deepcopy(fields)
|
fields = copy.deepcopy(fields)
|
||||||
|
|
||||||
for field_name, field in fields.items():
|
for field_name, field in iteritems(fields):
|
||||||
field._auto_dereference = _auto_dereference
|
field._auto_dereference = _auto_dereference
|
||||||
if field.db_field in data:
|
if field.db_field in data:
|
||||||
value = data[field.db_field]
|
value = data[field.db_field]
|
||||||
@ -797,8 +773,10 @@ class BaseDocument:
|
|||||||
errors_dict[field_name] = e
|
errors_dict[field_name] = e
|
||||||
|
|
||||||
if errors_dict:
|
if errors_dict:
|
||||||
errors = "\n".join([f"Field '{k}' - {v}" for k, v in errors_dict.items()])
|
errors = "\n".join(
|
||||||
msg = "Invalid data to create a `{}` instance.\n{}".format(
|
["Field '%s' - %s" % (k, v) for k, v in errors_dict.items()]
|
||||||
|
)
|
||||||
|
msg = "Invalid data to create a `%s` instance.\n%s" % (
|
||||||
cls._class_name,
|
cls._class_name,
|
||||||
errors,
|
errors,
|
||||||
)
|
)
|
||||||
@ -806,9 +784,11 @@ class BaseDocument:
|
|||||||
|
|
||||||
# In STRICT documents, remove any keys that aren't in cls._fields
|
# In STRICT documents, remove any keys that aren't in cls._fields
|
||||||
if cls.STRICT:
|
if cls.STRICT:
|
||||||
data = {k: v for k, v in data.items() if k in cls._fields}
|
data = {k: v for k, v in iteritems(data) if k in cls._fields}
|
||||||
|
|
||||||
obj = cls(__auto_convert=False, _created=created, **data)
|
obj = cls(
|
||||||
|
__auto_convert=False, _created=created, __only_fields=only_fields, **data
|
||||||
|
)
|
||||||
obj._changed_fields = []
|
obj._changed_fields = []
|
||||||
if not _auto_dereference:
|
if not _auto_dereference:
|
||||||
obj._fields = fields
|
obj._fields = fields
|
||||||
@ -851,7 +831,7 @@ class BaseDocument:
|
|||||||
@classmethod
|
@classmethod
|
||||||
def _build_index_spec(cls, spec):
|
def _build_index_spec(cls, spec):
|
||||||
"""Build a PyMongo index spec from a MongoEngine index spec."""
|
"""Build a PyMongo index spec from a MongoEngine index spec."""
|
||||||
if isinstance(spec, str):
|
if isinstance(spec, six.string_types):
|
||||||
spec = {"fields": [spec]}
|
spec = {"fields": [spec]}
|
||||||
elif isinstance(spec, (list, tuple)):
|
elif isinstance(spec, (list, tuple)):
|
||||||
spec = {"fields": list(spec)}
|
spec = {"fields": list(spec)}
|
||||||
@ -948,7 +928,7 @@ class BaseDocument:
|
|||||||
|
|
||||||
# Add any unique_with fields to the back of the index spec
|
# Add any unique_with fields to the back of the index spec
|
||||||
if field.unique_with:
|
if field.unique_with:
|
||||||
if isinstance(field.unique_with, str):
|
if isinstance(field.unique_with, six.string_types):
|
||||||
field.unique_with = [field.unique_with]
|
field.unique_with = [field.unique_with]
|
||||||
|
|
||||||
# Convert unique_with field names to real field names
|
# Convert unique_with field names to real field names
|
||||||
@ -968,7 +948,9 @@ class BaseDocument:
|
|||||||
unique_fields += unique_with
|
unique_fields += unique_with
|
||||||
|
|
||||||
# Add the new index to the list
|
# Add the new index to the list
|
||||||
fields = [(f"{namespace}{f}", pymongo.ASCENDING) for f in unique_fields]
|
fields = [
|
||||||
|
("%s%s" % (namespace, f), pymongo.ASCENDING) for f in unique_fields
|
||||||
|
]
|
||||||
index = {"fields": fields, "unique": True, "sparse": sparse}
|
index = {"fields": fields, "unique": True, "sparse": sparse}
|
||||||
unique_indexes.append(index)
|
unique_indexes.append(index)
|
||||||
|
|
||||||
@ -1004,7 +986,9 @@ class BaseDocument:
|
|||||||
"PolygonField",
|
"PolygonField",
|
||||||
)
|
)
|
||||||
|
|
||||||
geo_field_types = tuple(_import_class(field) for field in geo_field_type_names)
|
geo_field_types = tuple(
|
||||||
|
[_import_class(field) for field in geo_field_type_names]
|
||||||
|
)
|
||||||
|
|
||||||
for field in cls._fields.values():
|
for field in cls._fields.values():
|
||||||
if not isinstance(field, geo_field_types):
|
if not isinstance(field, geo_field_types):
|
||||||
@ -1022,7 +1006,7 @@ class BaseDocument:
|
|||||||
elif field._geo_index:
|
elif field._geo_index:
|
||||||
field_name = field.db_field
|
field_name = field.db_field
|
||||||
if parent_field:
|
if parent_field:
|
||||||
field_name = f"{parent_field}.{field_name}"
|
field_name = "%s.%s" % (parent_field, field_name)
|
||||||
geo_indices.append({"fields": [(field_name, field._geo_index)]})
|
geo_indices.append({"fields": [(field_name, field._geo_index)]})
|
||||||
|
|
||||||
return geo_indices
|
return geo_indices
|
||||||
@ -1160,7 +1144,8 @@ class BaseDocument:
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def _translate_field_name(cls, field, sep="."):
|
def _translate_field_name(cls, field, sep="."):
|
||||||
"""Translate a field attribute name to a database field name."""
|
"""Translate a field attribute name to a database field name.
|
||||||
|
"""
|
||||||
parts = field.split(sep)
|
parts = field.split(sep)
|
||||||
parts = [f.db_field for f in cls._lookup_field(parts)]
|
parts = [f.db_field for f in cls._lookup_field(parts)]
|
||||||
return ".".join(parts)
|
return ".".join(parts)
|
||||||
@ -1190,6 +1175,9 @@ class BaseDocument:
|
|||||||
else [value]
|
else [value]
|
||||||
)
|
)
|
||||||
return sep.join(
|
return sep.join(
|
||||||
[str(dict(field.choices).get(val, val)) for val in values or []]
|
[
|
||||||
|
six.text_type(dict(field.choices).get(val, val))
|
||||||
|
for val in values or []
|
||||||
|
]
|
||||||
)
|
)
|
||||||
return value
|
return value
|
||||||
|
@ -1,27 +1,28 @@
|
|||||||
import operator
|
import operator
|
||||||
|
import warnings
|
||||||
import weakref
|
import weakref
|
||||||
|
|
||||||
|
from bson import DBRef, ObjectId, SON
|
||||||
import pymongo
|
import pymongo
|
||||||
from bson import SON, DBRef, ObjectId
|
import six
|
||||||
|
from six import iteritems
|
||||||
|
|
||||||
from mongoengine.base.common import UPDATE_OPERATORS
|
from mongoengine.base.common import UPDATE_OPERATORS
|
||||||
from mongoengine.base.datastructures import (
|
from mongoengine.base.datastructures import BaseDict, BaseList, EmbeddedDocumentList
|
||||||
BaseDict,
|
|
||||||
BaseList,
|
|
||||||
EmbeddedDocumentList,
|
|
||||||
)
|
|
||||||
from mongoengine.common import _import_class
|
from mongoengine.common import _import_class
|
||||||
from mongoengine.errors import DeprecatedError, ValidationError
|
from mongoengine.errors import DeprecatedError, ValidationError
|
||||||
|
|
||||||
__all__ = ("BaseField", "ComplexBaseField", "ObjectIdField", "GeoJsonBaseField")
|
__all__ = ("BaseField", "ComplexBaseField", "ObjectIdField", "GeoJsonBaseField")
|
||||||
|
|
||||||
|
|
||||||
class BaseField:
|
class BaseField(object):
|
||||||
"""A base class for fields in a MongoDB document. Instances of this class
|
"""A base class for fields in a MongoDB document. Instances of this class
|
||||||
may be added to subclasses of `Document` to define a document's schema.
|
may be added to subclasses of `Document` to define a document's schema.
|
||||||
|
|
||||||
|
.. versionchanged:: 0.5 - added verbose and help text
|
||||||
"""
|
"""
|
||||||
|
|
||||||
name = None # set in TopLevelDocumentMetaclass
|
name = None
|
||||||
_geo_index = False
|
_geo_index = False
|
||||||
_auto_gen = False # Call `generate` to generate a value
|
_auto_gen = False # Call `generate` to generate a value
|
||||||
_auto_dereference = True
|
_auto_dereference = True
|
||||||
@ -35,6 +36,7 @@ class BaseField:
|
|||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
db_field=None,
|
db_field=None,
|
||||||
|
name=None,
|
||||||
required=False,
|
required=False,
|
||||||
default=None,
|
default=None,
|
||||||
unique=False,
|
unique=False,
|
||||||
@ -44,11 +46,12 @@ class BaseField:
|
|||||||
choices=None,
|
choices=None,
|
||||||
null=False,
|
null=False,
|
||||||
sparse=False,
|
sparse=False,
|
||||||
**kwargs,
|
**kwargs
|
||||||
):
|
):
|
||||||
"""
|
"""
|
||||||
:param db_field: The database field to store this field in
|
:param db_field: The database field to store this field in
|
||||||
(defaults to the name of the field)
|
(defaults to the name of the field)
|
||||||
|
:param name: Deprecated - use db_field
|
||||||
:param required: If the field is required. Whether it has to have a
|
:param required: If the field is required. Whether it has to have a
|
||||||
value or not. Defaults to False.
|
value or not. Defaults to False.
|
||||||
:param default: (optional) The default value for this field if no value
|
:param default: (optional) The default value for this field if no value
|
||||||
@ -72,8 +75,11 @@ class BaseField:
|
|||||||
existing attributes. Common metadata includes `verbose_name` and
|
existing attributes. Common metadata includes `verbose_name` and
|
||||||
`help_text`.
|
`help_text`.
|
||||||
"""
|
"""
|
||||||
self.db_field = db_field if not primary_key else "_id"
|
self.db_field = (db_field or name) if not primary_key else "_id"
|
||||||
|
|
||||||
|
if name:
|
||||||
|
msg = 'Field\'s "name" attribute deprecated in favour of "db_field"'
|
||||||
|
warnings.warn(msg, DeprecationWarning)
|
||||||
self.required = required or primary_key
|
self.required = required or primary_key
|
||||||
self.default = default
|
self.default = default
|
||||||
self.unique = bool(unique or unique_with)
|
self.unique = bool(unique or unique_with)
|
||||||
@ -86,11 +92,13 @@ class BaseField:
|
|||||||
self._owner_document = None
|
self._owner_document = None
|
||||||
|
|
||||||
# Make sure db_field is a string (if it's explicitly defined).
|
# Make sure db_field is a string (if it's explicitly defined).
|
||||||
if self.db_field is not None and not isinstance(self.db_field, str):
|
if self.db_field is not None and not isinstance(
|
||||||
|
self.db_field, six.string_types
|
||||||
|
):
|
||||||
raise TypeError("db_field should be a string.")
|
raise TypeError("db_field should be a string.")
|
||||||
|
|
||||||
# Make sure db_field doesn't contain any forbidden characters.
|
# Make sure db_field doesn't contain any forbidden characters.
|
||||||
if isinstance(self.db_field, str) and (
|
if isinstance(self.db_field, six.string_types) and (
|
||||||
"." in self.db_field
|
"." in self.db_field
|
||||||
or "\0" in self.db_field
|
or "\0" in self.db_field
|
||||||
or self.db_field.startswith("$")
|
or self.db_field.startswith("$")
|
||||||
@ -121,7 +129,8 @@ class BaseField:
|
|||||||
BaseField.creation_counter += 1
|
BaseField.creation_counter += 1
|
||||||
|
|
||||||
def __get__(self, instance, owner):
|
def __get__(self, instance, owner):
|
||||||
"""Descriptor for retrieving a value from a field in a document."""
|
"""Descriptor for retrieving a value from a field in a document.
|
||||||
|
"""
|
||||||
if instance is None:
|
if instance is None:
|
||||||
# Document class being used rather than a document object
|
# Document class being used rather than a document object
|
||||||
return self
|
return self
|
||||||
@ -212,12 +221,14 @@ class BaseField:
|
|||||||
# Choices which are other types of Documents
|
# Choices which are other types of Documents
|
||||||
if isinstance(value, (Document, EmbeddedDocument)):
|
if isinstance(value, (Document, EmbeddedDocument)):
|
||||||
if not any(isinstance(value, c) for c in choice_list):
|
if not any(isinstance(value, c) for c in choice_list):
|
||||||
self.error("Value must be an instance of %s" % (choice_list))
|
self.error(
|
||||||
|
"Value must be an instance of %s" % (six.text_type(choice_list))
|
||||||
|
)
|
||||||
# Choices which are types other than Documents
|
# Choices which are types other than Documents
|
||||||
else:
|
else:
|
||||||
values = value if isinstance(value, (list, tuple)) else [value]
|
values = value if isinstance(value, (list, tuple)) else [value]
|
||||||
if len(set(values) - set(choice_list)):
|
if len(set(values) - set(choice_list)):
|
||||||
self.error("Value must be one of %s" % str(choice_list))
|
self.error("Value must be one of %s" % six.text_type(choice_list))
|
||||||
|
|
||||||
def _validate(self, value, **kwargs):
|
def _validate(self, value, **kwargs):
|
||||||
# Check the Choices Constraint
|
# Check the Choices Constraint
|
||||||
@ -265,22 +276,11 @@ class ComplexBaseField(BaseField):
|
|||||||
Allows for nesting of embedded documents inside complex types.
|
Allows for nesting of embedded documents inside complex types.
|
||||||
Handles the lazy dereferencing of a queryset by lazily dereferencing all
|
Handles the lazy dereferencing of a queryset by lazily dereferencing all
|
||||||
items in a list / dict rather than one at a time.
|
items in a list / dict rather than one at a time.
|
||||||
|
|
||||||
|
.. versionadded:: 0.5
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, field=None, **kwargs):
|
field = None
|
||||||
self.field = field
|
|
||||||
super().__init__(**kwargs)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def _lazy_load_refs(instance, name, ref_values, *, max_depth):
|
|
||||||
_dereference = _import_class("DeReference")()
|
|
||||||
documents = _dereference(
|
|
||||||
ref_values,
|
|
||||||
max_depth=max_depth,
|
|
||||||
instance=instance,
|
|
||||||
name=name,
|
|
||||||
)
|
|
||||||
return documents
|
|
||||||
|
|
||||||
def __get__(self, instance, owner):
|
def __get__(self, instance, owner):
|
||||||
"""Descriptor to automatically dereference references."""
|
"""Descriptor to automatically dereference references."""
|
||||||
@ -299,20 +299,24 @@ class ComplexBaseField(BaseField):
|
|||||||
or isinstance(self.field, (GenericReferenceField, ReferenceField))
|
or isinstance(self.field, (GenericReferenceField, ReferenceField))
|
||||||
)
|
)
|
||||||
|
|
||||||
|
_dereference = _import_class("DeReference")()
|
||||||
|
|
||||||
if (
|
if (
|
||||||
instance._initialised
|
instance._initialised
|
||||||
and dereference
|
and dereference
|
||||||
and instance._data.get(self.name)
|
and instance._data.get(self.name)
|
||||||
and not getattr(instance._data[self.name], "_dereferenced", False)
|
and not getattr(instance._data[self.name], "_dereferenced", False)
|
||||||
):
|
):
|
||||||
ref_values = instance._data.get(self.name)
|
instance._data[self.name] = _dereference(
|
||||||
instance._data[self.name] = self._lazy_load_refs(
|
instance._data.get(self.name),
|
||||||
ref_values=ref_values, instance=instance, name=self.name, max_depth=1
|
max_depth=1,
|
||||||
|
instance=instance,
|
||||||
|
name=self.name,
|
||||||
)
|
)
|
||||||
if hasattr(instance._data[self.name], "_dereferenced"):
|
if hasattr(instance._data[self.name], "_dereferenced"):
|
||||||
instance._data[self.name]._dereferenced = True
|
instance._data[self.name]._dereferenced = True
|
||||||
|
|
||||||
value = super().__get__(instance, owner)
|
value = super(ComplexBaseField, self).__get__(instance, owner)
|
||||||
|
|
||||||
# Convert lists / values so we can watch for any changes on them
|
# Convert lists / values so we can watch for any changes on them
|
||||||
if isinstance(value, (list, tuple)):
|
if isinstance(value, (list, tuple)):
|
||||||
@ -333,9 +337,7 @@ class ComplexBaseField(BaseField):
|
|||||||
and isinstance(value, (BaseList, BaseDict))
|
and isinstance(value, (BaseList, BaseDict))
|
||||||
and not value._dereferenced
|
and not value._dereferenced
|
||||||
):
|
):
|
||||||
value = self._lazy_load_refs(
|
value = _dereference(value, max_depth=1, instance=instance, name=self.name)
|
||||||
ref_values=value, instance=instance, name=self.name, max_depth=1
|
|
||||||
)
|
|
||||||
value._dereferenced = True
|
value._dereferenced = True
|
||||||
instance._data[self.name] = value
|
instance._data[self.name] = value
|
||||||
|
|
||||||
@ -343,7 +345,7 @@ class ComplexBaseField(BaseField):
|
|||||||
|
|
||||||
def to_python(self, value):
|
def to_python(self, value):
|
||||||
"""Convert a MongoDB-compatible type to a Python type."""
|
"""Convert a MongoDB-compatible type to a Python type."""
|
||||||
if isinstance(value, str):
|
if isinstance(value, six.string_types):
|
||||||
return value
|
return value
|
||||||
|
|
||||||
if hasattr(value, "to_python"):
|
if hasattr(value, "to_python"):
|
||||||
@ -397,7 +399,7 @@ class ComplexBaseField(BaseField):
|
|||||||
EmbeddedDocument = _import_class("EmbeddedDocument")
|
EmbeddedDocument = _import_class("EmbeddedDocument")
|
||||||
GenericReferenceField = _import_class("GenericReferenceField")
|
GenericReferenceField = _import_class("GenericReferenceField")
|
||||||
|
|
||||||
if isinstance(value, str):
|
if isinstance(value, six.string_types):
|
||||||
return value
|
return value
|
||||||
|
|
||||||
if hasattr(value, "to_mongo"):
|
if hasattr(value, "to_mongo"):
|
||||||
@ -421,11 +423,11 @@ class ComplexBaseField(BaseField):
|
|||||||
if self.field:
|
if self.field:
|
||||||
value_dict = {
|
value_dict = {
|
||||||
key: self.field._to_mongo_safe_call(item, use_db_field, fields)
|
key: self.field._to_mongo_safe_call(item, use_db_field, fields)
|
||||||
for key, item in value.items()
|
for key, item in iteritems(value)
|
||||||
}
|
}
|
||||||
else:
|
else:
|
||||||
value_dict = {}
|
value_dict = {}
|
||||||
for k, v in value.items():
|
for k, v in iteritems(value):
|
||||||
if isinstance(v, Document):
|
if isinstance(v, Document):
|
||||||
# We need the id from the saved object to create the DBRef
|
# We need the id from the saved object to create the DBRef
|
||||||
if v.pk is None:
|
if v.pk is None:
|
||||||
@ -464,8 +466,8 @@ class ComplexBaseField(BaseField):
|
|||||||
"""If field is provided ensure the value is valid."""
|
"""If field is provided ensure the value is valid."""
|
||||||
errors = {}
|
errors = {}
|
||||||
if self.field:
|
if self.field:
|
||||||
if hasattr(value, "items"):
|
if hasattr(value, "iteritems") or hasattr(value, "items"):
|
||||||
sequence = value.items()
|
sequence = iteritems(value)
|
||||||
else:
|
else:
|
||||||
sequence = enumerate(value)
|
sequence = enumerate(value)
|
||||||
for k, v in sequence:
|
for k, v in sequence:
|
||||||
@ -478,7 +480,7 @@ class ComplexBaseField(BaseField):
|
|||||||
|
|
||||||
if errors:
|
if errors:
|
||||||
field_class = self.field.__class__.__name__
|
field_class = self.field.__class__.__name__
|
||||||
self.error(f"Invalid {field_class} item ({value})", errors=errors)
|
self.error("Invalid %s item (%s)" % (field_class, value), errors=errors)
|
||||||
# Don't allow empty values if required
|
# Don't allow empty values if required
|
||||||
if self.required and not value:
|
if self.required and not value:
|
||||||
self.error("Field is required and cannot be empty")
|
self.error("Field is required and cannot be empty")
|
||||||
@ -511,9 +513,10 @@ class ObjectIdField(BaseField):
|
|||||||
def to_mongo(self, value):
|
def to_mongo(self, value):
|
||||||
if not isinstance(value, ObjectId):
|
if not isinstance(value, ObjectId):
|
||||||
try:
|
try:
|
||||||
return ObjectId(str(value))
|
return ObjectId(six.text_type(value))
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.error(str(e))
|
# e.message attribute has been deprecated since Python 2.6
|
||||||
|
self.error(six.text_type(e))
|
||||||
return value
|
return value
|
||||||
|
|
||||||
def prepare_query_value(self, op, value):
|
def prepare_query_value(self, op, value):
|
||||||
@ -521,13 +524,16 @@ class ObjectIdField(BaseField):
|
|||||||
|
|
||||||
def validate(self, value):
|
def validate(self, value):
|
||||||
try:
|
try:
|
||||||
ObjectId(str(value))
|
ObjectId(six.text_type(value))
|
||||||
except Exception:
|
except Exception:
|
||||||
self.error("Invalid ObjectID")
|
self.error("Invalid Object ID")
|
||||||
|
|
||||||
|
|
||||||
class GeoJsonBaseField(BaseField):
|
class GeoJsonBaseField(BaseField):
|
||||||
"""A geo json field storing a geojson style object."""
|
"""A geo json field storing a geojson style object.
|
||||||
|
|
||||||
|
.. versionadded:: 0.8
|
||||||
|
"""
|
||||||
|
|
||||||
_geo_index = pymongo.GEOSPHERE
|
_geo_index = pymongo.GEOSPHERE
|
||||||
_type = "GeoBase"
|
_type = "GeoBase"
|
||||||
@ -540,14 +546,14 @@ class GeoJsonBaseField(BaseField):
|
|||||||
self._name = "%sField" % self._type
|
self._name = "%sField" % self._type
|
||||||
if not auto_index:
|
if not auto_index:
|
||||||
self._geo_index = False
|
self._geo_index = False
|
||||||
super().__init__(*args, **kwargs)
|
super(GeoJsonBaseField, self).__init__(*args, **kwargs)
|
||||||
|
|
||||||
def validate(self, value):
|
def validate(self, value):
|
||||||
"""Validate the GeoJson object based on its type."""
|
"""Validate the GeoJson object based on its type."""
|
||||||
if isinstance(value, dict):
|
if isinstance(value, dict):
|
||||||
if set(value.keys()) == {"type", "coordinates"}:
|
if set(value.keys()) == {"type", "coordinates"}:
|
||||||
if value["type"] != self._type:
|
if value["type"] != self._type:
|
||||||
self.error(f'{self._name} type must be "{self._type}"')
|
self.error('%s type must be "%s"' % (self._name, self._type))
|
||||||
return self.validate(value["coordinates"])
|
return self.validate(value["coordinates"])
|
||||||
else:
|
else:
|
||||||
self.error(
|
self.error(
|
||||||
|
@ -1,12 +1,11 @@
|
|||||||
import itertools
|
import itertools
|
||||||
import warnings
|
import warnings
|
||||||
|
|
||||||
|
import six
|
||||||
|
from six import iteritems, itervalues
|
||||||
|
|
||||||
from mongoengine.base.common import _document_registry
|
from mongoengine.base.common import _document_registry
|
||||||
from mongoengine.base.fields import (
|
from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField
|
||||||
BaseField,
|
|
||||||
ComplexBaseField,
|
|
||||||
ObjectIdField,
|
|
||||||
)
|
|
||||||
from mongoengine.common import _import_class
|
from mongoengine.common import _import_class
|
||||||
from mongoengine.errors import InvalidDocumentError
|
from mongoengine.errors import InvalidDocumentError
|
||||||
from mongoengine.queryset import (
|
from mongoengine.queryset import (
|
||||||
@ -16,6 +15,7 @@ from mongoengine.queryset import (
|
|||||||
QuerySetManager,
|
QuerySetManager,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
__all__ = ("DocumentMetaclass", "TopLevelDocumentMetaclass")
|
__all__ = ("DocumentMetaclass", "TopLevelDocumentMetaclass")
|
||||||
|
|
||||||
|
|
||||||
@ -25,7 +25,7 @@ class DocumentMetaclass(type):
|
|||||||
# TODO lower complexity of this method
|
# TODO lower complexity of this method
|
||||||
def __new__(mcs, name, bases, attrs):
|
def __new__(mcs, name, bases, attrs):
|
||||||
flattened_bases = mcs._get_bases(bases)
|
flattened_bases = mcs._get_bases(bases)
|
||||||
super_new = super().__new__
|
super_new = super(DocumentMetaclass, mcs).__new__
|
||||||
|
|
||||||
# If a base class just call super
|
# If a base class just call super
|
||||||
metaclass = attrs.get("my_metaclass")
|
metaclass = attrs.get("my_metaclass")
|
||||||
@ -69,7 +69,7 @@ class DocumentMetaclass(type):
|
|||||||
# Standard object mixin - merge in any Fields
|
# Standard object mixin - merge in any Fields
|
||||||
if not hasattr(base, "_meta"):
|
if not hasattr(base, "_meta"):
|
||||||
base_fields = {}
|
base_fields = {}
|
||||||
for attr_name, attr_value in base.__dict__.items():
|
for attr_name, attr_value in iteritems(base.__dict__):
|
||||||
if not isinstance(attr_value, BaseField):
|
if not isinstance(attr_value, BaseField):
|
||||||
continue
|
continue
|
||||||
attr_value.name = attr_name
|
attr_value.name = attr_name
|
||||||
@ -81,7 +81,7 @@ class DocumentMetaclass(type):
|
|||||||
|
|
||||||
# Discover any document fields
|
# Discover any document fields
|
||||||
field_names = {}
|
field_names = {}
|
||||||
for attr_name, attr_value in attrs.items():
|
for attr_name, attr_value in iteritems(attrs):
|
||||||
if not isinstance(attr_value, BaseField):
|
if not isinstance(attr_value, BaseField):
|
||||||
continue
|
continue
|
||||||
attr_value.name = attr_name
|
attr_value.name = attr_name
|
||||||
@ -111,7 +111,9 @@ class DocumentMetaclass(type):
|
|||||||
|
|
||||||
attrs["_fields_ordered"] = tuple(
|
attrs["_fields_ordered"] = tuple(
|
||||||
i[1]
|
i[1]
|
||||||
for i in sorted((v.creation_counter, v.name) for v in doc_fields.values())
|
for i in sorted(
|
||||||
|
(v.creation_counter, v.name) for v in itervalues(doc_fields)
|
||||||
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
#
|
#
|
||||||
@ -171,8 +173,24 @@ class DocumentMetaclass(type):
|
|||||||
# Add class to the _document_registry
|
# Add class to the _document_registry
|
||||||
_document_registry[new_class._class_name] = new_class
|
_document_registry[new_class._class_name] = new_class
|
||||||
|
|
||||||
|
# In Python 2, User-defined methods objects have special read-only
|
||||||
|
# attributes 'im_func' and 'im_self' which contain the function obj
|
||||||
|
# and class instance object respectively. With Python 3 these special
|
||||||
|
# attributes have been replaced by __func__ and __self__. The Blinker
|
||||||
|
# module continues to use im_func and im_self, so the code below
|
||||||
|
# copies __func__ into im_func and __self__ into im_self for
|
||||||
|
# classmethod objects in Document derived classes.
|
||||||
|
if six.PY3:
|
||||||
|
for val in new_class.__dict__.values():
|
||||||
|
if isinstance(val, classmethod):
|
||||||
|
f = val.__get__(new_class)
|
||||||
|
if hasattr(f, "__func__") and not hasattr(f, "im_func"):
|
||||||
|
f.__dict__.update({"im_func": getattr(f, "__func__")})
|
||||||
|
if hasattr(f, "__self__") and not hasattr(f, "im_self"):
|
||||||
|
f.__dict__.update({"im_self": getattr(f, "__self__")})
|
||||||
|
|
||||||
# Handle delete rules
|
# Handle delete rules
|
||||||
for field in new_class._fields.values():
|
for field in itervalues(new_class._fields):
|
||||||
f = field
|
f = field
|
||||||
if f.owner_document is None:
|
if f.owner_document is None:
|
||||||
f.owner_document = new_class
|
f.owner_document = new_class
|
||||||
@ -234,7 +252,8 @@ class DocumentMetaclass(type):
|
|||||||
if base is object:
|
if base is object:
|
||||||
continue
|
continue
|
||||||
yield base
|
yield base
|
||||||
yield from mcs.__get_bases(base.__bases__)
|
for child_base in mcs.__get_bases(base.__bases__):
|
||||||
|
yield child_base
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def _import_classes(mcs):
|
def _import_classes(mcs):
|
||||||
@ -252,7 +271,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
|||||||
|
|
||||||
def __new__(mcs, name, bases, attrs):
|
def __new__(mcs, name, bases, attrs):
|
||||||
flattened_bases = mcs._get_bases(bases)
|
flattened_bases = mcs._get_bases(bases)
|
||||||
super_new = super().__new__
|
super_new = super(TopLevelDocumentMetaclass, mcs).__new__
|
||||||
|
|
||||||
# Set default _meta data if base class, otherwise get user defined meta
|
# Set default _meta data if base class, otherwise get user defined meta
|
||||||
if attrs.get("my_metaclass") == TopLevelDocumentMetaclass:
|
if attrs.get("my_metaclass") == TopLevelDocumentMetaclass:
|
||||||
@ -265,6 +284,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
|||||||
"indexes": [], # indexes to be ensured at runtime
|
"indexes": [], # indexes to be ensured at runtime
|
||||||
"id_field": None,
|
"id_field": None,
|
||||||
"index_background": False,
|
"index_background": False,
|
||||||
|
"index_drop_dups": False,
|
||||||
"index_opts": None,
|
"index_opts": None,
|
||||||
"delete_rules": None,
|
"delete_rules": None,
|
||||||
# allow_inheritance can be True, False, and None. True means
|
# allow_inheritance can be True, False, and None. True means
|
||||||
@ -340,7 +360,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
|||||||
# allow_inheritance to False. If the base Document allows inheritance,
|
# allow_inheritance to False. If the base Document allows inheritance,
|
||||||
# none of its subclasses can override allow_inheritance to False.
|
# none of its subclasses can override allow_inheritance to False.
|
||||||
simple_class = all(
|
simple_class = all(
|
||||||
b._meta.get("abstract") for b in flattened_bases if hasattr(b, "_meta")
|
[b._meta.get("abstract") for b in flattened_bases if hasattr(b, "_meta")]
|
||||||
)
|
)
|
||||||
if (
|
if (
|
||||||
not simple_class
|
not simple_class
|
||||||
@ -379,7 +399,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
|||||||
new_class.objects = QuerySetManager()
|
new_class.objects = QuerySetManager()
|
||||||
|
|
||||||
# Validate the fields and set primary key if needed
|
# Validate the fields and set primary key if needed
|
||||||
for field_name, field in new_class._fields.items():
|
for field_name, field in iteritems(new_class._fields):
|
||||||
if field.primary_key:
|
if field.primary_key:
|
||||||
# Ensure only one primary key is set
|
# Ensure only one primary key is set
|
||||||
current_pk = new_class._meta.get("id_field")
|
current_pk = new_class._meta.get("id_field")
|
||||||
@ -442,8 +462,8 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
|||||||
|
|
||||||
id_basename, id_db_basename, i = ("auto_id", "_auto_id", 0)
|
id_basename, id_db_basename, i = ("auto_id", "_auto_id", 0)
|
||||||
for i in itertools.count():
|
for i in itertools.count():
|
||||||
id_name = f"{id_basename}_{i}"
|
id_name = "{0}_{1}".format(id_basename, i)
|
||||||
id_db_name = f"{id_db_basename}_{i}"
|
id_db_name = "{0}_{1}".format(id_db_basename, i)
|
||||||
if id_name not in existing_fields and id_db_name not in existing_db_fields:
|
if id_name not in existing_fields and id_db_name not in existing_db_fields:
|
||||||
return id_name, id_db_name
|
return id_name, id_db_name
|
||||||
|
|
||||||
@ -456,7 +476,7 @@ class MetaDict(dict):
|
|||||||
_merge_options = ("indexes",)
|
_merge_options = ("indexes",)
|
||||||
|
|
||||||
def merge(self, new_options):
|
def merge(self, new_options):
|
||||||
for k, v in new_options.items():
|
for k, v in iteritems(new_options):
|
||||||
if k in self._merge_options:
|
if k in self._merge_options:
|
||||||
self[k] = self.get(k, []) + v
|
self[k] = self.get(k, []) + v
|
||||||
else:
|
else:
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
import re
|
import re
|
||||||
|
|
||||||
|
|
||||||
class LazyRegexCompiler:
|
class LazyRegexCompiler(object):
|
||||||
"""Descriptor to allow lazy compilation of regex"""
|
"""Descriptor to allow lazy compilation of regex"""
|
||||||
|
|
||||||
def __init__(self, pattern, flags=0):
|
def __init__(self, pattern, flags=0):
|
||||||
|
@ -1,5 +1,6 @@
|
|||||||
from pymongo import MongoClient, ReadPreference, uri_parser
|
from pymongo import MongoClient, ReadPreference, uri_parser
|
||||||
from pymongo.database import _check_name
|
from pymongo.database import _check_name
|
||||||
|
import six
|
||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
"DEFAULT_CONNECTION_NAME",
|
"DEFAULT_CONNECTION_NAME",
|
||||||
@ -38,8 +39,8 @@ def _check_db_name(name):
|
|||||||
"""Check if a database name is valid.
|
"""Check if a database name is valid.
|
||||||
This functionality is copied from pymongo Database class constructor.
|
This functionality is copied from pymongo Database class constructor.
|
||||||
"""
|
"""
|
||||||
if not isinstance(name, str):
|
if not isinstance(name, six.string_types):
|
||||||
raise TypeError("name must be an instance of %s" % str)
|
raise TypeError("name must be an instance of %s" % six.string_types)
|
||||||
elif name != "$external":
|
elif name != "$external":
|
||||||
_check_name(name)
|
_check_name(name)
|
||||||
|
|
||||||
@ -54,26 +55,28 @@ def _get_connection_settings(
|
|||||||
password=None,
|
password=None,
|
||||||
authentication_source=None,
|
authentication_source=None,
|
||||||
authentication_mechanism=None,
|
authentication_mechanism=None,
|
||||||
**kwargs,
|
**kwargs
|
||||||
):
|
):
|
||||||
"""Get the connection settings as a dict
|
"""Get the connection settings as a dict
|
||||||
|
|
||||||
:param db: the name of the database to use, for compatibility with connect
|
: param db: the name of the database to use, for compatibility with connect
|
||||||
:param name: the name of the specific database to use
|
: param name: the name of the specific database to use
|
||||||
:param host: the host name of the: program: `mongod` instance to connect to
|
: param host: the host name of the: program: `mongod` instance to connect to
|
||||||
:param port: the port that the: program: `mongod` instance is running on
|
: param port: the port that the: program: `mongod` instance is running on
|
||||||
:param read_preference: The read preference for the collection
|
: param read_preference: The read preference for the collection
|
||||||
:param username: username to authenticate with
|
: param username: username to authenticate with
|
||||||
:param password: password to authenticate with
|
: param password: password to authenticate with
|
||||||
:param authentication_source: database to authenticate against
|
: param authentication_source: database to authenticate against
|
||||||
:param authentication_mechanism: database authentication mechanisms.
|
: param authentication_mechanism: database authentication mechanisms.
|
||||||
By default, use SCRAM-SHA-1 with MongoDB 3.0 and later,
|
By default, use SCRAM-SHA-1 with MongoDB 3.0 and later,
|
||||||
MONGODB-CR (MongoDB Challenge Response protocol) for older servers.
|
MONGODB-CR (MongoDB Challenge Response protocol) for older servers.
|
||||||
:param is_mock: explicitly use mongomock for this connection
|
: param is_mock: explicitly use mongomock for this connection
|
||||||
(can also be done by using `mongomock: // ` as db host prefix)
|
(can also be done by using `mongomock: // ` as db host prefix)
|
||||||
:param kwargs: ad-hoc parameters to be passed into the pymongo driver,
|
: param kwargs: ad-hoc parameters to be passed into the pymongo driver,
|
||||||
for example maxpoolsize, tz_aware, etc. See the documentation
|
for example maxpoolsize, tz_aware, etc. See the documentation
|
||||||
for pymongo's `MongoClient` for a full list.
|
for pymongo's `MongoClient` for a full list.
|
||||||
|
|
||||||
|
.. versionchanged:: 0.10.6 - added mongomock support
|
||||||
"""
|
"""
|
||||||
conn_settings = {
|
conn_settings = {
|
||||||
"name": name or db or DEFAULT_DATABASE_NAME,
|
"name": name or db or DEFAULT_DATABASE_NAME,
|
||||||
@ -90,7 +93,7 @@ def _get_connection_settings(
|
|||||||
conn_host = conn_settings["host"]
|
conn_host = conn_settings["host"]
|
||||||
|
|
||||||
# Host can be a list or a string, so if string, force to a list.
|
# Host can be a list or a string, so if string, force to a list.
|
||||||
if isinstance(conn_host, str):
|
if isinstance(conn_host, six.string_types):
|
||||||
conn_host = [conn_host]
|
conn_host = [conn_host]
|
||||||
|
|
||||||
resolved_hosts = []
|
resolved_hosts = []
|
||||||
@ -145,7 +148,7 @@ def _get_connection_settings(
|
|||||||
# TODO simplify the code below once we drop support for
|
# TODO simplify the code below once we drop support for
|
||||||
# PyMongo v3.4.
|
# PyMongo v3.4.
|
||||||
read_pf_mode = uri_options["readpreference"]
|
read_pf_mode = uri_options["readpreference"]
|
||||||
if isinstance(read_pf_mode, str):
|
if isinstance(read_pf_mode, six.string_types):
|
||||||
read_pf_mode = read_pf_mode.lower()
|
read_pf_mode = read_pf_mode.lower()
|
||||||
for preference in read_preferences:
|
for preference in read_preferences:
|
||||||
if (
|
if (
|
||||||
@ -177,27 +180,30 @@ def register_connection(
|
|||||||
password=None,
|
password=None,
|
||||||
authentication_source=None,
|
authentication_source=None,
|
||||||
authentication_mechanism=None,
|
authentication_mechanism=None,
|
||||||
**kwargs,
|
**kwargs
|
||||||
):
|
):
|
||||||
"""Register the connection settings.
|
"""Register the connection settings.
|
||||||
|
|
||||||
:param alias: the name that will be used to refer to this connection throughout MongoEngine
|
: param alias: the name that will be used to refer to this connection
|
||||||
:param db: the name of the database to use, for compatibility with connect
|
throughout MongoEngine
|
||||||
:param name: the name of the specific database to use
|
: param db: the name of the database to use, for compatibility with connect
|
||||||
:param host: the host name of the: program: `mongod` instance to connect to
|
: param name: the name of the specific database to use
|
||||||
:param port: the port that the: program: `mongod` instance is running on
|
: param host: the host name of the: program: `mongod` instance to connect to
|
||||||
:param read_preference: The read preference for the collection
|
: param port: the port that the: program: `mongod` instance is running on
|
||||||
:param username: username to authenticate with
|
: param read_preference: The read preference for the collection
|
||||||
:param password: password to authenticate with
|
: param username: username to authenticate with
|
||||||
:param authentication_source: database to authenticate against
|
: param password: password to authenticate with
|
||||||
:param authentication_mechanism: database authentication mechanisms.
|
: param authentication_source: database to authenticate against
|
||||||
|
: param authentication_mechanism: database authentication mechanisms.
|
||||||
By default, use SCRAM-SHA-1 with MongoDB 3.0 and later,
|
By default, use SCRAM-SHA-1 with MongoDB 3.0 and later,
|
||||||
MONGODB-CR (MongoDB Challenge Response protocol) for older servers.
|
MONGODB-CR (MongoDB Challenge Response protocol) for older servers.
|
||||||
:param is_mock: explicitly use mongomock for this connection
|
: param is_mock: explicitly use mongomock for this connection
|
||||||
(can also be done by using `mongomock: // ` as db host prefix)
|
(can also be done by using `mongomock: // ` as db host prefix)
|
||||||
:param kwargs: ad-hoc parameters to be passed into the pymongo driver,
|
: param kwargs: ad-hoc parameters to be passed into the pymongo driver,
|
||||||
for example maxpoolsize, tz_aware, etc. See the documentation
|
for example maxpoolsize, tz_aware, etc. See the documentation
|
||||||
for pymongo's `MongoClient` for a full list.
|
for pymongo's `MongoClient` for a full list.
|
||||||
|
|
||||||
|
.. versionchanged:: 0.10.6 - added mongomock support
|
||||||
"""
|
"""
|
||||||
conn_settings = _get_connection_settings(
|
conn_settings = _get_connection_settings(
|
||||||
db=db,
|
db=db,
|
||||||
@ -209,15 +215,15 @@ def register_connection(
|
|||||||
password=password,
|
password=password,
|
||||||
authentication_source=authentication_source,
|
authentication_source=authentication_source,
|
||||||
authentication_mechanism=authentication_mechanism,
|
authentication_mechanism=authentication_mechanism,
|
||||||
**kwargs,
|
**kwargs
|
||||||
)
|
)
|
||||||
_connection_settings[alias] = conn_settings
|
_connection_settings[alias] = conn_settings
|
||||||
|
|
||||||
|
|
||||||
def disconnect(alias=DEFAULT_CONNECTION_NAME):
|
def disconnect(alias=DEFAULT_CONNECTION_NAME):
|
||||||
"""Close the connection with a given alias."""
|
"""Close the connection with a given alias."""
|
||||||
from mongoengine import Document
|
|
||||||
from mongoengine.base.common import _get_documents_by_db
|
from mongoengine.base.common import _get_documents_by_db
|
||||||
|
from mongoengine import Document
|
||||||
|
|
||||||
if alias in _connections:
|
if alias in _connections:
|
||||||
get_connection(alias=alias).close()
|
get_connection(alias=alias).close()
|
||||||
@ -312,7 +318,7 @@ def _create_connection(alias, connection_class, **connection_settings):
|
|||||||
try:
|
try:
|
||||||
return connection_class(**connection_settings)
|
return connection_class(**connection_settings)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
raise ConnectionFailure(f"Cannot connect to database {alias} :\n{e}")
|
raise ConnectionFailure("Cannot connect to database %s :\n%s" % (alias, e))
|
||||||
|
|
||||||
|
|
||||||
def _find_existing_connection(connection_settings):
|
def _find_existing_connection(connection_settings):
|
||||||
@ -381,6 +387,8 @@ def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs):
|
|||||||
|
|
||||||
See the docstring for `register_connection` for more details about all
|
See the docstring for `register_connection` for more details about all
|
||||||
supported kwargs.
|
supported kwargs.
|
||||||
|
|
||||||
|
.. versionchanged:: 0.6 - added multiple database support.
|
||||||
"""
|
"""
|
||||||
if alias in _connections:
|
if alias in _connections:
|
||||||
prev_conn_setting = _connection_settings[alias]
|
prev_conn_setting = _connection_settings[alias]
|
||||||
@ -388,8 +396,8 @@ def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs):
|
|||||||
|
|
||||||
if new_conn_settings != prev_conn_setting:
|
if new_conn_settings != prev_conn_setting:
|
||||||
err_msg = (
|
err_msg = (
|
||||||
"A different connection with alias `{}` was already "
|
u"A different connection with alias `{}` was already "
|
||||||
"registered. Use disconnect() first"
|
u"registered. Use disconnect() first"
|
||||||
).format(alias)
|
).format(alias)
|
||||||
raise ConnectionFailure(err_msg)
|
raise ConnectionFailure(err_msg)
|
||||||
else:
|
else:
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
from contextlib import contextmanager
|
from contextlib import contextmanager
|
||||||
|
|
||||||
from pymongo.read_concern import ReadConcern
|
|
||||||
from pymongo.write_concern import WriteConcern
|
from pymongo.write_concern import WriteConcern
|
||||||
|
from six import iteritems
|
||||||
|
|
||||||
from mongoengine.common import _import_class
|
from mongoengine.common import _import_class
|
||||||
from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
|
from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
|
||||||
@ -14,11 +14,10 @@ __all__ = (
|
|||||||
"no_sub_classes",
|
"no_sub_classes",
|
||||||
"query_counter",
|
"query_counter",
|
||||||
"set_write_concern",
|
"set_write_concern",
|
||||||
"set_read_write_concern",
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class switch_db:
|
class switch_db(object):
|
||||||
"""switch_db alias context manager.
|
"""switch_db alias context manager.
|
||||||
|
|
||||||
Example ::
|
Example ::
|
||||||
@ -59,7 +58,7 @@ class switch_db:
|
|||||||
self.cls._collection = self.collection
|
self.cls._collection = self.collection
|
||||||
|
|
||||||
|
|
||||||
class switch_collection:
|
class switch_collection(object):
|
||||||
"""switch_collection alias context manager.
|
"""switch_collection alias context manager.
|
||||||
|
|
||||||
Example ::
|
Example ::
|
||||||
@ -101,7 +100,7 @@ class switch_collection:
|
|||||||
self.cls._get_collection_name = self.ori_get_collection_name
|
self.cls._get_collection_name = self.ori_get_collection_name
|
||||||
|
|
||||||
|
|
||||||
class no_dereference:
|
class no_dereference(object):
|
||||||
"""no_dereference context manager.
|
"""no_dereference context manager.
|
||||||
|
|
||||||
Turns off all dereferencing in Documents for the duration of the context
|
Turns off all dereferencing in Documents for the duration of the context
|
||||||
@ -124,7 +123,7 @@ class no_dereference:
|
|||||||
|
|
||||||
self.deref_fields = [
|
self.deref_fields = [
|
||||||
k
|
k
|
||||||
for k, v in self.cls._fields.items()
|
for k, v in iteritems(self.cls._fields)
|
||||||
if isinstance(v, (ReferenceField, GenericReferenceField, ComplexBaseField))
|
if isinstance(v, (ReferenceField, GenericReferenceField, ComplexBaseField))
|
||||||
]
|
]
|
||||||
|
|
||||||
@ -141,7 +140,7 @@ class no_dereference:
|
|||||||
return self.cls
|
return self.cls
|
||||||
|
|
||||||
|
|
||||||
class no_sub_classes:
|
class no_sub_classes(object):
|
||||||
"""no_sub_classes context manager.
|
"""no_sub_classes context manager.
|
||||||
|
|
||||||
Only returns instances of this class and no sub (inherited) classes::
|
Only returns instances of this class and no sub (inherited) classes::
|
||||||
@ -169,7 +168,7 @@ class no_sub_classes:
|
|||||||
self.cls._subclasses = self.cls_initial_subclasses
|
self.cls._subclasses = self.cls_initial_subclasses
|
||||||
|
|
||||||
|
|
||||||
class query_counter:
|
class query_counter(object):
|
||||||
"""Query_counter context manager to get the number of queries.
|
"""Query_counter context manager to get the number of queries.
|
||||||
This works by updating the `profiling_level` of the database so that all queries get logged,
|
This works by updating the `profiling_level` of the database so that all queries get logged,
|
||||||
resetting the db.system.profile collection at the beginning of the context and counting the new entries.
|
resetting the db.system.profile collection at the beginning of the context and counting the new entries.
|
||||||
@ -177,28 +176,15 @@ class query_counter:
|
|||||||
This was designed for debugging purpose. In fact it is a global counter so queries issued by other threads/processes
|
This was designed for debugging purpose. In fact it is a global counter so queries issued by other threads/processes
|
||||||
can interfere with it
|
can interfere with it
|
||||||
|
|
||||||
Usage:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
class User(Document):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
with query_counter() as q:
|
|
||||||
user = User(name='Bob')
|
|
||||||
assert q == 0 # no query fired yet
|
|
||||||
user.save()
|
|
||||||
assert q == 1 # 1 query was fired, an 'insert'
|
|
||||||
user_bis = User.objects().first()
|
|
||||||
assert q == 2 # a 2nd query was fired, a 'find_one'
|
|
||||||
|
|
||||||
Be aware that:
|
Be aware that:
|
||||||
|
- Iterating over large amount of documents (>101) makes pymongo issue `getmore` queries to fetch the next batch of
|
||||||
- Iterating over large amount of documents (>101) makes pymongo issue `getmore` queries to fetch the next batch of documents (https://docs.mongodb.com/manual/tutorial/iterate-a-cursor/#cursor-batches)
|
documents (https://docs.mongodb.com/manual/tutorial/iterate-a-cursor/#cursor-batches)
|
||||||
- Some queries are ignored by default by the counter (killcursors, db.system.indexes)
|
- Some queries are ignored by default by the counter (killcursors, db.system.indexes)
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, alias=DEFAULT_CONNECTION_NAME):
|
def __init__(self, alias=DEFAULT_CONNECTION_NAME):
|
||||||
|
"""Construct the query_counter
|
||||||
|
"""
|
||||||
self.db = get_db(alias=alias)
|
self.db = get_db(alias=alias)
|
||||||
self.initial_profiling_level = None
|
self.initial_profiling_level = None
|
||||||
self._ctx_query_counter = 0 # number of queries issued by the context
|
self._ctx_query_counter = 0 # number of queries issued by the context
|
||||||
@ -249,7 +235,7 @@ class query_counter:
|
|||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
"""repr query_counter as the number of queries."""
|
"""repr query_counter as the number of queries."""
|
||||||
return "%s" % self._get_count()
|
return u"%s" % self._get_count()
|
||||||
|
|
||||||
def _get_count(self):
|
def _get_count(self):
|
||||||
"""Get the number of queries by counting the current number of entries in db.system.profile
|
"""Get the number of queries by counting the current number of entries in db.system.profile
|
||||||
@ -271,21 +257,3 @@ def set_write_concern(collection, write_concerns):
|
|||||||
combined_concerns = dict(collection.write_concern.document.items())
|
combined_concerns = dict(collection.write_concern.document.items())
|
||||||
combined_concerns.update(write_concerns)
|
combined_concerns.update(write_concerns)
|
||||||
yield collection.with_options(write_concern=WriteConcern(**combined_concerns))
|
yield collection.with_options(write_concern=WriteConcern(**combined_concerns))
|
||||||
|
|
||||||
|
|
||||||
@contextmanager
|
|
||||||
def set_read_write_concern(collection, write_concerns, read_concerns):
|
|
||||||
combined_write_concerns = dict(collection.write_concern.document.items())
|
|
||||||
|
|
||||||
if write_concerns is not None:
|
|
||||||
combined_write_concerns.update(write_concerns)
|
|
||||||
|
|
||||||
combined_read_concerns = dict(collection.read_concern.document.items())
|
|
||||||
|
|
||||||
if read_concerns is not None:
|
|
||||||
combined_read_concerns.update(read_concerns)
|
|
||||||
|
|
||||||
yield collection.with_options(
|
|
||||||
write_concern=WriteConcern(**combined_write_concerns),
|
|
||||||
read_concern=ReadConcern(**combined_read_concerns),
|
|
||||||
)
|
|
||||||
|
@ -1,4 +1,6 @@
|
|||||||
from bson import SON, DBRef
|
from bson import DBRef, SON
|
||||||
|
import six
|
||||||
|
from six import iteritems
|
||||||
|
|
||||||
from mongoengine.base import (
|
from mongoengine.base import (
|
||||||
BaseDict,
|
BaseDict,
|
||||||
@ -10,16 +12,11 @@ from mongoengine.base import (
|
|||||||
from mongoengine.base.datastructures import LazyReference
|
from mongoengine.base.datastructures import LazyReference
|
||||||
from mongoengine.connection import get_db
|
from mongoengine.connection import get_db
|
||||||
from mongoengine.document import Document, EmbeddedDocument
|
from mongoengine.document import Document, EmbeddedDocument
|
||||||
from mongoengine.fields import (
|
from mongoengine.fields import DictField, ListField, MapField, ReferenceField
|
||||||
DictField,
|
|
||||||
ListField,
|
|
||||||
MapField,
|
|
||||||
ReferenceField,
|
|
||||||
)
|
|
||||||
from mongoengine.queryset import QuerySet
|
from mongoengine.queryset import QuerySet
|
||||||
|
|
||||||
|
|
||||||
class DeReference:
|
class DeReference(object):
|
||||||
def __call__(self, items, max_depth=1, instance=None, name=None):
|
def __call__(self, items, max_depth=1, instance=None, name=None):
|
||||||
"""
|
"""
|
||||||
Cheaply dereferences the items to a set depth.
|
Cheaply dereferences the items to a set depth.
|
||||||
@ -33,7 +30,7 @@ class DeReference:
|
|||||||
:class:`~mongoengine.base.ComplexBaseField`
|
:class:`~mongoengine.base.ComplexBaseField`
|
||||||
:param get: A boolean determining if being called by __get__
|
:param get: A boolean determining if being called by __get__
|
||||||
"""
|
"""
|
||||||
if items is None or isinstance(items, str):
|
if items is None or isinstance(items, six.string_types):
|
||||||
return items
|
return items
|
||||||
|
|
||||||
# cheapest way to convert a queryset to a list
|
# cheapest way to convert a queryset to a list
|
||||||
@ -56,10 +53,10 @@ class DeReference:
|
|||||||
doc_type = doc_type.document_type
|
doc_type = doc_type.document_type
|
||||||
is_list = not hasattr(items, "items")
|
is_list = not hasattr(items, "items")
|
||||||
|
|
||||||
if is_list and all(i.__class__ == doc_type for i in items):
|
if is_list and all([i.__class__ == doc_type for i in items]):
|
||||||
return items
|
return items
|
||||||
elif not is_list and all(
|
elif not is_list and all(
|
||||||
i.__class__ == doc_type for i in items.values()
|
[i.__class__ == doc_type for i in items.values()]
|
||||||
):
|
):
|
||||||
return items
|
return items
|
||||||
elif not field.dbref:
|
elif not field.dbref:
|
||||||
@ -82,7 +79,7 @@ class DeReference:
|
|||||||
|
|
||||||
def _get_items_from_dict(items):
|
def _get_items_from_dict(items):
|
||||||
new_items = {}
|
new_items = {}
|
||||||
for k, v in items.items():
|
for k, v in iteritems(items):
|
||||||
value = v
|
value = v
|
||||||
if isinstance(v, list):
|
if isinstance(v, list):
|
||||||
value = _get_items_from_list(v)
|
value = _get_items_from_list(v)
|
||||||
@ -123,7 +120,7 @@ class DeReference:
|
|||||||
depth += 1
|
depth += 1
|
||||||
for item in iterator:
|
for item in iterator:
|
||||||
if isinstance(item, (Document, EmbeddedDocument)):
|
if isinstance(item, (Document, EmbeddedDocument)):
|
||||||
for field_name, field in item._fields.items():
|
for field_name, field in iteritems(item._fields):
|
||||||
v = item._data.get(field_name, None)
|
v = item._data.get(field_name, None)
|
||||||
if isinstance(v, LazyReference):
|
if isinstance(v, LazyReference):
|
||||||
# LazyReference inherits DBRef but should not be dereferenced here !
|
# LazyReference inherits DBRef but should not be dereferenced here !
|
||||||
@ -139,7 +136,7 @@ class DeReference:
|
|||||||
getattr(field, "field", None), "document_type", None
|
getattr(field, "field", None), "document_type", None
|
||||||
)
|
)
|
||||||
references = self._find_references(v, depth)
|
references = self._find_references(v, depth)
|
||||||
for key, refs in references.items():
|
for key, refs in iteritems(references):
|
||||||
if isinstance(
|
if isinstance(
|
||||||
field_cls, (Document, TopLevelDocumentMetaclass)
|
field_cls, (Document, TopLevelDocumentMetaclass)
|
||||||
):
|
):
|
||||||
@ -156,15 +153,16 @@ class DeReference:
|
|||||||
)
|
)
|
||||||
elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth:
|
elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth:
|
||||||
references = self._find_references(item, depth - 1)
|
references = self._find_references(item, depth - 1)
|
||||||
for key, refs in references.items():
|
for key, refs in iteritems(references):
|
||||||
reference_map.setdefault(key, set()).update(refs)
|
reference_map.setdefault(key, set()).update(refs)
|
||||||
|
|
||||||
return reference_map
|
return reference_map
|
||||||
|
|
||||||
def _fetch_objects(self, doc_type=None):
|
def _fetch_objects(self, doc_type=None):
|
||||||
"""Fetch all references and convert to their document objects"""
|
"""Fetch all references and convert to their document objects
|
||||||
|
"""
|
||||||
object_map = {}
|
object_map = {}
|
||||||
for collection, dbrefs in self.reference_map.items():
|
for collection, dbrefs in iteritems(self.reference_map):
|
||||||
|
|
||||||
# we use getattr instead of hasattr because hasattr swallows any exception under python2
|
# we use getattr instead of hasattr because hasattr swallows any exception under python2
|
||||||
# so it could hide nasty things without raising exceptions (cfr bug #1688))
|
# so it could hide nasty things without raising exceptions (cfr bug #1688))
|
||||||
@ -176,7 +174,7 @@ class DeReference:
|
|||||||
dbref for dbref in dbrefs if (col_name, dbref) not in object_map
|
dbref for dbref in dbrefs if (col_name, dbref) not in object_map
|
||||||
]
|
]
|
||||||
references = collection.objects.in_bulk(refs)
|
references = collection.objects.in_bulk(refs)
|
||||||
for key, doc in references.items():
|
for key, doc in iteritems(references):
|
||||||
object_map[(col_name, key)] = doc
|
object_map[(col_name, key)] = doc
|
||||||
else: # Generic reference: use the refs data to convert to document
|
else: # Generic reference: use the refs data to convert to document
|
||||||
if isinstance(doc_type, (ListField, DictField, MapField)):
|
if isinstance(doc_type, (ListField, DictField, MapField)):
|
||||||
@ -252,7 +250,7 @@ class DeReference:
|
|||||||
data = []
|
data = []
|
||||||
else:
|
else:
|
||||||
is_list = False
|
is_list = False
|
||||||
iterator = items.items()
|
iterator = iteritems(items)
|
||||||
data = {}
|
data = {}
|
||||||
|
|
||||||
depth += 1
|
depth += 1
|
||||||
@ -276,12 +274,14 @@ class DeReference:
|
|||||||
(v["_ref"].collection, v["_ref"].id), v
|
(v["_ref"].collection, v["_ref"].id), v
|
||||||
)
|
)
|
||||||
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
|
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
|
||||||
item_name = f"{name}.{k}.{field_name}"
|
item_name = six.text_type("{0}.{1}.{2}").format(
|
||||||
|
name, k, field_name
|
||||||
|
)
|
||||||
data[k]._data[field_name] = self._attach_objects(
|
data[k]._data[field_name] = self._attach_objects(
|
||||||
v, depth, instance=instance, name=item_name
|
v, depth, instance=instance, name=item_name
|
||||||
)
|
)
|
||||||
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
|
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
|
||||||
item_name = f"{name}.{k}" if name else name
|
item_name = "%s.%s" % (name, k) if name else name
|
||||||
data[k] = self._attach_objects(
|
data[k] = self._attach_objects(
|
||||||
v, depth - 1, instance=instance, name=item_name
|
v, depth - 1, instance=instance, name=item_name
|
||||||
)
|
)
|
||||||
|
@ -1,8 +1,11 @@
|
|||||||
import re
|
import re
|
||||||
|
import warnings
|
||||||
|
|
||||||
import pymongo
|
|
||||||
from bson.dbref import DBRef
|
from bson.dbref import DBRef
|
||||||
|
import pymongo
|
||||||
from pymongo.read_preferences import ReadPreference
|
from pymongo.read_preferences import ReadPreference
|
||||||
|
import six
|
||||||
|
from six import iteritems
|
||||||
|
|
||||||
from mongoengine import signals
|
from mongoengine import signals
|
||||||
from mongoengine.base import (
|
from mongoengine.base import (
|
||||||
@ -16,23 +19,14 @@ from mongoengine.base import (
|
|||||||
)
|
)
|
||||||
from mongoengine.common import _import_class
|
from mongoengine.common import _import_class
|
||||||
from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
|
from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
|
||||||
from mongoengine.context_managers import (
|
from mongoengine.context_managers import set_write_concern, switch_collection, switch_db
|
||||||
set_write_concern,
|
|
||||||
switch_collection,
|
|
||||||
switch_db,
|
|
||||||
)
|
|
||||||
from mongoengine.errors import (
|
from mongoengine.errors import (
|
||||||
InvalidDocumentError,
|
InvalidDocumentError,
|
||||||
InvalidQueryError,
|
InvalidQueryError,
|
||||||
SaveConditionError,
|
SaveConditionError,
|
||||||
)
|
)
|
||||||
from mongoengine.pymongo_support import list_collection_names
|
from mongoengine.pymongo_support import list_collection_names
|
||||||
from mongoengine.queryset import (
|
from mongoengine.queryset import NotUniqueError, OperationError, QuerySet, transform
|
||||||
NotUniqueError,
|
|
||||||
OperationError,
|
|
||||||
QuerySet,
|
|
||||||
transform,
|
|
||||||
)
|
|
||||||
|
|
||||||
__all__ = (
|
__all__ = (
|
||||||
"Document",
|
"Document",
|
||||||
@ -50,7 +44,7 @@ def includes_cls(fields):
|
|||||||
"""Helper function used for ensuring and comparing indexes."""
|
"""Helper function used for ensuring and comparing indexes."""
|
||||||
first_field = None
|
first_field = None
|
||||||
if len(fields):
|
if len(fields):
|
||||||
if isinstance(fields[0], str):
|
if isinstance(fields[0], six.string_types):
|
||||||
first_field = fields[0]
|
first_field = fields[0]
|
||||||
elif isinstance(fields[0], (list, tuple)) and len(fields[0]):
|
elif isinstance(fields[0], (list, tuple)) and len(fields[0]):
|
||||||
first_field = fields[0][0]
|
first_field = fields[0][0]
|
||||||
@ -61,8 +55,8 @@ class InvalidCollectionError(Exception):
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
class EmbeddedDocument(BaseDocument, metaclass=DocumentMetaclass):
|
class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)):
|
||||||
r"""A :class:`~mongoengine.Document` that isn't stored in its own
|
"""A :class:`~mongoengine.Document` that isn't stored in its own
|
||||||
collection. :class:`~mongoengine.EmbeddedDocument`\ s should be used as
|
collection. :class:`~mongoengine.EmbeddedDocument`\ s should be used as
|
||||||
fields on :class:`~mongoengine.Document`\ s through the
|
fields on :class:`~mongoengine.Document`\ s through the
|
||||||
:class:`~mongoengine.EmbeddedDocumentField` field type.
|
:class:`~mongoengine.EmbeddedDocumentField` field type.
|
||||||
@ -77,6 +71,7 @@ class EmbeddedDocument(BaseDocument, metaclass=DocumentMetaclass):
|
|||||||
|
|
||||||
__slots__ = ("_instance",)
|
__slots__ = ("_instance",)
|
||||||
|
|
||||||
|
# The __metaclass__ attribute is removed by 2to3 when running with Python3
|
||||||
# my_metaclass is defined so that metaclass can be queried in Python 2 & 3
|
# my_metaclass is defined so that metaclass can be queried in Python 2 & 3
|
||||||
my_metaclass = DocumentMetaclass
|
my_metaclass = DocumentMetaclass
|
||||||
|
|
||||||
@ -87,7 +82,7 @@ class EmbeddedDocument(BaseDocument, metaclass=DocumentMetaclass):
|
|||||||
__hash__ = None
|
__hash__ = None
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
super().__init__(*args, **kwargs)
|
super(EmbeddedDocument, self).__init__(*args, **kwargs)
|
||||||
self._instance = None
|
self._instance = None
|
||||||
self._changed_fields = []
|
self._changed_fields = []
|
||||||
|
|
||||||
@ -99,17 +94,8 @@ class EmbeddedDocument(BaseDocument, metaclass=DocumentMetaclass):
|
|||||||
def __ne__(self, other):
|
def __ne__(self, other):
|
||||||
return not self.__eq__(other)
|
return not self.__eq__(other)
|
||||||
|
|
||||||
def __getstate__(self):
|
|
||||||
data = super().__getstate__()
|
|
||||||
data["_instance"] = None
|
|
||||||
return data
|
|
||||||
|
|
||||||
def __setstate__(self, state):
|
|
||||||
super().__setstate__(state)
|
|
||||||
self._instance = state["_instance"]
|
|
||||||
|
|
||||||
def to_mongo(self, *args, **kwargs):
|
def to_mongo(self, *args, **kwargs):
|
||||||
data = super().to_mongo(*args, **kwargs)
|
data = super(EmbeddedDocument, self).to_mongo(*args, **kwargs)
|
||||||
|
|
||||||
# remove _id from the SON if it's in it and it's None
|
# remove _id from the SON if it's in it and it's None
|
||||||
if "_id" in data and data["_id"] is None:
|
if "_id" in data and data["_id"] is None:
|
||||||
@ -118,7 +104,7 @@ class EmbeddedDocument(BaseDocument, metaclass=DocumentMetaclass):
|
|||||||
return data
|
return data
|
||||||
|
|
||||||
|
|
||||||
class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
|
class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
|
||||||
"""The base class used for defining the structure and properties of
|
"""The base class used for defining the structure and properties of
|
||||||
collections of documents stored in MongoDB. Inherit from this class, and
|
collections of documents stored in MongoDB. Inherit from this class, and
|
||||||
add fields as class attributes to define a document's structure.
|
add fields as class attributes to define a document's structure.
|
||||||
@ -127,7 +113,7 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
|
|||||||
|
|
||||||
By default, the MongoDB collection used to store documents created using a
|
By default, the MongoDB collection used to store documents created using a
|
||||||
:class:`~mongoengine.Document` subclass will be the name of the subclass
|
:class:`~mongoengine.Document` subclass will be the name of the subclass
|
||||||
converted to snake_case. A different collection may be specified by
|
converted to lowercase. A different collection may be specified by
|
||||||
providing :attr:`collection` to the :attr:`meta` dictionary in the class
|
providing :attr:`collection` to the :attr:`meta` dictionary in the class
|
||||||
definition.
|
definition.
|
||||||
|
|
||||||
@ -135,7 +121,7 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
|
|||||||
create a specialised version of the document that will be stored in the
|
create a specialised version of the document that will be stored in the
|
||||||
same collection. To facilitate this behaviour a `_cls`
|
same collection. To facilitate this behaviour a `_cls`
|
||||||
field is added to documents (hidden though the MongoEngine interface).
|
field is added to documents (hidden though the MongoEngine interface).
|
||||||
To enable this behaviour set :attr:`allow_inheritance` to ``True`` in the
|
To enable this behaviourset :attr:`allow_inheritance` to ``True`` in the
|
||||||
:attr:`meta` dictionary.
|
:attr:`meta` dictionary.
|
||||||
|
|
||||||
A :class:`~mongoengine.Document` may use a **Capped Collection** by
|
A :class:`~mongoengine.Document` may use a **Capped Collection** by
|
||||||
@ -170,6 +156,7 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
|
|||||||
in the :attr:`meta` dictionary.
|
in the :attr:`meta` dictionary.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
# The __metaclass__ attribute is removed by 2to3 when running with Python3
|
||||||
# my_metaclass is defined so that metaclass can be queried in Python 2 & 3
|
# my_metaclass is defined so that metaclass can be queried in Python 2 & 3
|
||||||
my_metaclass = TopLevelDocumentMetaclass
|
my_metaclass = TopLevelDocumentMetaclass
|
||||||
|
|
||||||
@ -273,7 +260,7 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
|
|||||||
return db.create_collection(collection_name, **opts)
|
return db.create_collection(collection_name, **opts)
|
||||||
|
|
||||||
def to_mongo(self, *args, **kwargs):
|
def to_mongo(self, *args, **kwargs):
|
||||||
data = super().to_mongo(*args, **kwargs)
|
data = super(Document, self).to_mongo(*args, **kwargs)
|
||||||
|
|
||||||
# If '_id' is None, try and set it from self._data. If that
|
# If '_id' is None, try and set it from self._data. If that
|
||||||
# doesn't exist either, remove '_id' from the SON completely.
|
# doesn't exist either, remove '_id' from the SON completely.
|
||||||
@ -341,11 +328,11 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
|
|||||||
_refs=None,
|
_refs=None,
|
||||||
save_condition=None,
|
save_condition=None,
|
||||||
signal_kwargs=None,
|
signal_kwargs=None,
|
||||||
**kwargs,
|
**kwargs
|
||||||
):
|
):
|
||||||
"""Save the :class:`~mongoengine.Document` to the database. If the
|
"""Save the :class:`~mongoengine.Document` to the database. If the
|
||||||
document already exists, it will be updated, otherwise it will be
|
document already exists, it will be updated, otherwise it will be
|
||||||
created. Returns the saved object instance.
|
created.
|
||||||
|
|
||||||
:param force_insert: only try to create a new document, don't allow
|
:param force_insert: only try to create a new document, don't allow
|
||||||
updates of existing documents.
|
updates of existing documents.
|
||||||
@ -384,6 +371,15 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
|
|||||||
meta['cascade'] = True. Also you can pass different kwargs to
|
meta['cascade'] = True. Also you can pass different kwargs to
|
||||||
the cascade save using cascade_kwargs which overwrites the
|
the cascade save using cascade_kwargs which overwrites the
|
||||||
existing kwargs with custom values.
|
existing kwargs with custom values.
|
||||||
|
.. versionchanged:: 0.8.5
|
||||||
|
Optional save_condition that only overwrites existing documents
|
||||||
|
if the condition is satisfied in the current db record.
|
||||||
|
.. versionchanged:: 0.10
|
||||||
|
:class:`OperationError` exception raised if save_condition fails.
|
||||||
|
.. versionchanged:: 0.10.1
|
||||||
|
:class: save_condition failure now raises a `SaveConditionError`
|
||||||
|
.. versionchanged:: 0.10.7
|
||||||
|
Add signal_kwargs argument
|
||||||
"""
|
"""
|
||||||
signal_kwargs = signal_kwargs or {}
|
signal_kwargs = signal_kwargs or {}
|
||||||
|
|
||||||
@ -435,16 +431,16 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
|
|||||||
self.cascade_save(**kwargs)
|
self.cascade_save(**kwargs)
|
||||||
|
|
||||||
except pymongo.errors.DuplicateKeyError as err:
|
except pymongo.errors.DuplicateKeyError as err:
|
||||||
message = "Tried to save duplicate unique keys (%s)"
|
message = u"Tried to save duplicate unique keys (%s)"
|
||||||
raise NotUniqueError(message % err)
|
raise NotUniqueError(message % six.text_type(err))
|
||||||
except pymongo.errors.OperationFailure as err:
|
except pymongo.errors.OperationFailure as err:
|
||||||
message = "Could not save document (%s)"
|
message = "Could not save document (%s)"
|
||||||
if re.match("^E1100[01] duplicate key", str(err)):
|
if re.match("^E1100[01] duplicate key", six.text_type(err)):
|
||||||
# E11000 - duplicate key error index
|
# E11000 - duplicate key error index
|
||||||
# E11001 - duplicate key on update
|
# E11001 - duplicate key on update
|
||||||
message = "Tried to save duplicate unique keys (%s)"
|
message = u"Tried to save duplicate unique keys (%s)"
|
||||||
raise NotUniqueError(message % err)
|
raise NotUniqueError(message % six.text_type(err))
|
||||||
raise OperationError(message % err)
|
raise OperationError(message % six.text_type(err))
|
||||||
|
|
||||||
# Make sure we store the PK on this document now that it's saved
|
# Make sure we store the PK on this document now that it's saved
|
||||||
id_field = self._meta["id_field"]
|
id_field = self._meta["id_field"]
|
||||||
@ -472,9 +468,9 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
|
|||||||
# insert_one will provoke UniqueError alongside save does not
|
# insert_one will provoke UniqueError alongside save does not
|
||||||
# therefore, it need to catch and call replace_one.
|
# therefore, it need to catch and call replace_one.
|
||||||
if "_id" in doc:
|
if "_id" in doc:
|
||||||
select_dict = {"_id": doc["_id"]}
|
raw_object = wc_collection.find_one_and_replace(
|
||||||
select_dict = self._integrate_shard_key(doc, select_dict)
|
{"_id": doc["_id"]}, doc
|
||||||
raw_object = wc_collection.find_one_and_replace(select_dict, doc)
|
)
|
||||||
if raw_object:
|
if raw_object:
|
||||||
return doc["_id"]
|
return doc["_id"]
|
||||||
|
|
||||||
@ -497,23 +493,6 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
|
|||||||
|
|
||||||
return update_doc
|
return update_doc
|
||||||
|
|
||||||
def _integrate_shard_key(self, doc, select_dict):
|
|
||||||
"""Integrates the collection's shard key to the `select_dict`, which will be used for the query.
|
|
||||||
The value from the shard key is taken from the `doc` and finally the select_dict is returned.
|
|
||||||
"""
|
|
||||||
|
|
||||||
# Need to add shard key to query, or you get an error
|
|
||||||
shard_key = self._meta.get("shard_key", tuple())
|
|
||||||
for k in shard_key:
|
|
||||||
path = self._lookup_field(k.split("."))
|
|
||||||
actual_key = [p.db_field for p in path]
|
|
||||||
val = doc
|
|
||||||
for ak in actual_key:
|
|
||||||
val = val[ak]
|
|
||||||
select_dict[".".join(actual_key)] = val
|
|
||||||
|
|
||||||
return select_dict
|
|
||||||
|
|
||||||
def _save_update(self, doc, save_condition, write_concern):
|
def _save_update(self, doc, save_condition, write_concern):
|
||||||
"""Update an existing document.
|
"""Update an existing document.
|
||||||
|
|
||||||
@ -529,7 +508,15 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
|
|||||||
|
|
||||||
select_dict["_id"] = object_id
|
select_dict["_id"] = object_id
|
||||||
|
|
||||||
select_dict = self._integrate_shard_key(doc, select_dict)
|
# Need to add shard key to query, or you get an error
|
||||||
|
shard_key = self._meta.get("shard_key", tuple())
|
||||||
|
for k in shard_key:
|
||||||
|
path = self._lookup_field(k.split("."))
|
||||||
|
actual_key = [p.db_field for p in path]
|
||||||
|
val = doc
|
||||||
|
for ak in actual_key:
|
||||||
|
val = val[ak]
|
||||||
|
select_dict[".".join(actual_key)] = val
|
||||||
|
|
||||||
update_doc = self._get_update_doc()
|
update_doc = self._get_update_doc()
|
||||||
if update_doc:
|
if update_doc:
|
||||||
@ -572,7 +559,7 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
|
|||||||
if not getattr(ref, "_changed_fields", True):
|
if not getattr(ref, "_changed_fields", True):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
ref_id = f"{ref.__class__.__name__},{str(ref._data)}"
|
ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data))
|
||||||
if ref and ref_id not in _refs:
|
if ref and ref_id not in _refs:
|
||||||
_refs.append(ref_id)
|
_refs.append(ref_id)
|
||||||
kwargs["_refs"] = _refs
|
kwargs["_refs"] = _refs
|
||||||
@ -583,7 +570,7 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
|
|||||||
def _qs(self):
|
def _qs(self):
|
||||||
"""Return the default queryset corresponding to this document."""
|
"""Return the default queryset corresponding to this document."""
|
||||||
if not hasattr(self, "__objects"):
|
if not hasattr(self, "__objects"):
|
||||||
self.__objects = QuerySet(self.__class__, self._get_collection())
|
self.__objects = QuerySet(self, self._get_collection())
|
||||||
return self.__objects
|
return self.__objects
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@ -638,13 +625,16 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
|
|||||||
For example, ``save(..., w: 2, fsync: True)`` will
|
For example, ``save(..., w: 2, fsync: True)`` will
|
||||||
wait until at least two servers have recorded the write and
|
wait until at least two servers have recorded the write and
|
||||||
will force an fsync on the primary server.
|
will force an fsync on the primary server.
|
||||||
|
|
||||||
|
.. versionchanged:: 0.10.7
|
||||||
|
Add signal_kwargs argument
|
||||||
"""
|
"""
|
||||||
signal_kwargs = signal_kwargs or {}
|
signal_kwargs = signal_kwargs or {}
|
||||||
signals.pre_delete.send(self.__class__, document=self, **signal_kwargs)
|
signals.pre_delete.send(self.__class__, document=self, **signal_kwargs)
|
||||||
|
|
||||||
# Delete FileFields separately
|
# Delete FileFields separately
|
||||||
FileField = _import_class("FileField")
|
FileField = _import_class("FileField")
|
||||||
for name, field in self._fields.items():
|
for name, field in iteritems(self._fields):
|
||||||
if isinstance(field, FileField):
|
if isinstance(field, FileField):
|
||||||
getattr(self, name).delete()
|
getattr(self, name).delete()
|
||||||
|
|
||||||
@ -653,7 +643,7 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
|
|||||||
write_concern=write_concern, _from_doc_delete=True
|
write_concern=write_concern, _from_doc_delete=True
|
||||||
)
|
)
|
||||||
except pymongo.errors.OperationFailure as err:
|
except pymongo.errors.OperationFailure as err:
|
||||||
message = "Could not delete document (%s)" % err.args
|
message = u"Could not delete document (%s)" % err.message
|
||||||
raise OperationError(message)
|
raise OperationError(message)
|
||||||
signals.post_delete.send(self.__class__, document=self, **signal_kwargs)
|
signals.post_delete.send(self.__class__, document=self, **signal_kwargs)
|
||||||
|
|
||||||
@ -719,6 +709,8 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
|
|||||||
def select_related(self, max_depth=1):
|
def select_related(self, max_depth=1):
|
||||||
"""Handles dereferencing of :class:`~bson.dbref.DBRef` objects to
|
"""Handles dereferencing of :class:`~bson.dbref.DBRef` objects to
|
||||||
a maximum depth in order to cut down the number queries to mongodb.
|
a maximum depth in order to cut down the number queries to mongodb.
|
||||||
|
|
||||||
|
.. versionadded:: 0.5
|
||||||
"""
|
"""
|
||||||
DeReference = _import_class("DeReference")
|
DeReference = _import_class("DeReference")
|
||||||
DeReference()([self], max_depth + 1)
|
DeReference()([self], max_depth + 1)
|
||||||
@ -729,6 +721,10 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
|
|||||||
|
|
||||||
:param fields: (optional) args list of fields to reload
|
:param fields: (optional) args list of fields to reload
|
||||||
:param max_depth: (optional) depth of dereferencing to follow
|
:param max_depth: (optional) depth of dereferencing to follow
|
||||||
|
|
||||||
|
.. versionadded:: 0.1.2
|
||||||
|
.. versionchanged:: 0.6 Now chainable
|
||||||
|
.. versionchanged:: 0.9 Can provide specific fields to reload
|
||||||
"""
|
"""
|
||||||
max_depth = 1
|
max_depth = 1
|
||||||
if fields and isinstance(fields[0], int):
|
if fields and isinstance(fields[0], int):
|
||||||
@ -830,6 +826,9 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
|
|||||||
|
|
||||||
Raises :class:`OperationError` if the document has no collection set
|
Raises :class:`OperationError` if the document has no collection set
|
||||||
(i.g. if it is `abstract`)
|
(i.g. if it is `abstract`)
|
||||||
|
|
||||||
|
.. versionchanged:: 0.10.7
|
||||||
|
:class:`OperationError` exception raised if no collection available
|
||||||
"""
|
"""
|
||||||
coll_name = cls._get_collection_name()
|
coll_name = cls._get_collection_name()
|
||||||
if not coll_name:
|
if not coll_name:
|
||||||
@ -852,13 +851,17 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
|
|||||||
index_spec = cls._build_index_spec(keys)
|
index_spec = cls._build_index_spec(keys)
|
||||||
index_spec = index_spec.copy()
|
index_spec = index_spec.copy()
|
||||||
fields = index_spec.pop("fields")
|
fields = index_spec.pop("fields")
|
||||||
|
drop_dups = kwargs.get("drop_dups", False)
|
||||||
|
if drop_dups:
|
||||||
|
msg = "drop_dups is deprecated and is removed when using PyMongo 3+."
|
||||||
|
warnings.warn(msg, DeprecationWarning)
|
||||||
index_spec["background"] = background
|
index_spec["background"] = background
|
||||||
index_spec.update(kwargs)
|
index_spec.update(kwargs)
|
||||||
|
|
||||||
return cls._get_collection().create_index(fields, **index_spec)
|
return cls._get_collection().create_index(fields, **index_spec)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def ensure_index(cls, key_or_list, background=False, **kwargs):
|
def ensure_index(cls, key_or_list, drop_dups=False, background=False, **kwargs):
|
||||||
"""Ensure that the given indexes are in place. Deprecated in favour
|
"""Ensure that the given indexes are in place. Deprecated in favour
|
||||||
of create_index.
|
of create_index.
|
||||||
|
|
||||||
@ -866,7 +869,12 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
|
|||||||
construct a multi-field index); keys may be prefixed with a **+**
|
construct a multi-field index); keys may be prefixed with a **+**
|
||||||
or a **-** to determine the index ordering
|
or a **-** to determine the index ordering
|
||||||
:param background: Allows index creation in the background
|
:param background: Allows index creation in the background
|
||||||
|
:param drop_dups: Was removed/ignored with MongoDB >2.7.5. The value
|
||||||
|
will be removed if PyMongo3+ is used
|
||||||
"""
|
"""
|
||||||
|
if drop_dups:
|
||||||
|
msg = "drop_dups is deprecated and is removed when using PyMongo 3+."
|
||||||
|
warnings.warn(msg, DeprecationWarning)
|
||||||
return cls.create_index(key_or_list, background=background, **kwargs)
|
return cls.create_index(key_or_list, background=background, **kwargs)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
@ -875,16 +883,16 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
|
|||||||
|
|
||||||
Global defaults can be set in the meta - see :doc:`guide/defining-documents`
|
Global defaults can be set in the meta - see :doc:`guide/defining-documents`
|
||||||
|
|
||||||
By default, this will get called automatically upon first interaction with the
|
|
||||||
Document collection (query, save, etc) so unless you disabled `auto_create_index`, you
|
|
||||||
shouldn't have to call this manually.
|
|
||||||
|
|
||||||
.. note:: You can disable automatic index creation by setting
|
.. note:: You can disable automatic index creation by setting
|
||||||
`auto_create_index` to False in the documents meta data
|
`auto_create_index` to False in the documents meta data
|
||||||
"""
|
"""
|
||||||
background = cls._meta.get("index_background", False)
|
background = cls._meta.get("index_background", False)
|
||||||
|
drop_dups = cls._meta.get("index_drop_dups", False)
|
||||||
index_opts = cls._meta.get("index_opts") or {}
|
index_opts = cls._meta.get("index_opts") or {}
|
||||||
index_cls = cls._meta.get("index_cls", True)
|
index_cls = cls._meta.get("index_cls", True)
|
||||||
|
if drop_dups:
|
||||||
|
msg = "drop_dups is deprecated and is removed when using PyMongo 3+."
|
||||||
|
warnings.warn(msg, DeprecationWarning)
|
||||||
|
|
||||||
collection = cls._get_collection()
|
collection = cls._get_collection()
|
||||||
# 746: when connection is via mongos, the read preference is not necessarily an indication that
|
# 746: when connection is via mongos, the read preference is not necessarily an indication that
|
||||||
@ -928,10 +936,8 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def list_indexes(cls):
|
def list_indexes(cls):
|
||||||
"""Lists all indexes that should be created for the Document collection.
|
""" Lists all of the indexes that should be created for given
|
||||||
It includes all the indexes from super- and sub-classes.
|
collection. It includes all the indexes from super- and sub-classes.
|
||||||
|
|
||||||
Note that it will only return the indexes' fields, not the indexes' options
|
|
||||||
"""
|
"""
|
||||||
if cls._meta.get("abstract"):
|
if cls._meta.get("abstract"):
|
||||||
return []
|
return []
|
||||||
@ -986,16 +992,16 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
|
|||||||
indexes.append(index)
|
indexes.append(index)
|
||||||
|
|
||||||
# finish up by appending { '_id': 1 } and { '_cls': 1 }, if needed
|
# finish up by appending { '_id': 1 } and { '_cls': 1 }, if needed
|
||||||
if [("_id", 1)] not in indexes:
|
if [(u"_id", 1)] not in indexes:
|
||||||
indexes.append([("_id", 1)])
|
indexes.append([(u"_id", 1)])
|
||||||
if cls._meta.get("index_cls", True) and cls._meta.get("allow_inheritance"):
|
if cls._meta.get("index_cls", True) and cls._meta.get("allow_inheritance"):
|
||||||
indexes.append([("_cls", 1)])
|
indexes.append([(u"_cls", 1)])
|
||||||
|
|
||||||
return indexes
|
return indexes
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def compare_indexes(cls):
|
def compare_indexes(cls):
|
||||||
"""Compares the indexes defined in MongoEngine with the ones
|
""" Compares the indexes defined in MongoEngine with the ones
|
||||||
existing in the database. Returns any missing/extra indexes.
|
existing in the database. Returns any missing/extra indexes.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@ -1013,19 +1019,19 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
|
|||||||
extra = [index for index in existing if index not in required]
|
extra = [index for index in existing if index not in required]
|
||||||
|
|
||||||
# if { _cls: 1 } is missing, make sure it's *really* necessary
|
# if { _cls: 1 } is missing, make sure it's *really* necessary
|
||||||
if [("_cls", 1)] in missing:
|
if [(u"_cls", 1)] in missing:
|
||||||
cls_obsolete = False
|
cls_obsolete = False
|
||||||
for index in existing:
|
for index in existing:
|
||||||
if includes_cls(index) and index not in extra:
|
if includes_cls(index) and index not in extra:
|
||||||
cls_obsolete = True
|
cls_obsolete = True
|
||||||
break
|
break
|
||||||
if cls_obsolete:
|
if cls_obsolete:
|
||||||
missing.remove([("_cls", 1)])
|
missing.remove([(u"_cls", 1)])
|
||||||
|
|
||||||
return {"missing": missing, "extra": extra}
|
return {"missing": missing, "extra": extra}
|
||||||
|
|
||||||
|
|
||||||
class DynamicDocument(Document, metaclass=TopLevelDocumentMetaclass):
|
class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)):
|
||||||
"""A Dynamic Document class allowing flexible, expandable and uncontrolled
|
"""A Dynamic Document class allowing flexible, expandable and uncontrolled
|
||||||
schemas. As a :class:`~mongoengine.Document` subclass, acts in the same
|
schemas. As a :class:`~mongoengine.Document` subclass, acts in the same
|
||||||
way as an ordinary document but has expanded style properties. Any data
|
way as an ordinary document but has expanded style properties. Any data
|
||||||
@ -1039,6 +1045,7 @@ class DynamicDocument(Document, metaclass=TopLevelDocumentMetaclass):
|
|||||||
There is one caveat on Dynamic Documents: undeclared fields cannot start with `_`
|
There is one caveat on Dynamic Documents: undeclared fields cannot start with `_`
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
# The __metaclass__ attribute is removed by 2to3 when running with Python3
|
||||||
# my_metaclass is defined so that metaclass can be queried in Python 2 & 3
|
# my_metaclass is defined so that metaclass can be queried in Python 2 & 3
|
||||||
my_metaclass = TopLevelDocumentMetaclass
|
my_metaclass = TopLevelDocumentMetaclass
|
||||||
|
|
||||||
@ -1053,15 +1060,16 @@ class DynamicDocument(Document, metaclass=TopLevelDocumentMetaclass):
|
|||||||
setattr(self, field_name, None)
|
setattr(self, field_name, None)
|
||||||
self._dynamic_fields[field_name].null = False
|
self._dynamic_fields[field_name].null = False
|
||||||
else:
|
else:
|
||||||
super().__delattr__(*args, **kwargs)
|
super(DynamicDocument, self).__delattr__(*args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
class DynamicEmbeddedDocument(EmbeddedDocument, metaclass=DocumentMetaclass):
|
class DynamicEmbeddedDocument(six.with_metaclass(DocumentMetaclass, EmbeddedDocument)):
|
||||||
"""A Dynamic Embedded Document class allowing flexible, expandable and
|
"""A Dynamic Embedded Document class allowing flexible, expandable and
|
||||||
uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more
|
uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more
|
||||||
information about dynamic documents.
|
information about dynamic documents.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
# The __metaclass__ attribute is removed by 2to3 when running with Python3
|
||||||
# my_metaclass is defined so that metaclass can be queried in Python 2 & 3
|
# my_metaclass is defined so that metaclass can be queried in Python 2 & 3
|
||||||
my_metaclass = DocumentMetaclass
|
my_metaclass = DocumentMetaclass
|
||||||
|
|
||||||
@ -1081,7 +1089,7 @@ class DynamicEmbeddedDocument(EmbeddedDocument, metaclass=DocumentMetaclass):
|
|||||||
setattr(self, field_name, None)
|
setattr(self, field_name, None)
|
||||||
|
|
||||||
|
|
||||||
class MapReduceDocument:
|
class MapReduceDocument(object):
|
||||||
"""A document returned from a map/reduce query.
|
"""A document returned from a map/reduce query.
|
||||||
|
|
||||||
:param collection: An instance of :class:`~pymongo.Collection`
|
:param collection: An instance of :class:`~pymongo.Collection`
|
||||||
@ -1090,6 +1098,8 @@ class MapReduceDocument:
|
|||||||
an ``ObjectId`` found in the given ``collection``,
|
an ``ObjectId`` found in the given ``collection``,
|
||||||
the object can be accessed via the ``object`` property.
|
the object can be accessed via the ``object`` property.
|
||||||
:param value: The result(s) for this key.
|
:param value: The result(s) for this key.
|
||||||
|
|
||||||
|
.. versionadded:: 0.3
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, document, collection, key, value):
|
def __init__(self, document, collection, key, value):
|
||||||
|
@ -1,5 +1,8 @@
|
|||||||
from collections import defaultdict
|
from collections import defaultdict
|
||||||
|
|
||||||
|
import six
|
||||||
|
from six import iteritems
|
||||||
|
|
||||||
__all__ = (
|
__all__ = (
|
||||||
"NotRegistered",
|
"NotRegistered",
|
||||||
"InvalidDocumentError",
|
"InvalidDocumentError",
|
||||||
@ -17,15 +20,11 @@ __all__ = (
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class MongoEngineException(Exception):
|
class NotRegistered(Exception):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
class NotRegistered(MongoEngineException):
|
class InvalidDocumentError(Exception):
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class InvalidDocumentError(MongoEngineException):
|
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
@ -33,19 +32,19 @@ class LookUpError(AttributeError):
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
class DoesNotExist(MongoEngineException):
|
class DoesNotExist(Exception):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
class MultipleObjectsReturned(MongoEngineException):
|
class MultipleObjectsReturned(Exception):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
class InvalidQueryError(MongoEngineException):
|
class InvalidQueryError(Exception):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
class OperationError(MongoEngineException):
|
class OperationError(Exception):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
@ -61,7 +60,7 @@ class SaveConditionError(OperationError):
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
class FieldDoesNotExist(MongoEngineException):
|
class FieldDoesNotExist(Exception):
|
||||||
"""Raised when trying to set a field
|
"""Raised when trying to set a field
|
||||||
not declared in a :class:`~mongoengine.Document`
|
not declared in a :class:`~mongoengine.Document`
|
||||||
or an :class:`~mongoengine.EmbeddedDocument`.
|
or an :class:`~mongoengine.EmbeddedDocument`.
|
||||||
@ -88,24 +87,24 @@ class ValidationError(AssertionError):
|
|||||||
_message = None
|
_message = None
|
||||||
|
|
||||||
def __init__(self, message="", **kwargs):
|
def __init__(self, message="", **kwargs):
|
||||||
super().__init__(message)
|
super(ValidationError, self).__init__(message)
|
||||||
self.errors = kwargs.get("errors", {})
|
self.errors = kwargs.get("errors", {})
|
||||||
self.field_name = kwargs.get("field_name")
|
self.field_name = kwargs.get("field_name")
|
||||||
self.message = message
|
self.message = message
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return str(self.message)
|
return six.text_type(self.message)
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return f"{self.__class__.__name__}({self.message},)"
|
return "%s(%s,)" % (self.__class__.__name__, self.message)
|
||||||
|
|
||||||
def __getattribute__(self, name):
|
def __getattribute__(self, name):
|
||||||
message = super().__getattribute__(name)
|
message = super(ValidationError, self).__getattribute__(name)
|
||||||
if name == "message":
|
if name == "message":
|
||||||
if self.field_name:
|
if self.field_name:
|
||||||
message = "%s" % message
|
message = "%s" % message
|
||||||
if self.errors:
|
if self.errors:
|
||||||
message = f"{message}({self._format_errors()})"
|
message = "%s(%s)" % (message, self._format_errors())
|
||||||
return message
|
return message
|
||||||
|
|
||||||
def _get_message(self):
|
def _get_message(self):
|
||||||
@ -127,12 +126,12 @@ class ValidationError(AssertionError):
|
|||||||
def build_dict(source):
|
def build_dict(source):
|
||||||
errors_dict = {}
|
errors_dict = {}
|
||||||
if isinstance(source, dict):
|
if isinstance(source, dict):
|
||||||
for field_name, error in source.items():
|
for field_name, error in iteritems(source):
|
||||||
errors_dict[field_name] = build_dict(error)
|
errors_dict[field_name] = build_dict(error)
|
||||||
elif isinstance(source, ValidationError) and source.errors:
|
elif isinstance(source, ValidationError) and source.errors:
|
||||||
return build_dict(source.errors)
|
return build_dict(source.errors)
|
||||||
else:
|
else:
|
||||||
return str(source)
|
return six.text_type(source)
|
||||||
|
|
||||||
return errors_dict
|
return errors_dict
|
||||||
|
|
||||||
@ -148,18 +147,18 @@ class ValidationError(AssertionError):
|
|||||||
if isinstance(value, list):
|
if isinstance(value, list):
|
||||||
value = " ".join([generate_key(k) for k in value])
|
value = " ".join([generate_key(k) for k in value])
|
||||||
elif isinstance(value, dict):
|
elif isinstance(value, dict):
|
||||||
value = " ".join([generate_key(v, k) for k, v in value.items()])
|
value = " ".join([generate_key(v, k) for k, v in iteritems(value)])
|
||||||
|
|
||||||
results = f"{prefix}.{value}" if prefix else value
|
results = "%s.%s" % (prefix, value) if prefix else value
|
||||||
return results
|
return results
|
||||||
|
|
||||||
error_dict = defaultdict(list)
|
error_dict = defaultdict(list)
|
||||||
for k, v in self.to_dict().items():
|
for k, v in iteritems(self.to_dict()):
|
||||||
error_dict[generate_key(v)].append(k)
|
error_dict[generate_key(v)].append(k)
|
||||||
return " ".join([f"{k}: {v}" for k, v in error_dict.items()])
|
return " ".join(["%s: %s" % (k, v) for k, v in iteritems(error_dict)])
|
||||||
|
|
||||||
|
|
||||||
class DeprecatedError(MongoEngineException):
|
class DeprecatedError(Exception):
|
||||||
"""Raise when a user uses a feature that has been Deprecated"""
|
"""Raise when a user uses a feature that has been Deprecated"""
|
||||||
|
|
||||||
pass
|
pass
|
||||||
|
File diff suppressed because it is too large
Load Diff
@ -3,16 +3,15 @@ Helper functions, constants, and types to aid with MongoDB version support
|
|||||||
"""
|
"""
|
||||||
from mongoengine.connection import get_connection
|
from mongoengine.connection import get_connection
|
||||||
|
|
||||||
|
|
||||||
# Constant that can be used to compare the version retrieved with
|
# Constant that can be used to compare the version retrieved with
|
||||||
# get_mongodb_version()
|
# get_mongodb_version()
|
||||||
MONGODB_34 = (3, 4)
|
MONGODB_34 = (3, 4)
|
||||||
MONGODB_36 = (3, 6)
|
MONGODB_36 = (3, 6)
|
||||||
MONGODB_42 = (4, 2)
|
|
||||||
MONGODB_44 = (4, 4)
|
|
||||||
|
|
||||||
|
|
||||||
def get_mongodb_version():
|
def get_mongodb_version():
|
||||||
"""Return the version of the default connected mongoDB (first 2 digits)
|
"""Return the version of the connected mongoDB (first 2 digits)
|
||||||
|
|
||||||
:return: tuple(int, int)
|
:return: tuple(int, int)
|
||||||
"""
|
"""
|
||||||
|
@ -2,7 +2,6 @@
|
|||||||
Helper functions, constants, and types to aid with PyMongo v2.7 - v3.x support.
|
Helper functions, constants, and types to aid with PyMongo v2.7 - v3.x support.
|
||||||
"""
|
"""
|
||||||
import pymongo
|
import pymongo
|
||||||
from pymongo.errors import OperationFailure
|
|
||||||
|
|
||||||
_PYMONGO_37 = (3, 7)
|
_PYMONGO_37 = (3, 7)
|
||||||
|
|
||||||
@ -11,40 +10,13 @@ PYMONGO_VERSION = tuple(pymongo.version_tuple[:2])
|
|||||||
IS_PYMONGO_GTE_37 = PYMONGO_VERSION >= _PYMONGO_37
|
IS_PYMONGO_GTE_37 = PYMONGO_VERSION >= _PYMONGO_37
|
||||||
|
|
||||||
|
|
||||||
def count_documents(
|
def count_documents(collection, filter):
|
||||||
collection, filter, skip=None, limit=None, hint=None, collation=None
|
|
||||||
):
|
|
||||||
"""Pymongo>3.7 deprecates count in favour of count_documents"""
|
"""Pymongo>3.7 deprecates count in favour of count_documents"""
|
||||||
if limit == 0:
|
|
||||||
return 0 # Pymongo raises an OperationFailure if called with limit=0
|
|
||||||
|
|
||||||
kwargs = {}
|
|
||||||
if skip is not None:
|
|
||||||
kwargs["skip"] = skip
|
|
||||||
if limit is not None:
|
|
||||||
kwargs["limit"] = limit
|
|
||||||
if hint not in (-1, None):
|
|
||||||
kwargs["hint"] = hint
|
|
||||||
if collation is not None:
|
|
||||||
kwargs["collation"] = collation
|
|
||||||
|
|
||||||
# count_documents appeared in pymongo 3.7
|
|
||||||
if IS_PYMONGO_GTE_37:
|
if IS_PYMONGO_GTE_37:
|
||||||
try:
|
return collection.count_documents(filter)
|
||||||
return collection.count_documents(filter=filter, **kwargs)
|
else:
|
||||||
except OperationFailure:
|
count = collection.find(filter).count()
|
||||||
# OperationFailure - accounts for some operators that used to work
|
return count
|
||||||
# with .count but are no longer working with count_documents (i.e $geoNear, $near, and $nearSphere)
|
|
||||||
# fallback to deprecated Cursor.count
|
|
||||||
# Keeping this should be reevaluated the day pymongo removes .count entirely
|
|
||||||
pass
|
|
||||||
|
|
||||||
cursor = collection.find(filter)
|
|
||||||
for option, option_value in kwargs.items():
|
|
||||||
cursor_method = getattr(cursor, option)
|
|
||||||
cursor = cursor_method(option_value)
|
|
||||||
with_limit_and_skip = "skip" in kwargs or "limit" in kwargs
|
|
||||||
return cursor.count(with_limit_and_skip=with_limit_and_skip)
|
|
||||||
|
|
||||||
|
|
||||||
def list_collection_names(db, include_system_collections=False):
|
def list_collection_names(db, include_system_collections=False):
|
||||||
|
23
mongoengine/python_support.py
Normal file
23
mongoengine/python_support.py
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
"""
|
||||||
|
Helper functions, constants, and types to aid with Python v2.7 - v3.x support
|
||||||
|
"""
|
||||||
|
import six
|
||||||
|
|
||||||
|
# six.BytesIO resolves to StringIO.StringIO in Py2 and io.BytesIO in Py3.
|
||||||
|
StringIO = six.BytesIO
|
||||||
|
|
||||||
|
# Additionally for Py2, try to use the faster cStringIO, if available
|
||||||
|
if not six.PY3:
|
||||||
|
try:
|
||||||
|
import cStringIO
|
||||||
|
except ImportError:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
StringIO = cStringIO.StringIO
|
||||||
|
|
||||||
|
|
||||||
|
if six.PY3:
|
||||||
|
from collections.abc import Hashable
|
||||||
|
else:
|
||||||
|
# raises DeprecationWarnings in Python >=3.7
|
||||||
|
from collections import Hashable
|
@ -1,26 +1,24 @@
|
|||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
import copy
|
import copy
|
||||||
import itertools
|
import itertools
|
||||||
import re
|
import re
|
||||||
import warnings
|
import warnings
|
||||||
from collections.abc import Mapping
|
|
||||||
|
|
||||||
import pymongo
|
|
||||||
import pymongo.errors
|
|
||||||
from bson import SON, json_util
|
from bson import SON, json_util
|
||||||
from bson.code import Code
|
from bson.code import Code
|
||||||
|
import pymongo
|
||||||
|
import pymongo.errors
|
||||||
from pymongo.collection import ReturnDocument
|
from pymongo.collection import ReturnDocument
|
||||||
from pymongo.common import validate_read_preference
|
from pymongo.common import validate_read_preference
|
||||||
from pymongo.read_concern import ReadConcern
|
import six
|
||||||
|
from six import iteritems
|
||||||
|
|
||||||
from mongoengine import signals
|
from mongoengine import signals
|
||||||
from mongoengine.base import get_document
|
from mongoengine.base import get_document
|
||||||
from mongoengine.common import _import_class
|
from mongoengine.common import _import_class
|
||||||
from mongoengine.connection import get_db
|
from mongoengine.connection import get_db
|
||||||
from mongoengine.context_managers import (
|
from mongoengine.context_managers import set_write_concern, switch_db
|
||||||
set_read_write_concern,
|
|
||||||
set_write_concern,
|
|
||||||
switch_db,
|
|
||||||
)
|
|
||||||
from mongoengine.errors import (
|
from mongoengine.errors import (
|
||||||
BulkWriteError,
|
BulkWriteError,
|
||||||
InvalidQueryError,
|
InvalidQueryError,
|
||||||
@ -28,11 +26,11 @@ from mongoengine.errors import (
|
|||||||
NotUniqueError,
|
NotUniqueError,
|
||||||
OperationError,
|
OperationError,
|
||||||
)
|
)
|
||||||
from mongoengine.pymongo_support import count_documents
|
|
||||||
from mongoengine.queryset import transform
|
from mongoengine.queryset import transform
|
||||||
from mongoengine.queryset.field_list import QueryFieldList
|
from mongoengine.queryset.field_list import QueryFieldList
|
||||||
from mongoengine.queryset.visitor import Q, QNode
|
from mongoengine.queryset.visitor import Q, QNode
|
||||||
|
|
||||||
|
|
||||||
__all__ = ("BaseQuerySet", "DO_NOTHING", "NULLIFY", "CASCADE", "DENY", "PULL")
|
__all__ = ("BaseQuerySet", "DO_NOTHING", "NULLIFY", "CASCADE", "DENY", "PULL")
|
||||||
|
|
||||||
# Delete rules
|
# Delete rules
|
||||||
@ -43,7 +41,7 @@ DENY = 3
|
|||||||
PULL = 4
|
PULL = 4
|
||||||
|
|
||||||
|
|
||||||
class BaseQuerySet:
|
class BaseQuerySet(object):
|
||||||
"""A set of results returned from a query. Wraps a MongoDB cursor,
|
"""A set of results returned from a query. Wraps a MongoDB cursor,
|
||||||
providing :class:`~mongoengine.Document` objects as the results.
|
providing :class:`~mongoengine.Document` objects as the results.
|
||||||
"""
|
"""
|
||||||
@ -62,9 +60,8 @@ class BaseQuerySet:
|
|||||||
self._ordering = None
|
self._ordering = None
|
||||||
self._snapshot = False
|
self._snapshot = False
|
||||||
self._timeout = True
|
self._timeout = True
|
||||||
self._allow_disk_use = False
|
self._slave_okay = False
|
||||||
self._read_preference = None
|
self._read_preference = None
|
||||||
self._read_concern = None
|
|
||||||
self._iter = False
|
self._iter = False
|
||||||
self._scalar = []
|
self._scalar = []
|
||||||
self._none = False
|
self._none = False
|
||||||
@ -83,20 +80,13 @@ class BaseQuerySet:
|
|||||||
self._cursor_obj = None
|
self._cursor_obj = None
|
||||||
self._limit = None
|
self._limit = None
|
||||||
self._skip = None
|
self._skip = None
|
||||||
|
|
||||||
self._hint = -1 # Using -1 as None is a valid value for hint
|
self._hint = -1 # Using -1 as None is a valid value for hint
|
||||||
self._collation = None
|
self._collation = None
|
||||||
self._batch_size = None
|
self._batch_size = None
|
||||||
|
self.only_fields = []
|
||||||
self._max_time_ms = None
|
self._max_time_ms = None
|
||||||
self._comment = None
|
self._comment = None
|
||||||
|
|
||||||
# Hack - As people expect cursor[5:5] to return
|
|
||||||
# an empty result set. It's hard to do that right, though, because the
|
|
||||||
# server uses limit(0) to mean 'no limit'. So we set _empty
|
|
||||||
# in that case and check for it when iterating. We also unset
|
|
||||||
# it anytime we change _limit. Inspired by how it is done in pymongo.Cursor
|
|
||||||
self._empty = False
|
|
||||||
|
|
||||||
def __call__(self, q_obj=None, **query):
|
def __call__(self, q_obj=None, **query):
|
||||||
"""Filter the selected documents by calling the
|
"""Filter the selected documents by calling the
|
||||||
:class:`~mongoengine.queryset.QuerySet` with a query.
|
:class:`~mongoengine.queryset.QuerySet` with a query.
|
||||||
@ -169,7 +159,6 @@ class BaseQuerySet:
|
|||||||
[<User: User object>, <User: User object>]
|
[<User: User object>, <User: User object>]
|
||||||
"""
|
"""
|
||||||
queryset = self.clone()
|
queryset = self.clone()
|
||||||
queryset._empty = False
|
|
||||||
|
|
||||||
# Handle a slice
|
# Handle a slice
|
||||||
if isinstance(key, slice):
|
if isinstance(key, slice):
|
||||||
@ -177,8 +166,6 @@ class BaseQuerySet:
|
|||||||
queryset._skip, queryset._limit = key.start, key.stop
|
queryset._skip, queryset._limit = key.start, key.stop
|
||||||
if key.start and key.stop:
|
if key.start and key.stop:
|
||||||
queryset._limit = key.stop - key.start
|
queryset._limit = key.stop - key.start
|
||||||
if queryset._limit == 0:
|
|
||||||
queryset._empty = True
|
|
||||||
|
|
||||||
# Allow further QuerySet modifications to be performed
|
# Allow further QuerySet modifications to be performed
|
||||||
return queryset
|
return queryset
|
||||||
@ -190,6 +177,7 @@ class BaseQuerySet:
|
|||||||
queryset._document._from_son(
|
queryset._document._from_son(
|
||||||
queryset._cursor[key],
|
queryset._cursor[key],
|
||||||
_auto_dereference=self._auto_dereference,
|
_auto_dereference=self._auto_dereference,
|
||||||
|
only_fields=self.only_fields,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -199,6 +187,7 @@ class BaseQuerySet:
|
|||||||
return queryset._document._from_son(
|
return queryset._document._from_son(
|
||||||
queryset._cursor[key],
|
queryset._cursor[key],
|
||||||
_auto_dereference=self._auto_dereference,
|
_auto_dereference=self._auto_dereference,
|
||||||
|
only_fields=self.only_fields,
|
||||||
)
|
)
|
||||||
|
|
||||||
raise TypeError("Provide a slice or an integer index")
|
raise TypeError("Provide a slice or an integer index")
|
||||||
@ -215,6 +204,8 @@ class BaseQuerySet:
|
|||||||
"""Avoid to open all records in an if stmt in Py3."""
|
"""Avoid to open all records in an if stmt in Py3."""
|
||||||
return self._has_data()
|
return self._has_data()
|
||||||
|
|
||||||
|
__nonzero__ = __bool__ # For Py2 support
|
||||||
|
|
||||||
# Core functions
|
# Core functions
|
||||||
|
|
||||||
def all(self):
|
def all(self):
|
||||||
@ -257,30 +248,34 @@ class BaseQuerySet:
|
|||||||
`DocumentName.MultipleObjectsReturned` exception if multiple results
|
`DocumentName.MultipleObjectsReturned` exception if multiple results
|
||||||
and :class:`~mongoengine.queryset.DoesNotExist` or
|
and :class:`~mongoengine.queryset.DoesNotExist` or
|
||||||
`DocumentName.DoesNotExist` if no results are found.
|
`DocumentName.DoesNotExist` if no results are found.
|
||||||
|
|
||||||
|
.. versionadded:: 0.3
|
||||||
"""
|
"""
|
||||||
queryset = self.clone()
|
queryset = self.clone()
|
||||||
queryset = queryset.order_by().limit(2)
|
queryset = queryset.order_by().limit(2)
|
||||||
queryset = queryset.filter(*q_objs, **query)
|
queryset = queryset.filter(*q_objs, **query)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
result = next(queryset)
|
result = six.next(queryset)
|
||||||
except StopIteration:
|
except StopIteration:
|
||||||
msg = "%s matching query does not exist." % queryset._document._class_name
|
msg = "%s matching query does not exist." % queryset._document._class_name
|
||||||
raise queryset._document.DoesNotExist(msg)
|
raise queryset._document.DoesNotExist(msg)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
# Check if there is another match
|
six.next(queryset)
|
||||||
next(queryset)
|
|
||||||
except StopIteration:
|
except StopIteration:
|
||||||
return result
|
return result
|
||||||
|
|
||||||
# If we were able to retrieve the 2nd doc, raise the MultipleObjectsReturned exception.
|
# If we were able to retrieve the 2nd doc, rewind the cursor and
|
||||||
raise queryset._document.MultipleObjectsReturned(
|
# raise the MultipleObjectsReturned exception.
|
||||||
"2 or more items returned, instead of 1"
|
queryset.rewind()
|
||||||
)
|
message = u"%d items returned, instead of 1" % queryset.count()
|
||||||
|
raise queryset._document.MultipleObjectsReturned(message)
|
||||||
|
|
||||||
def create(self, **kwargs):
|
def create(self, **kwargs):
|
||||||
"""Create new object. Returns the saved object instance."""
|
"""Create new object. Returns the saved object instance.
|
||||||
|
|
||||||
|
.. versionadded:: 0.4
|
||||||
|
"""
|
||||||
return self._document(**kwargs).save(force_insert=True)
|
return self._document(**kwargs).save(force_insert=True)
|
||||||
|
|
||||||
def first(self):
|
def first(self):
|
||||||
@ -312,6 +307,10 @@ class BaseQuerySet:
|
|||||||
|
|
||||||
By default returns document instances, set ``load_bulk`` to False to
|
By default returns document instances, set ``load_bulk`` to False to
|
||||||
return just ``ObjectIds``
|
return just ``ObjectIds``
|
||||||
|
|
||||||
|
.. versionadded:: 0.5
|
||||||
|
.. versionchanged:: 0.10.7
|
||||||
|
Add signal_kwargs argument
|
||||||
"""
|
"""
|
||||||
Document = _import_class("Document")
|
Document = _import_class("Document")
|
||||||
|
|
||||||
@ -354,20 +353,20 @@ class BaseQuerySet:
|
|||||||
)
|
)
|
||||||
except pymongo.errors.DuplicateKeyError as err:
|
except pymongo.errors.DuplicateKeyError as err:
|
||||||
message = "Could not save document (%s)"
|
message = "Could not save document (%s)"
|
||||||
raise NotUniqueError(message % err)
|
raise NotUniqueError(message % six.text_type(err))
|
||||||
except pymongo.errors.BulkWriteError as err:
|
except pymongo.errors.BulkWriteError as err:
|
||||||
# inserting documents that already have an _id field will
|
# inserting documents that already have an _id field will
|
||||||
# give huge performance debt or raise
|
# give huge performance debt or raise
|
||||||
message = "Bulk write error: (%s)"
|
message = u"Bulk write error: (%s)"
|
||||||
raise BulkWriteError(message % err.details)
|
raise BulkWriteError(message % six.text_type(err.details))
|
||||||
except pymongo.errors.OperationFailure as err:
|
except pymongo.errors.OperationFailure as err:
|
||||||
message = "Could not save document (%s)"
|
message = "Could not save document (%s)"
|
||||||
if re.match("^E1100[01] duplicate key", str(err)):
|
if re.match("^E1100[01] duplicate key", six.text_type(err)):
|
||||||
# E11000 - duplicate key error index
|
# E11000 - duplicate key error index
|
||||||
# E11001 - duplicate key on update
|
# E11001 - duplicate key on update
|
||||||
message = "Tried to save duplicate unique keys (%s)"
|
message = u"Tried to save duplicate unique keys (%s)"
|
||||||
raise NotUniqueError(message % err)
|
raise NotUniqueError(message % six.text_type(err))
|
||||||
raise OperationError(message % err)
|
raise OperationError(message % six.text_type(err))
|
||||||
|
|
||||||
# Apply inserted_ids to documents
|
# Apply inserted_ids to documents
|
||||||
for doc, doc_id in zip(docs, ids):
|
for doc, doc_id in zip(docs, ids):
|
||||||
@ -393,36 +392,9 @@ class BaseQuerySet:
|
|||||||
:meth:`skip` that has been applied to this cursor into account when
|
:meth:`skip` that has been applied to this cursor into account when
|
||||||
getting the count
|
getting the count
|
||||||
"""
|
"""
|
||||||
# mimic the fact that setting .limit(0) in pymongo sets no limit
|
if self._limit == 0 and with_limit_and_skip is False or self._none:
|
||||||
# https://docs.mongodb.com/manual/reference/method/cursor.limit/#zero-value
|
|
||||||
if (
|
|
||||||
self._limit == 0
|
|
||||||
and with_limit_and_skip is False
|
|
||||||
or self._none
|
|
||||||
or self._empty
|
|
||||||
):
|
|
||||||
return 0
|
return 0
|
||||||
|
count = self._cursor.count(with_limit_and_skip=with_limit_and_skip)
|
||||||
kwargs = (
|
|
||||||
{"limit": self._limit, "skip": self._skip} if with_limit_and_skip else {}
|
|
||||||
)
|
|
||||||
|
|
||||||
if self._limit == 0:
|
|
||||||
# mimic the fact that historically .limit(0) sets no limit
|
|
||||||
kwargs.pop("limit", None)
|
|
||||||
|
|
||||||
if self._hint not in (-1, None):
|
|
||||||
kwargs["hint"] = self._hint
|
|
||||||
|
|
||||||
if self._collation:
|
|
||||||
kwargs["collation"] = self._collation
|
|
||||||
|
|
||||||
count = count_documents(
|
|
||||||
collection=self._cursor.collection,
|
|
||||||
filter=self._query,
|
|
||||||
**kwargs,
|
|
||||||
)
|
|
||||||
|
|
||||||
self._cursor_obj = None
|
self._cursor_obj = None
|
||||||
return count
|
return count
|
||||||
|
|
||||||
@ -518,13 +490,7 @@ class BaseQuerySet:
|
|||||||
return result.deleted_count
|
return result.deleted_count
|
||||||
|
|
||||||
def update(
|
def update(
|
||||||
self,
|
self, upsert=False, multi=True, write_concern=None, full_result=False, **update
|
||||||
upsert=False,
|
|
||||||
multi=True,
|
|
||||||
write_concern=None,
|
|
||||||
read_concern=None,
|
|
||||||
full_result=False,
|
|
||||||
**update,
|
|
||||||
):
|
):
|
||||||
"""Perform an atomic update on the fields matched by the query.
|
"""Perform an atomic update on the fields matched by the query.
|
||||||
|
|
||||||
@ -536,12 +502,13 @@ class BaseQuerySet:
|
|||||||
``save(..., write_concern={w: 2, fsync: True}, ...)`` will
|
``save(..., write_concern={w: 2, fsync: True}, ...)`` will
|
||||||
wait until at least two servers have recorded the write and
|
wait until at least two servers have recorded the write and
|
||||||
will force an fsync on the primary server.
|
will force an fsync on the primary server.
|
||||||
:param read_concern: Override the read concern for the operation
|
|
||||||
:param full_result: Return the associated ``pymongo.UpdateResult`` rather than just the number
|
:param full_result: Return the associated ``pymongo.UpdateResult`` rather than just the number
|
||||||
updated items
|
updated items
|
||||||
:param update: Django-style update keyword arguments
|
:param update: Django-style update keyword arguments
|
||||||
|
|
||||||
:returns the number of updated documents (unless ``full_result`` is True)
|
:returns the number of updated documents (unless ``full_result`` is True)
|
||||||
|
|
||||||
|
.. versionadded:: 0.2
|
||||||
"""
|
"""
|
||||||
if not update and not upsert:
|
if not update and not upsert:
|
||||||
raise OperationError("No update parameters, would remove data")
|
raise OperationError("No update parameters, would remove data")
|
||||||
@ -561,9 +528,7 @@ class BaseQuerySet:
|
|||||||
else:
|
else:
|
||||||
update["$set"] = {"_cls": queryset._document._class_name}
|
update["$set"] = {"_cls": queryset._document._class_name}
|
||||||
try:
|
try:
|
||||||
with set_read_write_concern(
|
with set_write_concern(queryset._collection, write_concern) as collection:
|
||||||
queryset._collection, write_concern, read_concern
|
|
||||||
) as collection:
|
|
||||||
update_func = collection.update_one
|
update_func = collection.update_one
|
||||||
if multi:
|
if multi:
|
||||||
update_func = collection.update_many
|
update_func = collection.update_many
|
||||||
@ -573,14 +538,14 @@ class BaseQuerySet:
|
|||||||
elif result.raw_result:
|
elif result.raw_result:
|
||||||
return result.raw_result["n"]
|
return result.raw_result["n"]
|
||||||
except pymongo.errors.DuplicateKeyError as err:
|
except pymongo.errors.DuplicateKeyError as err:
|
||||||
raise NotUniqueError("Update failed (%s)" % err)
|
raise NotUniqueError(u"Update failed (%s)" % six.text_type(err))
|
||||||
except pymongo.errors.OperationFailure as err:
|
except pymongo.errors.OperationFailure as err:
|
||||||
if str(err) == "multi not coded yet":
|
if six.text_type(err) == u"multi not coded yet":
|
||||||
message = "update() method requires MongoDB 1.1.3+"
|
message = u"update() method requires MongoDB 1.1.3+"
|
||||||
raise OperationError(message)
|
raise OperationError(message)
|
||||||
raise OperationError("Update failed (%s)" % err)
|
raise OperationError(u"Update failed (%s)" % six.text_type(err))
|
||||||
|
|
||||||
def upsert_one(self, write_concern=None, read_concern=None, **update):
|
def upsert_one(self, write_concern=None, **update):
|
||||||
"""Overwrite or add the first document matched by the query.
|
"""Overwrite or add the first document matched by the query.
|
||||||
|
|
||||||
:param write_concern: Extra keyword arguments are passed down which
|
:param write_concern: Extra keyword arguments are passed down which
|
||||||
@ -589,19 +554,19 @@ class BaseQuerySet:
|
|||||||
``save(..., write_concern={w: 2, fsync: True}, ...)`` will
|
``save(..., write_concern={w: 2, fsync: True}, ...)`` will
|
||||||
wait until at least two servers have recorded the write and
|
wait until at least two servers have recorded the write and
|
||||||
will force an fsync on the primary server.
|
will force an fsync on the primary server.
|
||||||
:param read_concern: Override the read concern for the operation
|
|
||||||
:param update: Django-style update keyword arguments
|
:param update: Django-style update keyword arguments
|
||||||
|
|
||||||
:returns the new or overwritten document
|
:returns the new or overwritten document
|
||||||
|
|
||||||
|
.. versionadded:: 0.10.2
|
||||||
"""
|
"""
|
||||||
|
|
||||||
atomic_update = self.update(
|
atomic_update = self.update(
|
||||||
multi=False,
|
multi=False,
|
||||||
upsert=True,
|
upsert=True,
|
||||||
write_concern=write_concern,
|
write_concern=write_concern,
|
||||||
read_concern=read_concern,
|
|
||||||
full_result=True,
|
full_result=True,
|
||||||
**update,
|
**update
|
||||||
)
|
)
|
||||||
|
|
||||||
if atomic_update.raw_result["updatedExisting"]:
|
if atomic_update.raw_result["updatedExisting"]:
|
||||||
@ -626,13 +591,14 @@ class BaseQuerySet:
|
|||||||
:param update: Django-style update keyword arguments
|
:param update: Django-style update keyword arguments
|
||||||
full_result
|
full_result
|
||||||
:returns the number of updated documents (unless ``full_result`` is True)
|
:returns the number of updated documents (unless ``full_result`` is True)
|
||||||
|
.. versionadded:: 0.2
|
||||||
"""
|
"""
|
||||||
return self.update(
|
return self.update(
|
||||||
upsert=upsert,
|
upsert=upsert,
|
||||||
multi=False,
|
multi=False,
|
||||||
write_concern=write_concern,
|
write_concern=write_concern,
|
||||||
full_result=full_result,
|
full_result=full_result,
|
||||||
**update,
|
**update
|
||||||
)
|
)
|
||||||
|
|
||||||
def modify(
|
def modify(
|
||||||
@ -657,6 +623,8 @@ class BaseQuerySet:
|
|||||||
:param new: return updated rather than original document
|
:param new: return updated rather than original document
|
||||||
(default ``False``)
|
(default ``False``)
|
||||||
:param update: Django-style update keyword arguments
|
:param update: Django-style update keyword arguments
|
||||||
|
|
||||||
|
.. versionadded:: 0.9
|
||||||
"""
|
"""
|
||||||
|
|
||||||
if remove and new:
|
if remove and new:
|
||||||
@ -690,19 +658,21 @@ class BaseQuerySet:
|
|||||||
upsert=upsert,
|
upsert=upsert,
|
||||||
sort=sort,
|
sort=sort,
|
||||||
return_document=return_doc,
|
return_document=return_doc,
|
||||||
**self._cursor_args,
|
**self._cursor_args
|
||||||
)
|
)
|
||||||
except pymongo.errors.DuplicateKeyError as err:
|
except pymongo.errors.DuplicateKeyError as err:
|
||||||
raise NotUniqueError("Update failed (%s)" % err)
|
raise NotUniqueError(u"Update failed (%s)" % err)
|
||||||
except pymongo.errors.OperationFailure as err:
|
except pymongo.errors.OperationFailure as err:
|
||||||
raise OperationError("Update failed (%s)" % err)
|
raise OperationError(u"Update failed (%s)" % err)
|
||||||
|
|
||||||
if full_response:
|
if full_response:
|
||||||
if result["value"] is not None:
|
if result["value"] is not None:
|
||||||
result["value"] = self._document._from_son(result["value"])
|
result["value"] = self._document._from_son(
|
||||||
|
result["value"], only_fields=self.only_fields
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
if result is not None:
|
if result is not None:
|
||||||
result = self._document._from_son(result)
|
result = self._document._from_son(result, only_fields=self.only_fields)
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
@ -712,6 +682,8 @@ class BaseQuerySet:
|
|||||||
`None` if no document exists with that id.
|
`None` if no document exists with that id.
|
||||||
|
|
||||||
:param object_id: the value for the id of the document to look up
|
:param object_id: the value for the id of the document to look up
|
||||||
|
|
||||||
|
.. versionchanged:: 0.6 Raises InvalidQueryError if filter has been set
|
||||||
"""
|
"""
|
||||||
queryset = self.clone()
|
queryset = self.clone()
|
||||||
if not queryset._query_obj.empty:
|
if not queryset._query_obj.empty:
|
||||||
@ -722,16 +694,20 @@ class BaseQuerySet:
|
|||||||
def in_bulk(self, object_ids):
|
def in_bulk(self, object_ids):
|
||||||
"""Retrieve a set of documents by their ids.
|
"""Retrieve a set of documents by their ids.
|
||||||
|
|
||||||
:param object_ids: a list or tuple of ObjectId's
|
:param object_ids: a list or tuple of ``ObjectId``\ s
|
||||||
:rtype: dict of ObjectId's as keys and collection-specific
|
:rtype: dict of ObjectIds as keys and collection-specific
|
||||||
Document subclasses as values.
|
Document subclasses as values.
|
||||||
|
|
||||||
|
.. versionadded:: 0.3
|
||||||
"""
|
"""
|
||||||
doc_map = {}
|
doc_map = {}
|
||||||
|
|
||||||
docs = self._collection.find({"_id": {"$in": object_ids}}, **self._cursor_args)
|
docs = self._collection.find({"_id": {"$in": object_ids}}, **self._cursor_args)
|
||||||
if self._scalar:
|
if self._scalar:
|
||||||
for doc in docs:
|
for doc in docs:
|
||||||
doc_map[doc["_id"]] = self._get_scalar(self._document._from_son(doc))
|
doc_map[doc["_id"]] = self._get_scalar(
|
||||||
|
self._document._from_son(doc, only_fields=self.only_fields)
|
||||||
|
)
|
||||||
elif self._as_pymongo:
|
elif self._as_pymongo:
|
||||||
for doc in docs:
|
for doc in docs:
|
||||||
doc_map[doc["_id"]] = doc
|
doc_map[doc["_id"]] = doc
|
||||||
@ -739,15 +715,14 @@ class BaseQuerySet:
|
|||||||
for doc in docs:
|
for doc in docs:
|
||||||
doc_map[doc["_id"]] = self._document._from_son(
|
doc_map[doc["_id"]] = self._document._from_son(
|
||||||
doc,
|
doc,
|
||||||
|
only_fields=self.only_fields,
|
||||||
_auto_dereference=self._auto_dereference,
|
_auto_dereference=self._auto_dereference,
|
||||||
)
|
)
|
||||||
|
|
||||||
return doc_map
|
return doc_map
|
||||||
|
|
||||||
def none(self):
|
def none(self):
|
||||||
"""Returns a queryset that never returns any objects and no query will be executed when accessing the results
|
"""Helper that just returns a list"""
|
||||||
inspired by django none() https://docs.djangoproject.com/en/dev/ref/models/querysets/#none
|
|
||||||
"""
|
|
||||||
queryset = self.clone()
|
queryset = self.clone()
|
||||||
queryset._none = True
|
queryset._none = True
|
||||||
return queryset
|
return queryset
|
||||||
@ -767,6 +742,8 @@ class BaseQuerySet:
|
|||||||
evaluated against if you are using more than one database.
|
evaluated against if you are using more than one database.
|
||||||
|
|
||||||
:param alias: The database alias
|
:param alias: The database alias
|
||||||
|
|
||||||
|
.. versionadded:: 0.9
|
||||||
"""
|
"""
|
||||||
|
|
||||||
with switch_db(self._document, alias) as cls:
|
with switch_db(self._document, alias) as cls:
|
||||||
@ -798,19 +775,18 @@ class BaseQuerySet:
|
|||||||
"_ordering",
|
"_ordering",
|
||||||
"_snapshot",
|
"_snapshot",
|
||||||
"_timeout",
|
"_timeout",
|
||||||
"_allow_disk_use",
|
"_slave_okay",
|
||||||
"_read_preference",
|
"_read_preference",
|
||||||
"_read_concern",
|
|
||||||
"_iter",
|
"_iter",
|
||||||
"_scalar",
|
"_scalar",
|
||||||
"_as_pymongo",
|
"_as_pymongo",
|
||||||
"_limit",
|
"_limit",
|
||||||
"_skip",
|
"_skip",
|
||||||
"_empty",
|
|
||||||
"_hint",
|
"_hint",
|
||||||
"_collation",
|
"_collation",
|
||||||
"_auto_dereference",
|
"_auto_dereference",
|
||||||
"_search_text",
|
"_search_text",
|
||||||
|
"only_fields",
|
||||||
"_max_time_ms",
|
"_max_time_ms",
|
||||||
"_comment",
|
"_comment",
|
||||||
"_batch_size",
|
"_batch_size",
|
||||||
@ -829,6 +805,8 @@ class BaseQuerySet:
|
|||||||
"""Handles dereferencing of :class:`~bson.dbref.DBRef` objects or
|
"""Handles dereferencing of :class:`~bson.dbref.DBRef` objects or
|
||||||
:class:`~bson.object_id.ObjectId` a maximum depth in order to cut down
|
:class:`~bson.object_id.ObjectId` a maximum depth in order to cut down
|
||||||
the number queries to mongodb.
|
the number queries to mongodb.
|
||||||
|
|
||||||
|
.. versionadded:: 0.5
|
||||||
"""
|
"""
|
||||||
# Make select related work the same for querysets
|
# Make select related work the same for querysets
|
||||||
max_depth += 1
|
max_depth += 1
|
||||||
@ -844,7 +822,6 @@ class BaseQuerySet:
|
|||||||
"""
|
"""
|
||||||
queryset = self.clone()
|
queryset = self.clone()
|
||||||
queryset._limit = n
|
queryset._limit = n
|
||||||
queryset._empty = False # cancels the effect of empty
|
|
||||||
|
|
||||||
# If a cursor object has already been created, apply the limit to it.
|
# If a cursor object has already been created, apply the limit to it.
|
||||||
if queryset._cursor_obj:
|
if queryset._cursor_obj:
|
||||||
@ -877,6 +854,8 @@ class BaseQuerySet:
|
|||||||
|
|
||||||
Hinting will not do anything if the corresponding index does not exist.
|
Hinting will not do anything if the corresponding index does not exist.
|
||||||
The last hint applied to this cursor takes precedence over all others.
|
The last hint applied to this cursor takes precedence over all others.
|
||||||
|
|
||||||
|
.. versionadded:: 0.5
|
||||||
"""
|
"""
|
||||||
queryset = self.clone()
|
queryset = self.clone()
|
||||||
queryset._hint = index
|
queryset._hint = index
|
||||||
@ -938,6 +917,10 @@ class BaseQuerySet:
|
|||||||
|
|
||||||
.. note:: This is a command and won't take ordering or limit into
|
.. note:: This is a command and won't take ordering or limit into
|
||||||
account.
|
account.
|
||||||
|
|
||||||
|
.. versionadded:: 0.4
|
||||||
|
.. versionchanged:: 0.5 - Fixed handling references
|
||||||
|
.. versionchanged:: 0.6 - Improved db_field refrence handling
|
||||||
"""
|
"""
|
||||||
queryset = self.clone()
|
queryset = self.clone()
|
||||||
|
|
||||||
@ -1001,8 +984,12 @@ class BaseQuerySet:
|
|||||||
field filters.
|
field filters.
|
||||||
|
|
||||||
:param fields: fields to include
|
:param fields: fields to include
|
||||||
|
|
||||||
|
.. versionadded:: 0.3
|
||||||
|
.. versionchanged:: 0.5 - Added subfield support
|
||||||
"""
|
"""
|
||||||
fields = {f: QueryFieldList.ONLY for f in fields}
|
fields = {f: QueryFieldList.ONLY for f in fields}
|
||||||
|
self.only_fields = fields.keys()
|
||||||
return self.fields(True, **fields)
|
return self.fields(True, **fields)
|
||||||
|
|
||||||
def exclude(self, *fields):
|
def exclude(self, *fields):
|
||||||
@ -1019,6 +1006,8 @@ class BaseQuerySet:
|
|||||||
field filters.
|
field filters.
|
||||||
|
|
||||||
:param fields: fields to exclude
|
:param fields: fields to exclude
|
||||||
|
|
||||||
|
.. versionadded:: 0.5
|
||||||
"""
|
"""
|
||||||
fields = {f: QueryFieldList.EXCLUDE for f in fields}
|
fields = {f: QueryFieldList.EXCLUDE for f in fields}
|
||||||
return self.fields(**fields)
|
return self.fields(**fields)
|
||||||
@ -1037,18 +1026,18 @@ class BaseQuerySet:
|
|||||||
|
|
||||||
posts = BlogPost.objects(...).fields(comments=0)
|
posts = BlogPost.objects(...).fields(comments=0)
|
||||||
|
|
||||||
To retrieve a subrange or sublist of array elements,
|
To retrieve a subrange of array elements:
|
||||||
support exist for both the `slice` and `elemMatch` projection operator:
|
|
||||||
|
|
||||||
posts = BlogPost.objects(...).fields(slice__comments=5)
|
posts = BlogPost.objects(...).fields(slice__comments=5)
|
||||||
posts = BlogPost.objects(...).fields(elemMatch__comments="test")
|
|
||||||
|
|
||||||
:param kwargs: A set of keyword arguments identifying what to
|
:param kwargs: A set of keyword arguments identifying what to
|
||||||
include, exclude, or slice.
|
include, exclude, or slice.
|
||||||
|
|
||||||
|
.. versionadded:: 0.5
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Check for an operator and transform to mongo-style if there is
|
# Check for an operator and transform to mongo-style if there is
|
||||||
operators = ["slice", "elemMatch"]
|
operators = ["slice"]
|
||||||
cleaned_fields = []
|
cleaned_fields = []
|
||||||
for key, value in kwargs.items():
|
for key, value in kwargs.items():
|
||||||
parts = key.split("__")
|
parts = key.split("__")
|
||||||
@ -1086,6 +1075,8 @@ class BaseQuerySet:
|
|||||||
.exclude(). ::
|
.exclude(). ::
|
||||||
|
|
||||||
post = BlogPost.objects.exclude('comments').all_fields()
|
post = BlogPost.objects.exclude('comments').all_fields()
|
||||||
|
|
||||||
|
.. versionadded:: 0.5
|
||||||
"""
|
"""
|
||||||
queryset = self.clone()
|
queryset = self.clone()
|
||||||
queryset._loaded_fields = QueryFieldList(
|
queryset._loaded_fields = QueryFieldList(
|
||||||
@ -1149,7 +1140,7 @@ class BaseQuerySet:
|
|||||||
|
|
||||||
def explain(self):
|
def explain(self):
|
||||||
"""Return an explain plan record for the
|
"""Return an explain plan record for the
|
||||||
:class:`~mongoengine.queryset.QuerySet` cursor.
|
:class:`~mongoengine.queryset.QuerySet`\ 's cursor.
|
||||||
"""
|
"""
|
||||||
return self._cursor.explain()
|
return self._cursor.explain()
|
||||||
|
|
||||||
@ -1158,6 +1149,9 @@ class BaseQuerySet:
|
|||||||
"""Enable or disable snapshot mode when querying.
|
"""Enable or disable snapshot mode when querying.
|
||||||
|
|
||||||
:param enabled: whether or not snapshot mode is enabled
|
:param enabled: whether or not snapshot mode is enabled
|
||||||
|
|
||||||
|
..versionchanged:: 0.5 - made chainable
|
||||||
|
.. deprecated:: Ignored with PyMongo 3+
|
||||||
"""
|
"""
|
||||||
msg = "snapshot is deprecated as it has no impact when using PyMongo 3+."
|
msg = "snapshot is deprecated as it has no impact when using PyMongo 3+."
|
||||||
warnings.warn(msg, DeprecationWarning)
|
warnings.warn(msg, DeprecationWarning)
|
||||||
@ -1165,25 +1159,31 @@ class BaseQuerySet:
|
|||||||
queryset._snapshot = enabled
|
queryset._snapshot = enabled
|
||||||
return queryset
|
return queryset
|
||||||
|
|
||||||
def allow_disk_use(self, enabled):
|
|
||||||
"""Enable or disable the use of temporary files on disk while processing a blocking sort operation.
|
|
||||||
(To store data exceeding the 100 megabyte system memory limit)
|
|
||||||
|
|
||||||
:param enabled: whether or not temporary files on disk are used
|
|
||||||
"""
|
|
||||||
queryset = self.clone()
|
|
||||||
queryset._allow_disk_use = enabled
|
|
||||||
return queryset
|
|
||||||
|
|
||||||
def timeout(self, enabled):
|
def timeout(self, enabled):
|
||||||
"""Enable or disable the default mongod timeout when querying. (no_cursor_timeout option)
|
"""Enable or disable the default mongod timeout when querying. (no_cursor_timeout option)
|
||||||
|
|
||||||
:param enabled: whether or not the timeout is used
|
:param enabled: whether or not the timeout is used
|
||||||
|
|
||||||
|
..versionchanged:: 0.5 - made chainable
|
||||||
"""
|
"""
|
||||||
queryset = self.clone()
|
queryset = self.clone()
|
||||||
queryset._timeout = enabled
|
queryset._timeout = enabled
|
||||||
return queryset
|
return queryset
|
||||||
|
|
||||||
|
# DEPRECATED. Has no more impact on PyMongo 3+
|
||||||
|
def slave_okay(self, enabled):
|
||||||
|
"""Enable or disable the slave_okay when querying.
|
||||||
|
|
||||||
|
:param enabled: whether or not the slave_okay is enabled
|
||||||
|
|
||||||
|
.. deprecated:: Ignored with PyMongo 3+
|
||||||
|
"""
|
||||||
|
msg = "slave_okay is deprecated as it has no impact when using PyMongo 3+."
|
||||||
|
warnings.warn(msg, DeprecationWarning)
|
||||||
|
queryset = self.clone()
|
||||||
|
queryset._slave_okay = enabled
|
||||||
|
return queryset
|
||||||
|
|
||||||
def read_preference(self, read_preference):
|
def read_preference(self, read_preference):
|
||||||
"""Change the read_preference when querying.
|
"""Change the read_preference when querying.
|
||||||
|
|
||||||
@ -1196,22 +1196,6 @@ class BaseQuerySet:
|
|||||||
queryset._cursor_obj = None # we need to re-create the cursor object whenever we apply read_preference
|
queryset._cursor_obj = None # we need to re-create the cursor object whenever we apply read_preference
|
||||||
return queryset
|
return queryset
|
||||||
|
|
||||||
def read_concern(self, read_concern):
|
|
||||||
"""Change the read_concern when querying.
|
|
||||||
|
|
||||||
:param read_concern: override ReplicaSetConnection-level
|
|
||||||
preference.
|
|
||||||
"""
|
|
||||||
if read_concern is not None and not isinstance(read_concern, Mapping):
|
|
||||||
raise TypeError(f"{read_concern!r} is not a valid read concern.")
|
|
||||||
|
|
||||||
queryset = self.clone()
|
|
||||||
queryset._read_concern = (
|
|
||||||
ReadConcern(**read_concern) if read_concern is not None else None
|
|
||||||
)
|
|
||||||
queryset._cursor_obj = None # we need to re-create the cursor object whenever we apply read_concern
|
|
||||||
return queryset
|
|
||||||
|
|
||||||
def scalar(self, *fields):
|
def scalar(self, *fields):
|
||||||
"""Instead of returning Document instances, return either a specific
|
"""Instead of returning Document instances, return either a specific
|
||||||
value or a tuple of values in order.
|
value or a tuple of values in order.
|
||||||
@ -1266,7 +1250,10 @@ class BaseQuerySet:
|
|||||||
def from_json(self, json_data):
|
def from_json(self, json_data):
|
||||||
"""Converts json data to unsaved objects"""
|
"""Converts json data to unsaved objects"""
|
||||||
son_data = json_util.loads(json_data)
|
son_data = json_util.loads(json_data)
|
||||||
return [self._document._from_son(data) for data in son_data]
|
return [
|
||||||
|
self._document._from_son(data, only_fields=self.only_fields)
|
||||||
|
for data in son_data
|
||||||
|
]
|
||||||
|
|
||||||
def aggregate(self, pipeline, *suppl_pipeline, **kwargs):
|
def aggregate(self, pipeline, *suppl_pipeline, **kwargs):
|
||||||
"""Perform a aggregate function based in your queryset params
|
"""Perform a aggregate function based in your queryset params
|
||||||
@ -1277,6 +1264,7 @@ class BaseQuerySet:
|
|||||||
parameter will be removed shortly
|
parameter will be removed shortly
|
||||||
:param kwargs: (optional) kwargs dictionary to be passed to pymongo's aggregate call
|
:param kwargs: (optional) kwargs dictionary to be passed to pymongo's aggregate call
|
||||||
See https://api.mongodb.com/python/current/api/pymongo/collection.html#pymongo.collection.Collection.aggregate
|
See https://api.mongodb.com/python/current/api/pymongo/collection.html#pymongo.collection.Collection.aggregate
|
||||||
|
.. versionadded:: 0.9
|
||||||
"""
|
"""
|
||||||
using_deprecated_interface = isinstance(pipeline, dict) or bool(suppl_pipeline)
|
using_deprecated_interface = isinstance(pipeline, dict) or bool(suppl_pipeline)
|
||||||
user_pipeline = [pipeline] if isinstance(pipeline, dict) else list(pipeline)
|
user_pipeline = [pipeline] if isinstance(pipeline, dict) else list(pipeline)
|
||||||
@ -1307,11 +1295,10 @@ class BaseQuerySet:
|
|||||||
final_pipeline = initial_pipeline + user_pipeline
|
final_pipeline = initial_pipeline + user_pipeline
|
||||||
|
|
||||||
collection = self._collection
|
collection = self._collection
|
||||||
if self._read_preference is not None or self._read_concern is not None:
|
if self._read_preference is not None:
|
||||||
collection = self._collection.with_options(
|
collection = self._collection.with_options(
|
||||||
read_preference=self._read_preference, read_concern=self._read_concern
|
read_preference=self._read_preference
|
||||||
)
|
)
|
||||||
|
|
||||||
return collection.aggregate(final_pipeline, cursor={}, **kwargs)
|
return collection.aggregate(final_pipeline, cursor={}, **kwargs)
|
||||||
|
|
||||||
# JS functionality
|
# JS functionality
|
||||||
@ -1348,23 +1335,32 @@ class BaseQuerySet:
|
|||||||
Map/Reduce changed in server version **>= 1.7.4**. The PyMongo
|
Map/Reduce changed in server version **>= 1.7.4**. The PyMongo
|
||||||
:meth:`~pymongo.collection.Collection.map_reduce` helper requires
|
:meth:`~pymongo.collection.Collection.map_reduce` helper requires
|
||||||
PyMongo version **>= 1.11**.
|
PyMongo version **>= 1.11**.
|
||||||
|
|
||||||
|
.. versionchanged:: 0.5
|
||||||
|
- removed ``keep_temp`` keyword argument, which was only relevant
|
||||||
|
for MongoDB server versions older than 1.7.4
|
||||||
|
|
||||||
|
.. versionadded:: 0.3
|
||||||
"""
|
"""
|
||||||
queryset = self.clone()
|
queryset = self.clone()
|
||||||
|
|
||||||
MapReduceDocument = _import_class("MapReduceDocument")
|
MapReduceDocument = _import_class("MapReduceDocument")
|
||||||
|
|
||||||
|
if not hasattr(self._collection, "map_reduce"):
|
||||||
|
raise NotImplementedError("Requires MongoDB >= 1.7.1")
|
||||||
|
|
||||||
map_f_scope = {}
|
map_f_scope = {}
|
||||||
if isinstance(map_f, Code):
|
if isinstance(map_f, Code):
|
||||||
map_f_scope = map_f.scope
|
map_f_scope = map_f.scope
|
||||||
map_f = str(map_f)
|
map_f = six.text_type(map_f)
|
||||||
map_f = Code(queryset._sub_js_fields(map_f), map_f_scope or None)
|
map_f = Code(queryset._sub_js_fields(map_f), map_f_scope)
|
||||||
|
|
||||||
reduce_f_scope = {}
|
reduce_f_scope = {}
|
||||||
if isinstance(reduce_f, Code):
|
if isinstance(reduce_f, Code):
|
||||||
reduce_f_scope = reduce_f.scope
|
reduce_f_scope = reduce_f.scope
|
||||||
reduce_f = str(reduce_f)
|
reduce_f = six.text_type(reduce_f)
|
||||||
reduce_f_code = queryset._sub_js_fields(reduce_f)
|
reduce_f_code = queryset._sub_js_fields(reduce_f)
|
||||||
reduce_f = Code(reduce_f_code, reduce_f_scope or None)
|
reduce_f = Code(reduce_f_code, reduce_f_scope)
|
||||||
|
|
||||||
mr_args = {"query": queryset._query}
|
mr_args = {"query": queryset._query}
|
||||||
|
|
||||||
@ -1372,9 +1368,9 @@ class BaseQuerySet:
|
|||||||
finalize_f_scope = {}
|
finalize_f_scope = {}
|
||||||
if isinstance(finalize_f, Code):
|
if isinstance(finalize_f, Code):
|
||||||
finalize_f_scope = finalize_f.scope
|
finalize_f_scope = finalize_f.scope
|
||||||
finalize_f = str(finalize_f)
|
finalize_f = six.text_type(finalize_f)
|
||||||
finalize_f_code = queryset._sub_js_fields(finalize_f)
|
finalize_f_code = queryset._sub_js_fields(finalize_f)
|
||||||
finalize_f = Code(finalize_f_code, finalize_f_scope or None)
|
finalize_f = Code(finalize_f_code, finalize_f_scope)
|
||||||
mr_args["finalize"] = finalize_f
|
mr_args["finalize"] = finalize_f
|
||||||
|
|
||||||
if scope:
|
if scope:
|
||||||
@ -1388,7 +1384,7 @@ class BaseQuerySet:
|
|||||||
else:
|
else:
|
||||||
map_reduce_function = "map_reduce"
|
map_reduce_function = "map_reduce"
|
||||||
|
|
||||||
if isinstance(output, str):
|
if isinstance(output, six.string_types):
|
||||||
mr_args["out"] = output
|
mr_args["out"] = output
|
||||||
|
|
||||||
elif isinstance(output, dict):
|
elif isinstance(output, dict):
|
||||||
@ -1481,6 +1477,8 @@ class BaseQuerySet:
|
|||||||
.. note:: When using this mode of query, the database will call your
|
.. note:: When using this mode of query, the database will call your
|
||||||
function, or evaluate your predicate clause, for each object
|
function, or evaluate your predicate clause, for each object
|
||||||
in the collection.
|
in the collection.
|
||||||
|
|
||||||
|
.. versionadded:: 0.5
|
||||||
"""
|
"""
|
||||||
queryset = self.clone()
|
queryset = self.clone()
|
||||||
where_clause = queryset._sub_js_fields(where_clause)
|
where_clause = queryset._sub_js_fields(where_clause)
|
||||||
@ -1557,6 +1555,9 @@ class BaseQuerySet:
|
|||||||
:param field: the field to use
|
:param field: the field to use
|
||||||
:param normalize: normalize the results so they add to 1.0
|
:param normalize: normalize the results so they add to 1.0
|
||||||
:param map_reduce: Use map_reduce over exec_js
|
:param map_reduce: Use map_reduce over exec_js
|
||||||
|
|
||||||
|
.. versionchanged:: 0.5 defaults to map_reduce and can handle embedded
|
||||||
|
document lookups
|
||||||
"""
|
"""
|
||||||
if map_reduce:
|
if map_reduce:
|
||||||
return self._item_frequencies_map_reduce(field, normalize=normalize)
|
return self._item_frequencies_map_reduce(field, normalize=normalize)
|
||||||
@ -1565,11 +1566,12 @@ class BaseQuerySet:
|
|||||||
# Iterator helpers
|
# Iterator helpers
|
||||||
|
|
||||||
def __next__(self):
|
def __next__(self):
|
||||||
"""Wrap the result in a :class:`~mongoengine.Document` object."""
|
"""Wrap the result in a :class:`~mongoengine.Document` object.
|
||||||
if self._none or self._empty:
|
"""
|
||||||
|
if self._limit == 0 or self._none:
|
||||||
raise StopIteration
|
raise StopIteration
|
||||||
|
|
||||||
raw_doc = next(self._cursor)
|
raw_doc = six.next(self._cursor)
|
||||||
|
|
||||||
if self._as_pymongo:
|
if self._as_pymongo:
|
||||||
return raw_doc
|
return raw_doc
|
||||||
@ -1577,6 +1579,7 @@ class BaseQuerySet:
|
|||||||
doc = self._document._from_son(
|
doc = self._document._from_son(
|
||||||
raw_doc,
|
raw_doc,
|
||||||
_auto_dereference=self._auto_dereference,
|
_auto_dereference=self._auto_dereference,
|
||||||
|
only_fields=self.only_fields,
|
||||||
)
|
)
|
||||||
|
|
||||||
if self._scalar:
|
if self._scalar:
|
||||||
@ -1584,8 +1587,13 @@ class BaseQuerySet:
|
|||||||
|
|
||||||
return doc
|
return doc
|
||||||
|
|
||||||
|
next = __next__ # For Python2 support
|
||||||
|
|
||||||
def rewind(self):
|
def rewind(self):
|
||||||
"""Rewind the cursor to its unevaluated state."""
|
"""Rewind the cursor to its unevaluated state.
|
||||||
|
|
||||||
|
.. versionadded:: 0.3
|
||||||
|
"""
|
||||||
self._iter = False
|
self._iter = False
|
||||||
self._cursor.rewind()
|
self._cursor.rewind()
|
||||||
|
|
||||||
@ -1611,9 +1619,6 @@ class BaseQuerySet:
|
|||||||
if not self._timeout:
|
if not self._timeout:
|
||||||
cursor_args["no_cursor_timeout"] = True
|
cursor_args["no_cursor_timeout"] = True
|
||||||
|
|
||||||
if self._allow_disk_use:
|
|
||||||
cursor_args["allow_disk_use"] = True
|
|
||||||
|
|
||||||
if self._loaded_fields:
|
if self._loaded_fields:
|
||||||
cursor_args[fields_name] = self._loaded_fields.as_dict()
|
cursor_args[fields_name] = self._loaded_fields.as_dict()
|
||||||
|
|
||||||
@ -1637,9 +1642,9 @@ class BaseQuerySet:
|
|||||||
# XXX In PyMongo 3+, we define the read preference on a collection
|
# XXX In PyMongo 3+, we define the read preference on a collection
|
||||||
# level, not a cursor level. Thus, we need to get a cloned collection
|
# level, not a cursor level. Thus, we need to get a cloned collection
|
||||||
# object using `with_options` first.
|
# object using `with_options` first.
|
||||||
if self._read_preference is not None or self._read_concern is not None:
|
if self._read_preference is not None:
|
||||||
self._cursor_obj = self._collection.with_options(
|
self._cursor_obj = self._collection.with_options(
|
||||||
read_preference=self._read_preference, read_concern=self._read_concern
|
read_preference=self._read_preference
|
||||||
).find(self._query, **self._cursor_args)
|
).find(self._query, **self._cursor_args)
|
||||||
else:
|
else:
|
||||||
self._cursor_obj = self._collection.find(self._query, **self._cursor_args)
|
self._cursor_obj = self._collection.find(self._query, **self._cursor_args)
|
||||||
@ -1811,13 +1816,13 @@ class BaseQuerySet:
|
|||||||
}
|
}
|
||||||
"""
|
"""
|
||||||
total, data, types = self.exec_js(freq_func, field)
|
total, data, types = self.exec_js(freq_func, field)
|
||||||
values = {types.get(k): int(v) for k, v in data.items()}
|
values = {types.get(k): int(v) for k, v in iteritems(data)}
|
||||||
|
|
||||||
if normalize:
|
if normalize:
|
||||||
values = {k: float(v) / total for k, v in values.items()}
|
values = {k: float(v) / total for k, v in values.items()}
|
||||||
|
|
||||||
frequencies = {}
|
frequencies = {}
|
||||||
for k, v in values.items():
|
for k, v in iteritems(values):
|
||||||
if isinstance(k, float):
|
if isinstance(k, float):
|
||||||
if int(k) == k:
|
if int(k) == k:
|
||||||
k = int(k)
|
k = int(k)
|
||||||
@ -1837,7 +1842,7 @@ class BaseQuerySet:
|
|||||||
field_parts = field.split(".")
|
field_parts = field.split(".")
|
||||||
try:
|
try:
|
||||||
field = ".".join(
|
field = ".".join(
|
||||||
f if isinstance(f, str) else f.db_field
|
f if isinstance(f, six.string_types) else f.db_field
|
||||||
for f in self._document._lookup_field(field_parts)
|
for f in self._document._lookup_field(field_parts)
|
||||||
)
|
)
|
||||||
db_field_paths.append(field)
|
db_field_paths.append(field)
|
||||||
@ -1849,7 +1854,7 @@ class BaseQuerySet:
|
|||||||
for subdoc in subclasses:
|
for subdoc in subclasses:
|
||||||
try:
|
try:
|
||||||
subfield = ".".join(
|
subfield = ".".join(
|
||||||
f if isinstance(f, str) else f.db_field
|
f if isinstance(f, six.string_types) else f.db_field
|
||||||
for f in subdoc._lookup_field(field_parts)
|
for f in subdoc._lookup_field(field_parts)
|
||||||
)
|
)
|
||||||
db_field_paths.append(subfield)
|
db_field_paths.append(subfield)
|
||||||
@ -1923,7 +1928,7 @@ class BaseQuerySet:
|
|||||||
field_name = match.group(1).split(".")
|
field_name = match.group(1).split(".")
|
||||||
fields = self._document._lookup_field(field_name)
|
fields = self._document._lookup_field(field_name)
|
||||||
# Substitute the correct name for the field into the javascript
|
# Substitute the correct name for the field into the javascript
|
||||||
return '["%s"]' % fields[-1].db_field
|
return u'["%s"]' % fields[-1].db_field
|
||||||
|
|
||||||
def field_path_sub(match):
|
def field_path_sub(match):
|
||||||
# Extract just the field name, and look up the field objects
|
# Extract just the field name, and look up the field objects
|
||||||
@ -1953,3 +1958,23 @@ class BaseQuerySet:
|
|||||||
setattr(queryset, "_" + method_name, val)
|
setattr(queryset, "_" + method_name, val)
|
||||||
|
|
||||||
return queryset
|
return queryset
|
||||||
|
|
||||||
|
# Deprecated
|
||||||
|
def ensure_index(self, **kwargs):
|
||||||
|
"""Deprecated use :func:`Document.ensure_index`"""
|
||||||
|
msg = (
|
||||||
|
"Doc.objects()._ensure_index() is deprecated. "
|
||||||
|
"Use Doc.ensure_index() instead."
|
||||||
|
)
|
||||||
|
warnings.warn(msg, DeprecationWarning)
|
||||||
|
self._document.__class__.ensure_index(**kwargs)
|
||||||
|
return self
|
||||||
|
|
||||||
|
def _ensure_indexes(self):
|
||||||
|
"""Deprecated use :func:`~Document.ensure_indexes`"""
|
||||||
|
msg = (
|
||||||
|
"Doc.objects()._ensure_indexes() is deprecated. "
|
||||||
|
"Use Doc.ensure_indexes() instead."
|
||||||
|
)
|
||||||
|
warnings.warn(msg, DeprecationWarning)
|
||||||
|
self._document.__class__.ensure_indexes()
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
__all__ = ("QueryFieldList",)
|
__all__ = ("QueryFieldList",)
|
||||||
|
|
||||||
|
|
||||||
class QueryFieldList:
|
class QueryFieldList(object):
|
||||||
"""Object that handles combinations of .only() and .exclude() calls"""
|
"""Object that handles combinations of .only() and .exclude() calls"""
|
||||||
|
|
||||||
ONLY = 1
|
ONLY = 1
|
||||||
@ -69,6 +69,8 @@ class QueryFieldList:
|
|||||||
def __bool__(self):
|
def __bool__(self):
|
||||||
return bool(self.fields)
|
return bool(self.fields)
|
||||||
|
|
||||||
|
__nonzero__ = __bool__ # For Py2 support
|
||||||
|
|
||||||
def as_dict(self):
|
def as_dict(self):
|
||||||
field_list = {field: self.value for field in self.fields}
|
field_list = {field: self.value for field in self.fields}
|
||||||
if self.slice:
|
if self.slice:
|
||||||
@ -78,7 +80,7 @@ class QueryFieldList:
|
|||||||
return field_list
|
return field_list
|
||||||
|
|
||||||
def reset(self):
|
def reset(self):
|
||||||
self.fields = set()
|
self.fields = set([])
|
||||||
self.slice = {}
|
self.slice = {}
|
||||||
self.value = self.ONLY
|
self.value = self.ONLY
|
||||||
|
|
||||||
|
@ -1,11 +1,10 @@
|
|||||||
from functools import partial
|
from functools import partial
|
||||||
|
|
||||||
from mongoengine.queryset.queryset import QuerySet
|
from mongoengine.queryset.queryset import QuerySet
|
||||||
|
|
||||||
__all__ = ("queryset_manager", "QuerySetManager")
|
__all__ = ("queryset_manager", "QuerySetManager")
|
||||||
|
|
||||||
|
|
||||||
class QuerySetManager:
|
class QuerySetManager(object):
|
||||||
"""
|
"""
|
||||||
The default QuerySet Manager.
|
The default QuerySet Manager.
|
||||||
|
|
||||||
|
@ -1,11 +1,13 @@
|
|||||||
|
import six
|
||||||
|
|
||||||
from mongoengine.errors import OperationError
|
from mongoengine.errors import OperationError
|
||||||
from mongoengine.queryset.base import (
|
from mongoengine.queryset.base import (
|
||||||
|
BaseQuerySet,
|
||||||
CASCADE,
|
CASCADE,
|
||||||
DENY,
|
DENY,
|
||||||
DO_NOTHING,
|
DO_NOTHING,
|
||||||
NULLIFY,
|
NULLIFY,
|
||||||
PULL,
|
PULL,
|
||||||
BaseQuerySet,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
__all__ = (
|
__all__ = (
|
||||||
@ -125,8 +127,8 @@ class QuerySet(BaseQuerySet):
|
|||||||
# Pull in ITER_CHUNK_SIZE docs from the database and store them in
|
# Pull in ITER_CHUNK_SIZE docs from the database and store them in
|
||||||
# the result cache.
|
# the result cache.
|
||||||
try:
|
try:
|
||||||
for _ in range(ITER_CHUNK_SIZE):
|
for _ in six.moves.range(ITER_CHUNK_SIZE):
|
||||||
self._result_cache.append(next(self))
|
self._result_cache.append(six.next(self))
|
||||||
except StopIteration:
|
except StopIteration:
|
||||||
# Getting this exception means there are no more docs in the
|
# Getting this exception means there are no more docs in the
|
||||||
# db cursor. Set _has_more to False so that we can use that
|
# db cursor. Set _has_more to False so that we can use that
|
||||||
@ -141,16 +143,18 @@ class QuerySet(BaseQuerySet):
|
|||||||
getting the count
|
getting the count
|
||||||
"""
|
"""
|
||||||
if with_limit_and_skip is False:
|
if with_limit_and_skip is False:
|
||||||
return super().count(with_limit_and_skip)
|
return super(QuerySet, self).count(with_limit_and_skip)
|
||||||
|
|
||||||
if self._len is None:
|
if self._len is None:
|
||||||
# cache the length
|
self._len = super(QuerySet, self).count(with_limit_and_skip)
|
||||||
self._len = super().count(with_limit_and_skip)
|
|
||||||
|
|
||||||
return self._len
|
return self._len
|
||||||
|
|
||||||
def no_cache(self):
|
def no_cache(self):
|
||||||
"""Convert to a non-caching queryset"""
|
"""Convert to a non-caching queryset
|
||||||
|
|
||||||
|
.. versionadded:: 0.8.3 Convert to non caching queryset
|
||||||
|
"""
|
||||||
if self._result_cache is not None:
|
if self._result_cache is not None:
|
||||||
raise OperationError("QuerySet already cached")
|
raise OperationError("QuerySet already cached")
|
||||||
|
|
||||||
@ -161,18 +165,24 @@ class QuerySetNoCache(BaseQuerySet):
|
|||||||
"""A non caching QuerySet"""
|
"""A non caching QuerySet"""
|
||||||
|
|
||||||
def cache(self):
|
def cache(self):
|
||||||
"""Convert to a caching queryset"""
|
"""Convert to a caching queryset
|
||||||
|
|
||||||
|
.. versionadded:: 0.8.3 Convert to caching queryset
|
||||||
|
"""
|
||||||
return self._clone_into(QuerySet(self._document, self._collection))
|
return self._clone_into(QuerySet(self._document, self._collection))
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
"""Provides the string representation of the QuerySet"""
|
"""Provides the string representation of the QuerySet
|
||||||
|
|
||||||
|
.. versionchanged:: 0.6.13 Now doesnt modify the cursor
|
||||||
|
"""
|
||||||
if self._iter:
|
if self._iter:
|
||||||
return ".. queryset mid-iteration .."
|
return ".. queryset mid-iteration .."
|
||||||
|
|
||||||
data = []
|
data = []
|
||||||
for _ in range(REPR_OUTPUT_SIZE + 1):
|
for _ in six.moves.range(REPR_OUTPUT_SIZE + 1):
|
||||||
try:
|
try:
|
||||||
data.append(next(self))
|
data.append(six.next(self))
|
||||||
except StopIteration:
|
except StopIteration:
|
||||||
break
|
break
|
||||||
|
|
||||||
|
@ -1,14 +1,16 @@
|
|||||||
from collections import defaultdict
|
from collections import defaultdict
|
||||||
|
|
||||||
import pymongo
|
from bson import ObjectId, SON
|
||||||
from bson import SON, ObjectId
|
|
||||||
from bson.dbref import DBRef
|
from bson.dbref import DBRef
|
||||||
|
import pymongo
|
||||||
|
import six
|
||||||
|
from six import iteritems
|
||||||
|
|
||||||
from mongoengine.base import UPDATE_OPERATORS
|
from mongoengine.base import UPDATE_OPERATORS
|
||||||
from mongoengine.common import _import_class
|
from mongoengine.common import _import_class
|
||||||
from mongoengine.errors import InvalidQueryError
|
from mongoengine.errors import InvalidQueryError
|
||||||
|
|
||||||
__all__ = ("query", "update", "STRING_OPERATORS")
|
__all__ = ("query", "update")
|
||||||
|
|
||||||
COMPARISON_OPERATORS = (
|
COMPARISON_OPERATORS = (
|
||||||
"ne",
|
"ne",
|
||||||
@ -51,10 +53,6 @@ STRING_OPERATORS = (
|
|||||||
"iendswith",
|
"iendswith",
|
||||||
"exact",
|
"exact",
|
||||||
"iexact",
|
"iexact",
|
||||||
"regex",
|
|
||||||
"iregex",
|
|
||||||
"wholeword",
|
|
||||||
"iwholeword",
|
|
||||||
)
|
)
|
||||||
CUSTOM_OPERATORS = ("match",)
|
CUSTOM_OPERATORS = ("match",)
|
||||||
MATCH_OPERATORS = (
|
MATCH_OPERATORS = (
|
||||||
@ -103,7 +101,7 @@ def query(_doc_cls=None, **kwargs):
|
|||||||
cleaned_fields = []
|
cleaned_fields = []
|
||||||
for field in fields:
|
for field in fields:
|
||||||
append_field = True
|
append_field = True
|
||||||
if isinstance(field, str):
|
if isinstance(field, six.string_types):
|
||||||
parts.append(field)
|
parts.append(field)
|
||||||
append_field = False
|
append_field = False
|
||||||
# is last and CachedReferenceField
|
# is last and CachedReferenceField
|
||||||
@ -171,9 +169,9 @@ def query(_doc_cls=None, **kwargs):
|
|||||||
|
|
||||||
key = ".".join(parts)
|
key = ".".join(parts)
|
||||||
|
|
||||||
if key not in mongo_query:
|
if op is None or key not in mongo_query:
|
||||||
mongo_query[key] = value
|
mongo_query[key] = value
|
||||||
else:
|
elif key in mongo_query:
|
||||||
if isinstance(mongo_query[key], dict) and isinstance(value, dict):
|
if isinstance(mongo_query[key], dict) and isinstance(value, dict):
|
||||||
mongo_query[key].update(value)
|
mongo_query[key].update(value)
|
||||||
# $max/minDistance needs to come last - convert to SON
|
# $max/minDistance needs to come last - convert to SON
|
||||||
@ -182,7 +180,7 @@ def query(_doc_cls=None, **kwargs):
|
|||||||
"$near" in value_dict or "$nearSphere" in value_dict
|
"$near" in value_dict or "$nearSphere" in value_dict
|
||||||
):
|
):
|
||||||
value_son = SON()
|
value_son = SON()
|
||||||
for k, v in value_dict.items():
|
for k, v in iteritems(value_dict):
|
||||||
if k == "$maxDistance" or k == "$minDistance":
|
if k == "$maxDistance" or k == "$minDistance":
|
||||||
continue
|
continue
|
||||||
value_son[k] = v
|
value_son[k] = v
|
||||||
@ -283,7 +281,7 @@ def update(_doc_cls=None, **update):
|
|||||||
appended_sub_field = False
|
appended_sub_field = False
|
||||||
for field in fields:
|
for field in fields:
|
||||||
append_field = True
|
append_field = True
|
||||||
if isinstance(field, str):
|
if isinstance(field, six.string_types):
|
||||||
# Convert the S operator to $
|
# Convert the S operator to $
|
||||||
if field == "S":
|
if field == "S":
|
||||||
field = "$"
|
field = "$"
|
||||||
@ -437,9 +435,7 @@ def _geo_operator(field, op, value):
|
|||||||
value = {"$near": _infer_geometry(value)}
|
value = {"$near": _infer_geometry(value)}
|
||||||
else:
|
else:
|
||||||
raise NotImplementedError(
|
raise NotImplementedError(
|
||||||
'Geo method "{}" has not been implemented for a {} '.format(
|
'Geo method "%s" has not been implemented for a %s ' % (op, field._name)
|
||||||
op, field._name
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
return value
|
return value
|
||||||
|
|
||||||
|
@ -7,20 +7,18 @@ from mongoengine.queryset import transform
|
|||||||
__all__ = ("Q", "QNode")
|
__all__ = ("Q", "QNode")
|
||||||
|
|
||||||
|
|
||||||
def warn_empty_is_deprecated():
|
class QNodeVisitor(object):
|
||||||
msg = "'empty' property is deprecated in favour of using 'not bool(filter)'"
|
"""Base visitor class for visiting Q-object nodes in a query tree.
|
||||||
warnings.warn(msg, DeprecationWarning, stacklevel=2)
|
"""
|
||||||
|
|
||||||
|
|
||||||
class QNodeVisitor:
|
|
||||||
"""Base visitor class for visiting Q-object nodes in a query tree."""
|
|
||||||
|
|
||||||
def visit_combination(self, combination):
|
def visit_combination(self, combination):
|
||||||
"""Called by QCombination objects."""
|
"""Called by QCombination objects.
|
||||||
|
"""
|
||||||
return combination
|
return combination
|
||||||
|
|
||||||
def visit_query(self, query):
|
def visit_query(self, query):
|
||||||
"""Called by (New)Q objects."""
|
"""Called by (New)Q objects.
|
||||||
|
"""
|
||||||
return query
|
return query
|
||||||
|
|
||||||
|
|
||||||
@ -46,7 +44,8 @@ class SimplificationVisitor(QNodeVisitor):
|
|||||||
return combination
|
return combination
|
||||||
|
|
||||||
def _query_conjunction(self, queries):
|
def _query_conjunction(self, queries):
|
||||||
"""Merges query dicts - effectively &ing them together."""
|
"""Merges query dicts - effectively &ing them together.
|
||||||
|
"""
|
||||||
query_ops = set()
|
query_ops = set()
|
||||||
combined_query = {}
|
combined_query = {}
|
||||||
for query in queries:
|
for query in queries:
|
||||||
@ -80,7 +79,7 @@ class QueryCompilerVisitor(QNodeVisitor):
|
|||||||
return transform.query(self.document, **query.query)
|
return transform.query(self.document, **query.query)
|
||||||
|
|
||||||
|
|
||||||
class QNode:
|
class QNode(object):
|
||||||
"""Base class for nodes in query trees."""
|
"""Base class for nodes in query trees."""
|
||||||
|
|
||||||
AND = 0
|
AND = 0
|
||||||
@ -99,18 +98,19 @@ class QNode:
|
|||||||
object.
|
object.
|
||||||
"""
|
"""
|
||||||
# If the other Q() is empty, ignore it and just use `self`.
|
# If the other Q() is empty, ignore it and just use `self`.
|
||||||
if not bool(other):
|
if getattr(other, "empty", True):
|
||||||
return self
|
return self
|
||||||
|
|
||||||
# Or if this Q is empty, ignore it and just use `other`.
|
# Or if this Q is empty, ignore it and just use `other`.
|
||||||
if not bool(self):
|
if self.empty:
|
||||||
return other
|
return other
|
||||||
|
|
||||||
return QCombination(operation, [self, other])
|
return QCombination(operation, [self, other])
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def empty(self):
|
def empty(self):
|
||||||
warn_empty_is_deprecated()
|
msg = "'empty' property is deprecated in favour of using 'not bool(filter)'"
|
||||||
|
warnings.warn(msg, DeprecationWarning)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def __or__(self, other):
|
def __or__(self, other):
|
||||||
@ -143,6 +143,8 @@ class QCombination(QNode):
|
|||||||
def __bool__(self):
|
def __bool__(self):
|
||||||
return bool(self.children)
|
return bool(self.children)
|
||||||
|
|
||||||
|
__nonzero__ = __bool__ # For Py2 support
|
||||||
|
|
||||||
def accept(self, visitor):
|
def accept(self, visitor):
|
||||||
for i in range(len(self.children)):
|
for i in range(len(self.children)):
|
||||||
if isinstance(self.children[i], QNode):
|
if isinstance(self.children[i], QNode):
|
||||||
@ -152,7 +154,8 @@ class QCombination(QNode):
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def empty(self):
|
def empty(self):
|
||||||
warn_empty_is_deprecated()
|
msg = "'empty' property is deprecated in favour of using 'not bool(filter)'"
|
||||||
|
warnings.warn(msg, DeprecationWarning)
|
||||||
return not bool(self.children)
|
return not bool(self.children)
|
||||||
|
|
||||||
def __eq__(self, other):
|
def __eq__(self, other):
|
||||||
@ -177,6 +180,8 @@ class Q(QNode):
|
|||||||
def __bool__(self):
|
def __bool__(self):
|
||||||
return bool(self.query)
|
return bool(self.query)
|
||||||
|
|
||||||
|
__nonzero__ = __bool__ # For Py2 support
|
||||||
|
|
||||||
def __eq__(self, other):
|
def __eq__(self, other):
|
||||||
return self.__class__ == other.__class__ and self.query == other.query
|
return self.__class__ == other.__class__ and self.query == other.query
|
||||||
|
|
||||||
@ -185,5 +190,4 @@ class Q(QNode):
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def empty(self):
|
def empty(self):
|
||||||
warn_empty_is_deprecated()
|
|
||||||
return not bool(self.query)
|
return not bool(self.query)
|
||||||
|
@ -15,11 +15,11 @@ try:
|
|||||||
signals_available = True
|
signals_available = True
|
||||||
except ImportError:
|
except ImportError:
|
||||||
|
|
||||||
class Namespace:
|
class Namespace(object):
|
||||||
def signal(self, name, doc=None):
|
def signal(self, name, doc=None):
|
||||||
return _FakeSignal(name, doc)
|
return _FakeSignal(name, doc)
|
||||||
|
|
||||||
class _FakeSignal:
|
class _FakeSignal(object):
|
||||||
"""If blinker is unavailable, create a fake class with the same
|
"""If blinker is unavailable, create a fake class with the same
|
||||||
interface that allows sending of signals but will fail with an
|
interface that allows sending of signals but will fail with an
|
||||||
error on anything else. Instead of doing anything on send, it
|
error on anything else. Instead of doing anything on send, it
|
||||||
|
@ -1,7 +0,0 @@
|
|||||||
black
|
|
||||||
flake8
|
|
||||||
pre-commit
|
|
||||||
pytest
|
|
||||||
ipdb
|
|
||||||
ipython
|
|
||||||
tox
|
|
3
requirements-lint.txt
Normal file
3
requirements-lint.txt
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
black
|
||||||
|
flake8
|
||||||
|
flake8-import-order
|
4
requirements.txt
Normal file
4
requirements.txt
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
pymongo>=3.4
|
||||||
|
six==1.10.0
|
||||||
|
Sphinx==1.5.5
|
||||||
|
sphinx-rtd-theme==0.2.4
|
12
setup.cfg
12
setup.cfg
@ -1,18 +1,10 @@
|
|||||||
[flake8]
|
[flake8]
|
||||||
ignore=E501,F403,F405,I201,I202,W504,W605,W503,B007
|
ignore=E501,F401,F403,F405,I201,I202,W504, W605, W503
|
||||||
exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests
|
exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests
|
||||||
max-complexity=47
|
max-complexity=47
|
||||||
|
application-import-names=mongoengine,tests
|
||||||
|
|
||||||
[tool:pytest]
|
[tool:pytest]
|
||||||
# Limits the discovery to tests directory
|
# Limits the discovery to tests directory
|
||||||
# avoids that it runs for instance the benchmark
|
# avoids that it runs for instance the benchmark
|
||||||
testpaths = tests
|
testpaths = tests
|
||||||
|
|
||||||
[isort]
|
|
||||||
known_first_party = mongoengine,tests
|
|
||||||
default_section = THIRDPARTY
|
|
||||||
multi_line_output = 3
|
|
||||||
include_trailing_comma = True
|
|
||||||
combine_as_imports = True
|
|
||||||
line_length = 70
|
|
||||||
ensure_newline_before_comments = 1
|
|
||||||
|
37
setup.py
37
setup.py
@ -7,7 +7,7 @@ from setuptools.command.test import test as TestCommand
|
|||||||
|
|
||||||
# Hack to silence atexit traceback in newer python versions
|
# Hack to silence atexit traceback in newer python versions
|
||||||
try:
|
try:
|
||||||
import multiprocessing # noqa: F401
|
import multiprocessing
|
||||||
except ImportError:
|
except ImportError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@ -53,8 +53,8 @@ class PyTest(TestCommand):
|
|||||||
|
|
||||||
def run_tests(self):
|
def run_tests(self):
|
||||||
# import here, cause outside the eggs aren't loaded
|
# import here, cause outside the eggs aren't loaded
|
||||||
import pytest
|
|
||||||
from pkg_resources import _namespace_packages
|
from pkg_resources import _namespace_packages
|
||||||
|
import pytest
|
||||||
|
|
||||||
# Purge modules under test from sys.modules. The test loader will
|
# Purge modules under test from sys.modules. The test loader will
|
||||||
# re-import them from the build location. Required when 2to3 is used
|
# re-import them from the build location. Required when 2to3 is used
|
||||||
@ -92,17 +92,16 @@ version_line = list(filter(lambda l: l.startswith("VERSION"), open(init)))[0]
|
|||||||
VERSION = get_version(eval(version_line.split("=")[-1]))
|
VERSION = get_version(eval(version_line.split("=")[-1]))
|
||||||
|
|
||||||
CLASSIFIERS = [
|
CLASSIFIERS = [
|
||||||
"Development Status :: 5 - Production/Stable",
|
"Development Status :: 4 - Beta",
|
||||||
"Intended Audience :: Developers",
|
"Intended Audience :: Developers",
|
||||||
"License :: OSI Approved :: MIT License",
|
"License :: OSI Approved :: MIT License",
|
||||||
"Operating System :: OS Independent",
|
"Operating System :: OS Independent",
|
||||||
"Programming Language :: Python",
|
"Programming Language :: Python",
|
||||||
|
"Programming Language :: Python :: 2",
|
||||||
|
"Programming Language :: Python :: 2.7",
|
||||||
"Programming Language :: Python :: 3",
|
"Programming Language :: Python :: 3",
|
||||||
|
"Programming Language :: Python :: 3.5",
|
||||||
"Programming Language :: Python :: 3.6",
|
"Programming Language :: Python :: 3.6",
|
||||||
"Programming Language :: Python :: 3.7",
|
|
||||||
"Programming Language :: Python :: 3.8",
|
|
||||||
"Programming Language :: Python :: 3.9",
|
|
||||||
"Programming Language :: Python :: 3.10",
|
|
||||||
"Programming Language :: Python :: Implementation :: CPython",
|
"Programming Language :: Python :: Implementation :: CPython",
|
||||||
"Programming Language :: Python :: Implementation :: PyPy",
|
"Programming Language :: Python :: Implementation :: PyPy",
|
||||||
"Topic :: Database",
|
"Topic :: Database",
|
||||||
@ -112,19 +111,22 @@ CLASSIFIERS = [
|
|||||||
extra_opts = {
|
extra_opts = {
|
||||||
"packages": find_packages(exclude=["tests", "tests.*"]),
|
"packages": find_packages(exclude=["tests", "tests.*"]),
|
||||||
"tests_require": [
|
"tests_require": [
|
||||||
"pytest",
|
"pytest<5.0",
|
||||||
"pytest-cov",
|
"pytest-cov",
|
||||||
"coverage",
|
"coverage<5.0", # recent coverage switched to sqlite format for the .coverage file which isn't handled properly by coveralls
|
||||||
"blinker",
|
"blinker",
|
||||||
"Pillow>=7.0.0",
|
"Pillow>=2.0.0, <7.0.0", # 7.0.0 dropped Python2 support
|
||||||
],
|
],
|
||||||
}
|
}
|
||||||
|
if sys.version_info[0] == 3:
|
||||||
if "test" in sys.argv:
|
extra_opts["use_2to3"] = True
|
||||||
extra_opts["packages"] = find_packages()
|
if "test" in sys.argv:
|
||||||
extra_opts["package_data"] = {
|
extra_opts["packages"] = find_packages()
|
||||||
"tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]
|
extra_opts["package_data"] = {
|
||||||
}
|
"tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]
|
||||||
|
}
|
||||||
|
else:
|
||||||
|
extra_opts["tests_require"] += ["python-dateutil"]
|
||||||
|
|
||||||
setup(
|
setup(
|
||||||
name="mongoengine",
|
name="mongoengine",
|
||||||
@ -141,8 +143,7 @@ setup(
|
|||||||
long_description=LONG_DESCRIPTION,
|
long_description=LONG_DESCRIPTION,
|
||||||
platforms=["any"],
|
platforms=["any"],
|
||||||
classifiers=CLASSIFIERS,
|
classifiers=CLASSIFIERS,
|
||||||
python_requires=">=3.6",
|
install_requires=["pymongo>=3.4", "six>=1.10.0"],
|
||||||
install_requires=["pymongo>=3.4, <4.0"],
|
|
||||||
cmdclass={"test": PyTest},
|
cmdclass={"test": PyTest},
|
||||||
**extra_opts
|
**extra_opts
|
||||||
)
|
)
|
||||||
|
@ -1,3 +1,4 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
import unittest
|
import unittest
|
||||||
|
|
||||||
from mongoengine import *
|
from mongoengine import *
|
||||||
@ -26,14 +27,16 @@ class TestClassMethods(unittest.TestCase):
|
|||||||
self.db.drop_collection(collection)
|
self.db.drop_collection(collection)
|
||||||
|
|
||||||
def test_definition(self):
|
def test_definition(self):
|
||||||
"""Ensure that document may be defined using fields."""
|
"""Ensure that document may be defined using fields.
|
||||||
|
"""
|
||||||
assert ["_cls", "age", "id", "name"] == sorted(self.Person._fields.keys())
|
assert ["_cls", "age", "id", "name"] == sorted(self.Person._fields.keys())
|
||||||
assert ["IntField", "ObjectIdField", "StringField", "StringField"] == sorted(
|
assert ["IntField", "ObjectIdField", "StringField", "StringField"] == sorted(
|
||||||
x.__class__.__name__ for x in self.Person._fields.values()
|
[x.__class__.__name__ for x in self.Person._fields.values()]
|
||||||
)
|
)
|
||||||
|
|
||||||
def test_get_db(self):
|
def test_get_db(self):
|
||||||
"""Ensure that get_db returns the expected db."""
|
"""Ensure that get_db returns the expected db.
|
||||||
|
"""
|
||||||
db = self.Person._get_db()
|
db = self.Person._get_db()
|
||||||
assert self.db == db
|
assert self.db == db
|
||||||
|
|
||||||
@ -45,13 +48,15 @@ class TestClassMethods(unittest.TestCase):
|
|||||||
assert collection_name == self.Person._get_collection_name()
|
assert collection_name == self.Person._get_collection_name()
|
||||||
|
|
||||||
def test_get_collection(self):
|
def test_get_collection(self):
|
||||||
"""Ensure that get_collection returns the expected collection."""
|
"""Ensure that get_collection returns the expected collection.
|
||||||
|
"""
|
||||||
collection_name = "person"
|
collection_name = "person"
|
||||||
collection = self.Person._get_collection()
|
collection = self.Person._get_collection()
|
||||||
assert self.db[collection_name] == collection
|
assert self.db[collection_name] == collection
|
||||||
|
|
||||||
def test_drop_collection(self):
|
def test_drop_collection(self):
|
||||||
"""Ensure that the collection may be dropped from the database."""
|
"""Ensure that the collection may be dropped from the database.
|
||||||
|
"""
|
||||||
collection_name = "person"
|
collection_name = "person"
|
||||||
self.Person(name="Test").save()
|
self.Person(name="Test").save()
|
||||||
assert collection_name in list_collection_names(self.db)
|
assert collection_name in list_collection_names(self.db)
|
||||||
@ -73,7 +78,7 @@ class TestClassMethods(unittest.TestCase):
|
|||||||
assert self.Person._meta["delete_rules"] == {(Job, "employee"): NULLIFY}
|
assert self.Person._meta["delete_rules"] == {(Job, "employee"): NULLIFY}
|
||||||
|
|
||||||
def test_compare_indexes(self):
|
def test_compare_indexes(self):
|
||||||
"""Ensure that the indexes are properly created and that
|
""" Ensure that the indexes are properly created and that
|
||||||
compare_indexes identifies the missing/extra indexes
|
compare_indexes identifies the missing/extra indexes
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@ -106,7 +111,7 @@ class TestClassMethods(unittest.TestCase):
|
|||||||
}
|
}
|
||||||
|
|
||||||
def test_compare_indexes_inheritance(self):
|
def test_compare_indexes_inheritance(self):
|
||||||
"""Ensure that the indexes are properly created and that
|
""" Ensure that the indexes are properly created and that
|
||||||
compare_indexes identifies the missing/extra indexes for subclassed
|
compare_indexes identifies the missing/extra indexes for subclassed
|
||||||
documents (_cls included)
|
documents (_cls included)
|
||||||
"""
|
"""
|
||||||
@ -146,7 +151,7 @@ class TestClassMethods(unittest.TestCase):
|
|||||||
}
|
}
|
||||||
|
|
||||||
def test_compare_indexes_multiple_subclasses(self):
|
def test_compare_indexes_multiple_subclasses(self):
|
||||||
"""Ensure that compare_indexes behaves correctly if called from a
|
""" Ensure that compare_indexes behaves correctly if called from a
|
||||||
class, which base class has multiple subclasses
|
class, which base class has multiple subclasses
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@ -177,7 +182,7 @@ class TestClassMethods(unittest.TestCase):
|
|||||||
assert BlogPostWithCustomField.compare_indexes() == {"missing": [], "extra": []}
|
assert BlogPostWithCustomField.compare_indexes() == {"missing": [], "extra": []}
|
||||||
|
|
||||||
def test_compare_indexes_for_text_indexes(self):
|
def test_compare_indexes_for_text_indexes(self):
|
||||||
"""Ensure that compare_indexes behaves correctly for text indexes"""
|
""" Ensure that compare_indexes behaves correctly for text indexes """
|
||||||
|
|
||||||
class Doc(Document):
|
class Doc(Document):
|
||||||
a = StringField()
|
a = StringField()
|
||||||
@ -199,7 +204,7 @@ class TestClassMethods(unittest.TestCase):
|
|||||||
assert actual == expected
|
assert actual == expected
|
||||||
|
|
||||||
def test_list_indexes_inheritance(self):
|
def test_list_indexes_inheritance(self):
|
||||||
"""ensure that all of the indexes are listed regardless of the super-
|
""" ensure that all of the indexes are listed regardless of the super-
|
||||||
or sub-class that we call it from
|
or sub-class that we call it from
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@ -231,7 +236,7 @@ class TestClassMethods(unittest.TestCase):
|
|||||||
assert BlogPost.list_indexes() == [
|
assert BlogPost.list_indexes() == [
|
||||||
[("_cls", 1), ("author", 1), ("tags", 1)],
|
[("_cls", 1), ("author", 1), ("tags", 1)],
|
||||||
[("_cls", 1), ("author", 1), ("tags", 1), ("extra_text", 1)],
|
[("_cls", 1), ("author", 1), ("tags", 1), ("extra_text", 1)],
|
||||||
[("_id", 1)],
|
[(u"_id", 1)],
|
||||||
[("_cls", 1)],
|
[("_cls", 1)],
|
||||||
]
|
]
|
||||||
|
|
||||||
@ -256,7 +261,8 @@ class TestClassMethods(unittest.TestCase):
|
|||||||
assert Vaccine._meta["delete_rules"][(Cat, "vaccine_made")] == PULL
|
assert Vaccine._meta["delete_rules"][(Cat, "vaccine_made")] == PULL
|
||||||
|
|
||||||
def test_collection_naming(self):
|
def test_collection_naming(self):
|
||||||
"""Ensure that a collection with a specified name may be used."""
|
"""Ensure that a collection with a specified name may be used.
|
||||||
|
"""
|
||||||
|
|
||||||
class DefaultNamingTest(Document):
|
class DefaultNamingTest(Document):
|
||||||
pass
|
pass
|
||||||
@ -288,7 +294,7 @@ class TestClassMethods(unittest.TestCase):
|
|||||||
assert "wibble" == InheritedAbstractNamingTest._get_collection_name()
|
assert "wibble" == InheritedAbstractNamingTest._get_collection_name()
|
||||||
|
|
||||||
# Mixin tests
|
# Mixin tests
|
||||||
class BaseMixin:
|
class BaseMixin(object):
|
||||||
meta = {"collection": lambda c: c.__name__.lower()}
|
meta = {"collection": lambda c: c.__name__.lower()}
|
||||||
|
|
||||||
class OldMixinNamingConvention(Document, BaseMixin):
|
class OldMixinNamingConvention(Document, BaseMixin):
|
||||||
@ -299,7 +305,7 @@ class TestClassMethods(unittest.TestCase):
|
|||||||
== OldMixinNamingConvention._get_collection_name()
|
== OldMixinNamingConvention._get_collection_name()
|
||||||
)
|
)
|
||||||
|
|
||||||
class BaseMixin:
|
class BaseMixin(object):
|
||||||
meta = {"collection": lambda c: c.__name__.lower()}
|
meta = {"collection": lambda c: c.__name__.lower()}
|
||||||
|
|
||||||
class BaseDocument(Document, BaseMixin):
|
class BaseDocument(Document, BaseMixin):
|
||||||
@ -311,7 +317,8 @@ class TestClassMethods(unittest.TestCase):
|
|||||||
assert "basedocument" == MyDocument._get_collection_name()
|
assert "basedocument" == MyDocument._get_collection_name()
|
||||||
|
|
||||||
def test_custom_collection_name_operations(self):
|
def test_custom_collection_name_operations(self):
|
||||||
"""Ensure that a collection with a specified name is used as expected."""
|
"""Ensure that a collection with a specified name is used as expected.
|
||||||
|
"""
|
||||||
collection_name = "personCollTest"
|
collection_name = "personCollTest"
|
||||||
|
|
||||||
class Person(Document):
|
class Person(Document):
|
||||||
@ -331,7 +338,8 @@ class TestClassMethods(unittest.TestCase):
|
|||||||
assert collection_name not in list_collection_names(self.db)
|
assert collection_name not in list_collection_names(self.db)
|
||||||
|
|
||||||
def test_collection_name_and_primary(self):
|
def test_collection_name_and_primary(self):
|
||||||
"""Ensure that a collection with a specified name may be used."""
|
"""Ensure that a collection with a specified name may be used.
|
||||||
|
"""
|
||||||
|
|
||||||
class Person(Document):
|
class Person(Document):
|
||||||
name = StringField(primary_key=True)
|
name = StringField(primary_key=True)
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
import unittest
|
import unittest
|
||||||
|
|
||||||
from bson import SON
|
from bson import SON
|
||||||
|
|
||||||
from mongoengine import *
|
from mongoengine import *
|
||||||
from mongoengine.pymongo_support import list_collection_names
|
from mongoengine.pymongo_support import list_collection_names
|
||||||
from tests.utils import MongoDBTestCase
|
from tests.utils import MongoDBTestCase
|
||||||
@ -9,7 +9,7 @@ from tests.utils import MongoDBTestCase
|
|||||||
|
|
||||||
class TestDelta(MongoDBTestCase):
|
class TestDelta(MongoDBTestCase):
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
super().setUp()
|
super(TestDelta, self).setUp()
|
||||||
|
|
||||||
class Person(Document):
|
class Person(Document):
|
||||||
name = StringField()
|
name = StringField()
|
||||||
@ -29,8 +29,7 @@ class TestDelta(MongoDBTestCase):
|
|||||||
self.delta(Document)
|
self.delta(Document)
|
||||||
self.delta(DynamicDocument)
|
self.delta(DynamicDocument)
|
||||||
|
|
||||||
@staticmethod
|
def delta(self, DocClass):
|
||||||
def delta(DocClass):
|
|
||||||
class Doc(DocClass):
|
class Doc(DocClass):
|
||||||
string_field = StringField()
|
string_field = StringField()
|
||||||
int_field = IntField()
|
int_field = IntField()
|
||||||
@ -429,20 +428,13 @@ class TestDelta(MongoDBTestCase):
|
|||||||
assert doc.dict_field == {"hello": "world"}
|
assert doc.dict_field == {"hello": "world"}
|
||||||
assert doc.list_field == ["1", 2, {"hello": "world"}]
|
assert doc.list_field == ["1", 2, {"hello": "world"}]
|
||||||
|
|
||||||
def test_delta_recursive_db_field_on_doc_and_embeddeddoc(self):
|
def test_delta_recursive_db_field(self):
|
||||||
self.delta_recursive_db_field(Document, EmbeddedDocument)
|
self.delta_recursive_db_field(Document, EmbeddedDocument)
|
||||||
|
|
||||||
def test_delta_recursive_db_field_on_doc_and_dynamicembeddeddoc(self):
|
|
||||||
self.delta_recursive_db_field(Document, DynamicEmbeddedDocument)
|
self.delta_recursive_db_field(Document, DynamicEmbeddedDocument)
|
||||||
|
|
||||||
def test_delta_recursive_db_field_on_dynamicdoc_and_embeddeddoc(self):
|
|
||||||
self.delta_recursive_db_field(DynamicDocument, EmbeddedDocument)
|
self.delta_recursive_db_field(DynamicDocument, EmbeddedDocument)
|
||||||
|
|
||||||
def test_delta_recursive_db_field_on_dynamicdoc_and_dynamicembeddeddoc(self):
|
|
||||||
self.delta_recursive_db_field(DynamicDocument, DynamicEmbeddedDocument)
|
self.delta_recursive_db_field(DynamicDocument, DynamicEmbeddedDocument)
|
||||||
|
|
||||||
@staticmethod
|
def delta_recursive_db_field(self, DocClass, EmbeddedClass):
|
||||||
def delta_recursive_db_field(DocClass, EmbeddedClass):
|
|
||||||
class Embedded(EmbeddedClass):
|
class Embedded(EmbeddedClass):
|
||||||
string_field = StringField(db_field="db_string_field")
|
string_field = StringField(db_field="db_string_field")
|
||||||
int_field = IntField(db_field="db_int_field")
|
int_field = IntField(db_field="db_int_field")
|
||||||
@ -495,7 +487,6 @@ class TestDelta(MongoDBTestCase):
|
|||||||
doc = doc.reload(10)
|
doc = doc.reload(10)
|
||||||
assert doc.embedded_field.dict_field == {}
|
assert doc.embedded_field.dict_field == {}
|
||||||
|
|
||||||
assert doc._get_changed_fields() == []
|
|
||||||
doc.embedded_field.list_field = []
|
doc.embedded_field.list_field = []
|
||||||
assert doc._get_changed_fields() == ["db_embedded_field.db_list_field"]
|
assert doc._get_changed_fields() == ["db_embedded_field.db_list_field"]
|
||||||
assert doc.embedded_field._delta() == ({}, {"db_list_field": 1})
|
assert doc.embedded_field._delta() == ({}, {"db_list_field": 1})
|
||||||
@ -546,7 +537,6 @@ class TestDelta(MongoDBTestCase):
|
|||||||
{},
|
{},
|
||||||
)
|
)
|
||||||
doc.save()
|
doc.save()
|
||||||
assert doc._get_changed_fields() == []
|
|
||||||
doc = doc.reload(10)
|
doc = doc.reload(10)
|
||||||
|
|
||||||
assert doc.embedded_field.list_field[0] == "1"
|
assert doc.embedded_field.list_field[0] == "1"
|
||||||
@ -644,10 +634,6 @@ class TestDelta(MongoDBTestCase):
|
|||||||
doc.save()
|
doc.save()
|
||||||
doc = doc.reload(10)
|
doc = doc.reload(10)
|
||||||
|
|
||||||
assert doc._delta() == (
|
|
||||||
{},
|
|
||||||
{},
|
|
||||||
)
|
|
||||||
del doc.embedded_field.list_field[2].list_field
|
del doc.embedded_field.list_field[2].list_field
|
||||||
assert doc._delta() == (
|
assert doc._delta() == (
|
||||||
{},
|
{},
|
||||||
@ -746,12 +732,12 @@ class TestDelta(MongoDBTestCase):
|
|||||||
assert organization._get_changed_fields() == []
|
assert organization._get_changed_fields() == []
|
||||||
|
|
||||||
updates, removals = organization._delta()
|
updates, removals = organization._delta()
|
||||||
assert removals == {}
|
assert {} == removals
|
||||||
assert updates == {}
|
assert {} == updates
|
||||||
|
|
||||||
organization.employees.append(person)
|
organization.employees.append(person)
|
||||||
updates, removals = organization._delta()
|
updates, removals = organization._delta()
|
||||||
assert removals == {}
|
assert {} == removals
|
||||||
assert "employees" in updates
|
assert "employees" in updates
|
||||||
|
|
||||||
def test_delta_with_dbref_false(self):
|
def test_delta_with_dbref_false(self):
|
||||||
@ -763,12 +749,12 @@ class TestDelta(MongoDBTestCase):
|
|||||||
assert organization._get_changed_fields() == []
|
assert organization._get_changed_fields() == []
|
||||||
|
|
||||||
updates, removals = organization._delta()
|
updates, removals = organization._delta()
|
||||||
assert removals == {}
|
assert {} == removals
|
||||||
assert updates == {}
|
assert {} == updates
|
||||||
|
|
||||||
organization.employees.append(person)
|
organization.employees.append(person)
|
||||||
updates, removals = organization._delta()
|
updates, removals = organization._delta()
|
||||||
assert removals == {}
|
assert {} == removals
|
||||||
assert "employees" in updates
|
assert "employees" in updates
|
||||||
|
|
||||||
def test_nested_nested_fields_mark_as_changed(self):
|
def test_nested_nested_fields_mark_as_changed(self):
|
||||||
@ -781,46 +767,19 @@ class TestDelta(MongoDBTestCase):
|
|||||||
|
|
||||||
MyDoc.drop_collection()
|
MyDoc.drop_collection()
|
||||||
|
|
||||||
MyDoc(name="testcase1", subs={"a": {"b": EmbeddedDoc(name="foo")}}).save()
|
mydoc = MyDoc(
|
||||||
|
name="testcase1", subs={"a": {"b": EmbeddedDoc(name="foo")}}
|
||||||
|
).save()
|
||||||
|
|
||||||
mydoc = MyDoc.objects.first()
|
mydoc = MyDoc.objects.first()
|
||||||
subdoc = mydoc.subs["a"]["b"]
|
subdoc = mydoc.subs["a"]["b"]
|
||||||
subdoc.name = "bar"
|
subdoc.name = "bar"
|
||||||
|
|
||||||
assert subdoc._get_changed_fields() == ["name"]
|
assert ["name"] == subdoc._get_changed_fields()
|
||||||
assert mydoc._get_changed_fields() == ["subs.a.b.name"]
|
assert ["subs.a.b.name"] == mydoc._get_changed_fields()
|
||||||
|
|
||||||
mydoc._clear_changed_fields()
|
mydoc._clear_changed_fields()
|
||||||
assert mydoc._get_changed_fields() == []
|
assert [] == mydoc._get_changed_fields()
|
||||||
|
|
||||||
def test_nested_nested_fields_db_field_set__gets_mark_as_changed_and_cleaned(self):
|
|
||||||
class EmbeddedDoc(EmbeddedDocument):
|
|
||||||
name = StringField(db_field="db_name")
|
|
||||||
|
|
||||||
class MyDoc(Document):
|
|
||||||
embed = EmbeddedDocumentField(EmbeddedDoc, db_field="db_embed")
|
|
||||||
name = StringField(db_field="db_name")
|
|
||||||
|
|
||||||
MyDoc.drop_collection()
|
|
||||||
|
|
||||||
MyDoc(name="testcase1", embed=EmbeddedDoc(name="foo")).save()
|
|
||||||
|
|
||||||
mydoc = MyDoc.objects.first()
|
|
||||||
mydoc.embed.name = "foo1"
|
|
||||||
|
|
||||||
assert mydoc.embed._get_changed_fields() == ["db_name"]
|
|
||||||
assert mydoc._get_changed_fields() == ["db_embed.db_name"]
|
|
||||||
|
|
||||||
mydoc = MyDoc.objects.first()
|
|
||||||
embed = EmbeddedDoc(name="foo2")
|
|
||||||
embed.name = "bar"
|
|
||||||
mydoc.embed = embed
|
|
||||||
|
|
||||||
assert embed._get_changed_fields() == ["db_name"]
|
|
||||||
assert mydoc._get_changed_fields() == ["db_embed"]
|
|
||||||
|
|
||||||
mydoc._clear_changed_fields()
|
|
||||||
assert mydoc._get_changed_fields() == []
|
|
||||||
|
|
||||||
def test_lower_level_mark_as_changed(self):
|
def test_lower_level_mark_as_changed(self):
|
||||||
class EmbeddedDoc(EmbeddedDocument):
|
class EmbeddedDoc(EmbeddedDocument):
|
||||||
@ -835,17 +794,17 @@ class TestDelta(MongoDBTestCase):
|
|||||||
|
|
||||||
mydoc = MyDoc.objects.first()
|
mydoc = MyDoc.objects.first()
|
||||||
mydoc.subs["a"] = EmbeddedDoc()
|
mydoc.subs["a"] = EmbeddedDoc()
|
||||||
assert mydoc._get_changed_fields() == ["subs.a"]
|
assert ["subs.a"] == mydoc._get_changed_fields()
|
||||||
|
|
||||||
subdoc = mydoc.subs["a"]
|
subdoc = mydoc.subs["a"]
|
||||||
subdoc.name = "bar"
|
subdoc.name = "bar"
|
||||||
|
|
||||||
assert subdoc._get_changed_fields() == ["name"]
|
assert ["name"] == subdoc._get_changed_fields()
|
||||||
assert mydoc._get_changed_fields() == ["subs.a"]
|
assert ["subs.a"] == mydoc._get_changed_fields()
|
||||||
mydoc.save()
|
mydoc.save()
|
||||||
|
|
||||||
mydoc._clear_changed_fields()
|
mydoc._clear_changed_fields()
|
||||||
assert mydoc._get_changed_fields() == []
|
assert [] == mydoc._get_changed_fields()
|
||||||
|
|
||||||
def test_upper_level_mark_as_changed(self):
|
def test_upper_level_mark_as_changed(self):
|
||||||
class EmbeddedDoc(EmbeddedDocument):
|
class EmbeddedDoc(EmbeddedDocument):
|
||||||
@ -862,15 +821,15 @@ class TestDelta(MongoDBTestCase):
|
|||||||
subdoc = mydoc.subs["a"]
|
subdoc = mydoc.subs["a"]
|
||||||
subdoc.name = "bar"
|
subdoc.name = "bar"
|
||||||
|
|
||||||
assert subdoc._get_changed_fields() == ["name"]
|
assert ["name"] == subdoc._get_changed_fields()
|
||||||
assert mydoc._get_changed_fields() == ["subs.a.name"]
|
assert ["subs.a.name"] == mydoc._get_changed_fields()
|
||||||
|
|
||||||
mydoc.subs["a"] = EmbeddedDoc()
|
mydoc.subs["a"] = EmbeddedDoc()
|
||||||
assert mydoc._get_changed_fields() == ["subs.a"]
|
assert ["subs.a"] == mydoc._get_changed_fields()
|
||||||
mydoc.save()
|
mydoc.save()
|
||||||
|
|
||||||
mydoc._clear_changed_fields()
|
mydoc._clear_changed_fields()
|
||||||
assert mydoc._get_changed_fields() == []
|
assert [] == mydoc._get_changed_fields()
|
||||||
|
|
||||||
def test_referenced_object_changed_attributes(self):
|
def test_referenced_object_changed_attributes(self):
|
||||||
"""Ensures that when you save a new reference to a field, the referenced object isn't altered"""
|
"""Ensures that when you save a new reference to a field, the referenced object isn't altered"""
|
||||||
|
@ -10,7 +10,7 @@ __all__ = ("TestDynamicDocument",)
|
|||||||
|
|
||||||
class TestDynamicDocument(MongoDBTestCase):
|
class TestDynamicDocument(MongoDBTestCase):
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
super().setUp()
|
super(TestDynamicDocument, self).setUp()
|
||||||
|
|
||||||
class Person(DynamicDocument):
|
class Person(DynamicDocument):
|
||||||
name = StringField()
|
name = StringField()
|
||||||
@ -37,19 +37,6 @@ class TestDynamicDocument(MongoDBTestCase):
|
|||||||
# Confirm no changes to self.Person
|
# Confirm no changes to self.Person
|
||||||
assert not hasattr(self.Person, "age")
|
assert not hasattr(self.Person, "age")
|
||||||
|
|
||||||
def test_dynamic_document_parse_values_in_constructor_like_document_do(self):
|
|
||||||
class ProductDynamicDocument(DynamicDocument):
|
|
||||||
title = StringField()
|
|
||||||
price = FloatField()
|
|
||||||
|
|
||||||
class ProductDocument(Document):
|
|
||||||
title = StringField()
|
|
||||||
price = FloatField()
|
|
||||||
|
|
||||||
product = ProductDocument(title="Blabla", price="12.5")
|
|
||||||
dyn_product = ProductDynamicDocument(title="Blabla", price="12.5")
|
|
||||||
assert product.price == dyn_product.price == 12.5
|
|
||||||
|
|
||||||
def test_change_scope_of_variable(self):
|
def test_change_scope_of_variable(self):
|
||||||
"""Test changing the scope of a dynamic field has no adverse effects"""
|
"""Test changing the scope of a dynamic field has no adverse effects"""
|
||||||
p = self.Person()
|
p = self.Person()
|
||||||
@ -118,17 +105,17 @@ class TestDynamicDocument(MongoDBTestCase):
|
|||||||
p.save()
|
p.save()
|
||||||
|
|
||||||
raw_p = Person.objects.as_pymongo().get(id=p.id)
|
raw_p = Person.objects.as_pymongo().get(id=p.id)
|
||||||
assert raw_p == {"_cls": "Person", "_id": p.id, "name": "Dean"}
|
assert raw_p == {"_cls": u"Person", "_id": p.id, "name": u"Dean"}
|
||||||
|
|
||||||
p.name = "OldDean"
|
p.name = "OldDean"
|
||||||
p.newattr = "garbage"
|
p.newattr = "garbage"
|
||||||
p.save()
|
p.save()
|
||||||
raw_p = Person.objects.as_pymongo().get(id=p.id)
|
raw_p = Person.objects.as_pymongo().get(id=p.id)
|
||||||
assert raw_p == {
|
assert raw_p == {
|
||||||
"_cls": "Person",
|
"_cls": u"Person",
|
||||||
"_id": p.id,
|
"_id": p.id,
|
||||||
"name": "OldDean",
|
"name": "OldDean",
|
||||||
"newattr": "garbage",
|
"newattr": u"garbage",
|
||||||
}
|
}
|
||||||
|
|
||||||
def test_fields_containing_underscore(self):
|
def test_fields_containing_underscore(self):
|
||||||
@ -144,14 +131,14 @@ class TestDynamicDocument(MongoDBTestCase):
|
|||||||
p.save()
|
p.save()
|
||||||
|
|
||||||
raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id)
|
raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id)
|
||||||
assert raw_p == {"_id": p.id, "_name": "Dean", "name": "Dean"}
|
assert raw_p == {"_id": p.id, "_name": u"Dean", "name": u"Dean"}
|
||||||
|
|
||||||
p.name = "OldDean"
|
p.name = "OldDean"
|
||||||
p._name = "NewDean"
|
p._name = "NewDean"
|
||||||
p._newattr1 = "garbage" # Unknown fields won't be added
|
p._newattr1 = "garbage" # Unknown fields won't be added
|
||||||
p.save()
|
p.save()
|
||||||
raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id)
|
raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id)
|
||||||
assert raw_p == {"_id": p.id, "_name": "NewDean", "name": "OldDean"}
|
assert raw_p == {"_id": p.id, "_name": u"NewDean", "name": u"OldDean"}
|
||||||
|
|
||||||
def test_dynamic_document_queries(self):
|
def test_dynamic_document_queries(self):
|
||||||
"""Ensure we can query dynamic fields"""
|
"""Ensure we can query dynamic fields"""
|
||||||
|
@ -1,16 +1,14 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
import unittest
|
import unittest
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
|
||||||
import pytest
|
|
||||||
from pymongo.collation import Collation
|
from pymongo.collation import Collation
|
||||||
from pymongo.errors import OperationFailure
|
from pymongo.errors import OperationFailure
|
||||||
|
import pytest
|
||||||
|
from six import iteritems
|
||||||
|
|
||||||
from mongoengine import *
|
from mongoengine import *
|
||||||
from mongoengine.connection import get_db
|
from mongoengine.connection import get_db
|
||||||
from mongoengine.mongodb_support import (
|
|
||||||
MONGODB_42,
|
|
||||||
get_mongodb_version,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class TestIndexes(unittest.TestCase):
|
class TestIndexes(unittest.TestCase):
|
||||||
@ -61,7 +59,7 @@ class TestIndexes(unittest.TestCase):
|
|||||||
info = BlogPost.objects._collection.index_information()
|
info = BlogPost.objects._collection.index_information()
|
||||||
# _id, '-date', 'tags', ('cat', 'date')
|
# _id, '-date', 'tags', ('cat', 'date')
|
||||||
assert len(info) == 4
|
assert len(info) == 4
|
||||||
info = [value["key"] for key, value in info.items()]
|
info = [value["key"] for key, value in iteritems(info)]
|
||||||
for expected in expected_specs:
|
for expected in expected_specs:
|
||||||
assert expected["fields"] in info
|
assert expected["fields"] in info
|
||||||
|
|
||||||
@ -89,7 +87,7 @@ class TestIndexes(unittest.TestCase):
|
|||||||
# the indices on -date and tags will both contain
|
# the indices on -date and tags will both contain
|
||||||
# _cls as first element in the key
|
# _cls as first element in the key
|
||||||
assert len(info) == 4
|
assert len(info) == 4
|
||||||
info = [value["key"] for key, value in info.items()]
|
info = [value["key"] for key, value in iteritems(info)]
|
||||||
for expected in expected_specs:
|
for expected in expected_specs:
|
||||||
assert expected["fields"] in info
|
assert expected["fields"] in info
|
||||||
|
|
||||||
@ -104,7 +102,7 @@ class TestIndexes(unittest.TestCase):
|
|||||||
|
|
||||||
ExtendedBlogPost.ensure_indexes()
|
ExtendedBlogPost.ensure_indexes()
|
||||||
info = ExtendedBlogPost.objects._collection.index_information()
|
info = ExtendedBlogPost.objects._collection.index_information()
|
||||||
info = [value["key"] for key, value in info.items()]
|
info = [value["key"] for key, value in iteritems(info)]
|
||||||
for expected in expected_specs:
|
for expected in expected_specs:
|
||||||
assert expected["fields"] in info
|
assert expected["fields"] in info
|
||||||
|
|
||||||
@ -175,7 +173,8 @@ class TestIndexes(unittest.TestCase):
|
|||||||
assert MyDoc._meta["index_specs"] == [{"fields": [("keywords", 1)]}]
|
assert MyDoc._meta["index_specs"] == [{"fields": [("keywords", 1)]}]
|
||||||
|
|
||||||
def test_embedded_document_index_meta(self):
|
def test_embedded_document_index_meta(self):
|
||||||
"""Ensure that embedded document indexes are created explicitly"""
|
"""Ensure that embedded document indexes are created explicitly
|
||||||
|
"""
|
||||||
|
|
||||||
class Rank(EmbeddedDocument):
|
class Rank(EmbeddedDocument):
|
||||||
title = StringField(required=True)
|
title = StringField(required=True)
|
||||||
@ -193,11 +192,12 @@ class TestIndexes(unittest.TestCase):
|
|||||||
# Indexes are lazy so use list() to perform query
|
# Indexes are lazy so use list() to perform query
|
||||||
list(Person.objects)
|
list(Person.objects)
|
||||||
info = Person.objects._collection.index_information()
|
info = Person.objects._collection.index_information()
|
||||||
info = [value["key"] for key, value in info.items()]
|
info = [value["key"] for key, value in iteritems(info)]
|
||||||
assert [("rank.title", 1)] in info
|
assert [("rank.title", 1)] in info
|
||||||
|
|
||||||
def test_explicit_geo2d_index(self):
|
def test_explicit_geo2d_index(self):
|
||||||
"""Ensure that geo2d indexes work when created via meta[indexes]"""
|
"""Ensure that geo2d indexes work when created via meta[indexes]
|
||||||
|
"""
|
||||||
|
|
||||||
class Place(Document):
|
class Place(Document):
|
||||||
location = DictField()
|
location = DictField()
|
||||||
@ -207,11 +207,12 @@ class TestIndexes(unittest.TestCase):
|
|||||||
|
|
||||||
Place.ensure_indexes()
|
Place.ensure_indexes()
|
||||||
info = Place._get_collection().index_information()
|
info = Place._get_collection().index_information()
|
||||||
info = [value["key"] for key, value in info.items()]
|
info = [value["key"] for key, value in iteritems(info)]
|
||||||
assert [("location.point", "2d")] in info
|
assert [("location.point", "2d")] in info
|
||||||
|
|
||||||
def test_explicit_geo2d_index_embedded(self):
|
def test_explicit_geo2d_index_embedded(self):
|
||||||
"""Ensure that geo2d indexes work when created via meta[indexes]"""
|
"""Ensure that geo2d indexes work when created via meta[indexes]
|
||||||
|
"""
|
||||||
|
|
||||||
class EmbeddedLocation(EmbeddedDocument):
|
class EmbeddedLocation(EmbeddedDocument):
|
||||||
location = DictField()
|
location = DictField()
|
||||||
@ -226,11 +227,12 @@ class TestIndexes(unittest.TestCase):
|
|||||||
|
|
||||||
Place.ensure_indexes()
|
Place.ensure_indexes()
|
||||||
info = Place._get_collection().index_information()
|
info = Place._get_collection().index_information()
|
||||||
info = [value["key"] for key, value in info.items()]
|
info = [value["key"] for key, value in iteritems(info)]
|
||||||
assert [("current.location.point", "2d")] in info
|
assert [("current.location.point", "2d")] in info
|
||||||
|
|
||||||
def test_explicit_geosphere_index(self):
|
def test_explicit_geosphere_index(self):
|
||||||
"""Ensure that geosphere indexes work when created via meta[indexes]"""
|
"""Ensure that geosphere indexes work when created via meta[indexes]
|
||||||
|
"""
|
||||||
|
|
||||||
class Place(Document):
|
class Place(Document):
|
||||||
location = DictField()
|
location = DictField()
|
||||||
@ -242,11 +244,12 @@ class TestIndexes(unittest.TestCase):
|
|||||||
|
|
||||||
Place.ensure_indexes()
|
Place.ensure_indexes()
|
||||||
info = Place._get_collection().index_information()
|
info = Place._get_collection().index_information()
|
||||||
info = [value["key"] for key, value in info.items()]
|
info = [value["key"] for key, value in iteritems(info)]
|
||||||
assert [("location.point", "2dsphere")] in info
|
assert [("location.point", "2dsphere")] in info
|
||||||
|
|
||||||
def test_explicit_geohaystack_index(self):
|
def test_explicit_geohaystack_index(self):
|
||||||
"""Ensure that geohaystack indexes work when created via meta[indexes]"""
|
"""Ensure that geohaystack indexes work when created via meta[indexes]
|
||||||
|
"""
|
||||||
pytest.skip(
|
pytest.skip(
|
||||||
"GeoHaystack index creation is not supported for now"
|
"GeoHaystack index creation is not supported for now"
|
||||||
"from meta, as it requires a bucketSize parameter."
|
"from meta, as it requires a bucketSize parameter."
|
||||||
@ -263,11 +266,12 @@ class TestIndexes(unittest.TestCase):
|
|||||||
|
|
||||||
Place.ensure_indexes()
|
Place.ensure_indexes()
|
||||||
info = Place._get_collection().index_information()
|
info = Place._get_collection().index_information()
|
||||||
info = [value["key"] for key, value in info.items()]
|
info = [value["key"] for key, value in iteritems(info)]
|
||||||
assert [("location.point", "geoHaystack")] in info
|
assert [("location.point", "geoHaystack")] in info
|
||||||
|
|
||||||
def test_create_geohaystack_index(self):
|
def test_create_geohaystack_index(self):
|
||||||
"""Ensure that geohaystack indexes can be created"""
|
"""Ensure that geohaystack indexes can be created
|
||||||
|
"""
|
||||||
|
|
||||||
class Place(Document):
|
class Place(Document):
|
||||||
location = DictField()
|
location = DictField()
|
||||||
@ -275,7 +279,7 @@ class TestIndexes(unittest.TestCase):
|
|||||||
|
|
||||||
Place.create_index({"fields": (")location.point", "name")}, bucketSize=10)
|
Place.create_index({"fields": (")location.point", "name")}, bucketSize=10)
|
||||||
info = Place._get_collection().index_information()
|
info = Place._get_collection().index_information()
|
||||||
info = [value["key"] for key, value in info.items()]
|
info = [value["key"] for key, value in iteritems(info)]
|
||||||
assert [("location.point", "geoHaystack"), ("name", 1)] in info
|
assert [("location.point", "geoHaystack"), ("name", 1)] in info
|
||||||
|
|
||||||
def test_dictionary_indexes(self):
|
def test_dictionary_indexes(self):
|
||||||
@ -304,7 +308,7 @@ class TestIndexes(unittest.TestCase):
|
|||||||
info = BlogPost.objects._collection.index_information()
|
info = BlogPost.objects._collection.index_information()
|
||||||
info = [
|
info = [
|
||||||
(value["key"], value.get("unique", False), value.get("sparse", False))
|
(value["key"], value.get("unique", False), value.get("sparse", False))
|
||||||
for key, value in info.items()
|
for key, value in iteritems(info)
|
||||||
]
|
]
|
||||||
assert ([("addDate", -1)], True, True) in info
|
assert ([("addDate", -1)], True, True) in info
|
||||||
|
|
||||||
@ -362,7 +366,8 @@ class TestIndexes(unittest.TestCase):
|
|||||||
assert sorted(info.keys()) == ["_cls_1_user_guid_1", "_id_"]
|
assert sorted(info.keys()) == ["_cls_1_user_guid_1", "_id_"]
|
||||||
|
|
||||||
def test_embedded_document_index(self):
|
def test_embedded_document_index(self):
|
||||||
"""Tests settings an index on an embedded document"""
|
"""Tests settings an index on an embedded document
|
||||||
|
"""
|
||||||
|
|
||||||
class Date(EmbeddedDocument):
|
class Date(EmbeddedDocument):
|
||||||
year = IntField(db_field="yr")
|
year = IntField(db_field="yr")
|
||||||
@ -379,7 +384,8 @@ class TestIndexes(unittest.TestCase):
|
|||||||
assert sorted(info.keys()) == ["_id_", "date.yr_-1"]
|
assert sorted(info.keys()) == ["_id_", "date.yr_-1"]
|
||||||
|
|
||||||
def test_list_embedded_document_index(self):
|
def test_list_embedded_document_index(self):
|
||||||
"""Ensure list embedded documents can be indexed"""
|
"""Ensure list embedded documents can be indexed
|
||||||
|
"""
|
||||||
|
|
||||||
class Tag(EmbeddedDocument):
|
class Tag(EmbeddedDocument):
|
||||||
name = StringField(db_field="tag")
|
name = StringField(db_field="tag")
|
||||||
@ -415,7 +421,8 @@ class TestIndexes(unittest.TestCase):
|
|||||||
assert sorted(info.keys()) == ["_cls_1", "_id_"]
|
assert sorted(info.keys()) == ["_cls_1", "_id_"]
|
||||||
|
|
||||||
def test_covered_index(self):
|
def test_covered_index(self):
|
||||||
"""Ensure that covered indexes can be used"""
|
"""Ensure that covered indexes can be used
|
||||||
|
"""
|
||||||
|
|
||||||
class Test(Document):
|
class Test(Document):
|
||||||
a = IntField()
|
a = IntField()
|
||||||
@ -456,11 +463,9 @@ class TestIndexes(unittest.TestCase):
|
|||||||
.get("stage")
|
.get("stage")
|
||||||
== "IXSCAN"
|
== "IXSCAN"
|
||||||
)
|
)
|
||||||
mongo_db = get_mongodb_version()
|
|
||||||
PROJECTION_STR = "PROJECTION" if mongo_db < MONGODB_42 else "PROJECTION_COVERED"
|
|
||||||
assert (
|
assert (
|
||||||
query_plan.get("queryPlanner").get("winningPlan").get("stage")
|
query_plan.get("queryPlanner").get("winningPlan").get("stage")
|
||||||
== PROJECTION_STR
|
== "PROJECTION"
|
||||||
)
|
)
|
||||||
|
|
||||||
query_plan = Test.objects(a=1).explain()
|
query_plan = Test.objects(a=1).explain()
|
||||||
@ -547,15 +552,15 @@ class TestIndexes(unittest.TestCase):
|
|||||||
assert 5 == query_result.count()
|
assert 5 == query_result.count()
|
||||||
|
|
||||||
incorrect_collation = {"arndom": "wrdo"}
|
incorrect_collation = {"arndom": "wrdo"}
|
||||||
with pytest.raises(OperationFailure) as exc_info:
|
with pytest.raises(OperationFailure):
|
||||||
BlogPost.objects.collation(incorrect_collation).count()
|
BlogPost.objects.collation(incorrect_collation).count()
|
||||||
assert "Missing expected field" in str(exc_info.value)
|
|
||||||
|
|
||||||
query_result = BlogPost.objects.collation({}).order_by("name")
|
query_result = BlogPost.objects.collation({}).order_by("name")
|
||||||
assert [x.name for x in query_result] == sorted(names)
|
assert [x.name for x in query_result] == sorted(names)
|
||||||
|
|
||||||
def test_unique(self):
|
def test_unique(self):
|
||||||
"""Ensure that uniqueness constraints are applied to fields."""
|
"""Ensure that uniqueness constraints are applied to fields.
|
||||||
|
"""
|
||||||
|
|
||||||
class BlogPost(Document):
|
class BlogPost(Document):
|
||||||
title = StringField()
|
title = StringField()
|
||||||
@ -603,7 +608,8 @@ class TestIndexes(unittest.TestCase):
|
|||||||
)
|
)
|
||||||
|
|
||||||
def test_unique_with(self):
|
def test_unique_with(self):
|
||||||
"""Ensure that unique_with constraints are applied to fields."""
|
"""Ensure that unique_with constraints are applied to fields.
|
||||||
|
"""
|
||||||
|
|
||||||
class Date(EmbeddedDocument):
|
class Date(EmbeddedDocument):
|
||||||
year = IntField(db_field="yr")
|
year = IntField(db_field="yr")
|
||||||
@ -628,7 +634,8 @@ class TestIndexes(unittest.TestCase):
|
|||||||
post3.save()
|
post3.save()
|
||||||
|
|
||||||
def test_unique_embedded_document(self):
|
def test_unique_embedded_document(self):
|
||||||
"""Ensure that uniqueness constraints are applied to fields on embedded documents."""
|
"""Ensure that uniqueness constraints are applied to fields on embedded documents.
|
||||||
|
"""
|
||||||
|
|
||||||
class SubDocument(EmbeddedDocument):
|
class SubDocument(EmbeddedDocument):
|
||||||
year = IntField(db_field="yr")
|
year = IntField(db_field="yr")
|
||||||
@ -799,6 +806,18 @@ class TestIndexes(unittest.TestCase):
|
|||||||
info = Log.objects._collection.index_information()
|
info = Log.objects._collection.index_information()
|
||||||
assert 3600 == info["created_1"]["expireAfterSeconds"]
|
assert 3600 == info["created_1"]["expireAfterSeconds"]
|
||||||
|
|
||||||
|
def test_index_drop_dups_silently_ignored(self):
|
||||||
|
class Customer(Document):
|
||||||
|
cust_id = IntField(unique=True, required=True)
|
||||||
|
meta = {
|
||||||
|
"indexes": ["cust_id"],
|
||||||
|
"index_drop_dups": True,
|
||||||
|
"allow_inheritance": False,
|
||||||
|
}
|
||||||
|
|
||||||
|
Customer.drop_collection()
|
||||||
|
Customer.objects.first()
|
||||||
|
|
||||||
def test_unique_and_indexes(self):
|
def test_unique_and_indexes(self):
|
||||||
"""Ensure that 'unique' constraints aren't overridden by
|
"""Ensure that 'unique' constraints aren't overridden by
|
||||||
meta.indexes.
|
meta.indexes.
|
||||||
@ -882,7 +901,7 @@ class TestIndexes(unittest.TestCase):
|
|||||||
self.fail("Unbound local error at index + pk definition")
|
self.fail("Unbound local error at index + pk definition")
|
||||||
|
|
||||||
info = BlogPost.objects._collection.index_information()
|
info = BlogPost.objects._collection.index_information()
|
||||||
info = [value["key"] for key, value in info.items()]
|
info = [value["key"] for key, value in iteritems(info)]
|
||||||
index_item = [("_id", 1), ("comments.comment_id", 1)]
|
index_item = [("_id", 1), ("comments.comment_id", 1)]
|
||||||
assert index_item in info
|
assert index_item in info
|
||||||
|
|
||||||
@ -923,7 +942,7 @@ class TestIndexes(unittest.TestCase):
|
|||||||
meta = {"indexes": ["provider_ids.foo", "provider_ids.bar"]}
|
meta = {"indexes": ["provider_ids.foo", "provider_ids.bar"]}
|
||||||
|
|
||||||
info = MyDoc.objects._collection.index_information()
|
info = MyDoc.objects._collection.index_information()
|
||||||
info = [value["key"] for key, value in info.items()]
|
info = [value["key"] for key, value in iteritems(info)]
|
||||||
assert [("provider_ids.foo", 1)] in info
|
assert [("provider_ids.foo", 1)] in info
|
||||||
assert [("provider_ids.bar", 1)] in info
|
assert [("provider_ids.bar", 1)] in info
|
||||||
|
|
||||||
@ -1039,6 +1058,10 @@ class TestIndexes(unittest.TestCase):
|
|||||||
del index_info[key][
|
del index_info[key][
|
||||||
"ns"
|
"ns"
|
||||||
] # drop the index namespace - we don't care about that here, MongoDB 3+
|
] # drop the index namespace - we don't care about that here, MongoDB 3+
|
||||||
|
if "dropDups" in index_info[key]:
|
||||||
|
del index_info[key][
|
||||||
|
"dropDups"
|
||||||
|
] # drop the index dropDups - it is deprecated in MongoDB 3+
|
||||||
|
|
||||||
assert index_info == {
|
assert index_info == {
|
||||||
"txt_1": {"key": [("txt", 1)], "background": False},
|
"txt_1": {"key": [("txt", 1)], "background": False},
|
||||||
|
@ -1,7 +1,9 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
import unittest
|
import unittest
|
||||||
import warnings
|
import warnings
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
from six import iteritems
|
||||||
|
|
||||||
from mongoengine import (
|
from mongoengine import (
|
||||||
BooleanField,
|
BooleanField,
|
||||||
@ -45,7 +47,8 @@ class TestInheritance(MongoDBTestCase):
|
|||||||
test_doc.delete()
|
test_doc.delete()
|
||||||
|
|
||||||
def test_superclasses(self):
|
def test_superclasses(self):
|
||||||
"""Ensure that the correct list of superclasses is assembled."""
|
"""Ensure that the correct list of superclasses is assembled.
|
||||||
|
"""
|
||||||
|
|
||||||
class Animal(Document):
|
class Animal(Document):
|
||||||
meta = {"allow_inheritance": True}
|
meta = {"allow_inheritance": True}
|
||||||
@ -215,7 +218,8 @@ class TestInheritance(MongoDBTestCase):
|
|||||||
assert Pike._subclasses == ("Animal.Fish.Pike",)
|
assert Pike._subclasses == ("Animal.Fish.Pike",)
|
||||||
|
|
||||||
def test_inheritance_meta_data(self):
|
def test_inheritance_meta_data(self):
|
||||||
"""Ensure that document may inherit fields from a superclass document."""
|
"""Ensure that document may inherit fields from a superclass document.
|
||||||
|
"""
|
||||||
|
|
||||||
class Person(Document):
|
class Person(Document):
|
||||||
name = StringField()
|
name = StringField()
|
||||||
@ -232,7 +236,8 @@ class TestInheritance(MongoDBTestCase):
|
|||||||
assert Employee._get_collection_name() == Person._get_collection_name()
|
assert Employee._get_collection_name() == Person._get_collection_name()
|
||||||
|
|
||||||
def test_inheritance_to_mongo_keys(self):
|
def test_inheritance_to_mongo_keys(self):
|
||||||
"""Ensure that document may inherit fields from a superclass document."""
|
"""Ensure that document may inherit fields from a superclass document.
|
||||||
|
"""
|
||||||
|
|
||||||
class Person(Document):
|
class Person(Document):
|
||||||
name = StringField()
|
name = StringField()
|
||||||
@ -256,7 +261,7 @@ class TestInheritance(MongoDBTestCase):
|
|||||||
assert Employee._get_collection_name() == Person._get_collection_name()
|
assert Employee._get_collection_name() == Person._get_collection_name()
|
||||||
|
|
||||||
def test_indexes_and_multiple_inheritance(self):
|
def test_indexes_and_multiple_inheritance(self):
|
||||||
"""Ensure that all of the indexes are created for a document with
|
""" Ensure that all of the indexes are created for a document with
|
||||||
multiple inheritance.
|
multiple inheritance.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@ -280,11 +285,14 @@ class TestInheritance(MongoDBTestCase):
|
|||||||
C.ensure_indexes()
|
C.ensure_indexes()
|
||||||
|
|
||||||
assert sorted(
|
assert sorted(
|
||||||
idx["key"] for idx in C._get_collection().index_information().values()
|
[idx["key"] for idx in C._get_collection().index_information().values()]
|
||||||
) == sorted([[("_cls", 1), ("b", 1)], [("_id", 1)], [("_cls", 1), ("a", 1)]])
|
) == sorted(
|
||||||
|
[[(u"_cls", 1), (u"b", 1)], [(u"_id", 1)], [(u"_cls", 1), (u"a", 1)]]
|
||||||
|
)
|
||||||
|
|
||||||
def test_polymorphic_queries(self):
|
def test_polymorphic_queries(self):
|
||||||
"""Ensure that the correct subclasses are returned from a query"""
|
"""Ensure that the correct subclasses are returned from a query
|
||||||
|
"""
|
||||||
|
|
||||||
class Animal(Document):
|
class Animal(Document):
|
||||||
meta = {"allow_inheritance": True}
|
meta = {"allow_inheritance": True}
|
||||||
@ -341,7 +349,8 @@ class TestInheritance(MongoDBTestCase):
|
|||||||
assert "_cls" not in obj
|
assert "_cls" not in obj
|
||||||
|
|
||||||
def test_cant_turn_off_inheritance_on_subclass(self):
|
def test_cant_turn_off_inheritance_on_subclass(self):
|
||||||
"""Ensure if inheritance is on in a subclass you cant turn it off."""
|
"""Ensure if inheritance is on in a subclass you cant turn it off.
|
||||||
|
"""
|
||||||
|
|
||||||
class Animal(Document):
|
class Animal(Document):
|
||||||
name = StringField()
|
name = StringField()
|
||||||
@ -467,7 +476,7 @@ class TestInheritance(MongoDBTestCase):
|
|||||||
assert city.pk is None
|
assert city.pk is None
|
||||||
# TODO: expected error? Shouldn't we create a new error type?
|
# TODO: expected error? Shouldn't we create a new error type?
|
||||||
with pytest.raises(KeyError):
|
with pytest.raises(KeyError):
|
||||||
city.pk = 1
|
setattr(city, "pk", 1)
|
||||||
|
|
||||||
def test_allow_inheritance_embedded_document(self):
|
def test_allow_inheritance_embedded_document(self):
|
||||||
"""Ensure embedded documents respect inheritance."""
|
"""Ensure embedded documents respect inheritance."""
|
||||||
@ -491,7 +500,8 @@ class TestInheritance(MongoDBTestCase):
|
|||||||
assert "_cls" in doc.to_mongo()
|
assert "_cls" in doc.to_mongo()
|
||||||
|
|
||||||
def test_document_inheritance(self):
|
def test_document_inheritance(self):
|
||||||
"""Ensure mutliple inheritance of abstract documents"""
|
"""Ensure mutliple inheritance of abstract documents
|
||||||
|
"""
|
||||||
|
|
||||||
class DateCreatedDocument(Document):
|
class DateCreatedDocument(Document):
|
||||||
meta = {"allow_inheritance": True, "abstract": True}
|
meta = {"allow_inheritance": True, "abstract": True}
|
||||||
@ -499,8 +509,13 @@ class TestInheritance(MongoDBTestCase):
|
|||||||
class DateUpdatedDocument(Document):
|
class DateUpdatedDocument(Document):
|
||||||
meta = {"allow_inheritance": True, "abstract": True}
|
meta = {"allow_inheritance": True, "abstract": True}
|
||||||
|
|
||||||
class MyDocument(DateCreatedDocument, DateUpdatedDocument):
|
try:
|
||||||
pass
|
|
||||||
|
class MyDocument(DateCreatedDocument, DateUpdatedDocument):
|
||||||
|
pass
|
||||||
|
|
||||||
|
except Exception:
|
||||||
|
assert False, "Couldn't create MyDocument class"
|
||||||
|
|
||||||
def test_abstract_documents(self):
|
def test_abstract_documents(self):
|
||||||
"""Ensure that a document superclass can be marked as abstract
|
"""Ensure that a document superclass can be marked as abstract
|
||||||
@ -508,6 +523,7 @@ class TestInheritance(MongoDBTestCase):
|
|||||||
|
|
||||||
defaults = {
|
defaults = {
|
||||||
"index_background": True,
|
"index_background": True,
|
||||||
|
"index_drop_dups": True,
|
||||||
"index_opts": {"hello": "world"},
|
"index_opts": {"hello": "world"},
|
||||||
"allow_inheritance": True,
|
"allow_inheritance": True,
|
||||||
"queryset_class": "QuerySet",
|
"queryset_class": "QuerySet",
|
||||||
@ -534,7 +550,7 @@ class TestInheritance(MongoDBTestCase):
|
|||||||
class Human(Mammal):
|
class Human(Mammal):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
for k, v in defaults.items():
|
for k, v in iteritems(defaults):
|
||||||
for cls in [Animal, Fish, Guppy]:
|
for cls in [Animal, Fish, Guppy]:
|
||||||
assert cls._meta[k] == v
|
assert cls._meta[k] == v
|
||||||
|
|
||||||
|
@ -1,3 +1,4 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
import os
|
import os
|
||||||
import pickle
|
import pickle
|
||||||
import unittest
|
import unittest
|
||||||
@ -6,9 +7,10 @@ import weakref
|
|||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
|
||||||
import bson
|
import bson
|
||||||
import pytest
|
|
||||||
from bson import DBRef, ObjectId
|
from bson import DBRef, ObjectId
|
||||||
from pymongo.errors import DuplicateKeyError
|
from pymongo.errors import DuplicateKeyError
|
||||||
|
import pytest
|
||||||
|
from six import iteritems
|
||||||
|
|
||||||
from mongoengine import *
|
from mongoengine import *
|
||||||
from mongoengine import signals
|
from mongoengine import signals
|
||||||
@ -23,11 +25,7 @@ from mongoengine.errors import (
|
|||||||
NotUniqueError,
|
NotUniqueError,
|
||||||
SaveConditionError,
|
SaveConditionError,
|
||||||
)
|
)
|
||||||
from mongoengine.mongodb_support import (
|
from mongoengine.mongodb_support import MONGODB_34, MONGODB_36, get_mongodb_version
|
||||||
MONGODB_34,
|
|
||||||
MONGODB_36,
|
|
||||||
get_mongodb_version,
|
|
||||||
)
|
|
||||||
from mongoengine.pymongo_support import list_collection_names
|
from mongoengine.pymongo_support import list_collection_names
|
||||||
from mongoengine.queryset import NULLIFY, Q
|
from mongoengine.queryset import NULLIFY, Q
|
||||||
from tests import fixtures
|
from tests import fixtures
|
||||||
@ -65,12 +63,12 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
for collection in list_collection_names(self.db):
|
for collection in list_collection_names(self.db):
|
||||||
self.db.drop_collection(collection)
|
self.db.drop_collection(collection)
|
||||||
|
|
||||||
def _assert_db_equal(self, docs):
|
def assertDbEqual(self, docs):
|
||||||
assert list(self.Person._get_collection().find().sort("id")) == sorted(
|
assert list(self.Person._get_collection().find().sort("id")) == sorted(
|
||||||
docs, key=lambda doc: doc["_id"]
|
docs, key=lambda doc: doc["_id"]
|
||||||
)
|
)
|
||||||
|
|
||||||
def _assert_has_instance(self, field, instance):
|
def assertHasInstance(self, field, instance):
|
||||||
assert hasattr(field, "_instance")
|
assert hasattr(field, "_instance")
|
||||||
assert field._instance is not None
|
assert field._instance is not None
|
||||||
if isinstance(field._instance, weakref.ProxyType):
|
if isinstance(field._instance, weakref.ProxyType):
|
||||||
@ -164,7 +162,8 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
Log.objects
|
Log.objects
|
||||||
|
|
||||||
def test_repr(self):
|
def test_repr(self):
|
||||||
"""Ensure that unicode representation works"""
|
"""Ensure that unicode representation works
|
||||||
|
"""
|
||||||
|
|
||||||
class Article(Document):
|
class Article(Document):
|
||||||
title = StringField()
|
title = StringField()
|
||||||
@ -172,7 +171,7 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
def __unicode__(self):
|
def __unicode__(self):
|
||||||
return self.title
|
return self.title
|
||||||
|
|
||||||
doc = Article(title="привет мир")
|
doc = Article(title=u"привет мир")
|
||||||
|
|
||||||
assert "<Article: привет мир>" == repr(doc)
|
assert "<Article: привет мир>" == repr(doc)
|
||||||
|
|
||||||
@ -185,12 +184,12 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
def __str__(self):
|
def __str__(self):
|
||||||
return None
|
return None
|
||||||
|
|
||||||
doc = Article(title="привет мир")
|
doc = Article(title=u"привет мир")
|
||||||
assert "<Article: None>" == repr(doc)
|
assert "<Article: None>" == repr(doc)
|
||||||
|
|
||||||
def test_queryset_resurrects_dropped_collection(self):
|
def test_queryset_resurrects_dropped_collection(self):
|
||||||
self.Person.drop_collection()
|
self.Person.drop_collection()
|
||||||
assert list(self.Person.objects()) == []
|
assert [] == list(self.Person.objects())
|
||||||
|
|
||||||
# Ensure works correctly with inhertited classes
|
# Ensure works correctly with inhertited classes
|
||||||
class Actor(self.Person):
|
class Actor(self.Person):
|
||||||
@ -198,7 +197,7 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
|
|
||||||
Actor.objects()
|
Actor.objects()
|
||||||
self.Person.drop_collection()
|
self.Person.drop_collection()
|
||||||
assert list(Actor.objects()) == []
|
assert [] == list(Actor.objects())
|
||||||
|
|
||||||
def test_polymorphic_references(self):
|
def test_polymorphic_references(self):
|
||||||
"""Ensure that the correct subclasses are returned from a query
|
"""Ensure that the correct subclasses are returned from a query
|
||||||
@ -407,16 +406,6 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
assert person.name == "Test User"
|
assert person.name == "Test User"
|
||||||
assert person.age == 30
|
assert person.age == 30
|
||||||
|
|
||||||
def test__qs_property_does_not_raise(self):
|
|
||||||
# ensures no regression of #2500
|
|
||||||
class MyDocument(Document):
|
|
||||||
pass
|
|
||||||
|
|
||||||
MyDocument.drop_collection()
|
|
||||||
object = MyDocument()
|
|
||||||
object._qs().insert([MyDocument()])
|
|
||||||
assert MyDocument.objects.count() == 1
|
|
||||||
|
|
||||||
def test_to_dbref(self):
|
def test_to_dbref(self):
|
||||||
"""Ensure that you can get a dbref of a document."""
|
"""Ensure that you can get a dbref of a document."""
|
||||||
person = self.Person(name="Test User", age=30)
|
person = self.Person(name="Test User", age=30)
|
||||||
@ -513,7 +502,7 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
doc.reload()
|
doc.reload()
|
||||||
Animal.drop_collection()
|
Animal.drop_collection()
|
||||||
|
|
||||||
def test_save_update_shard_key_routing(self):
|
def test_update_shard_key_routing(self):
|
||||||
"""Ensures updating a doc with a specified shard_key includes it in
|
"""Ensures updating a doc with a specified shard_key includes it in
|
||||||
the query.
|
the query.
|
||||||
"""
|
"""
|
||||||
@ -535,32 +524,9 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
query_op = q.db.system.profile.find({"ns": "mongoenginetest.animal"})[0]
|
query_op = q.db.system.profile.find({"ns": "mongoenginetest.animal"})[0]
|
||||||
assert query_op["op"] == "update"
|
assert query_op["op"] == "update"
|
||||||
if mongo_db <= MONGODB_34:
|
if mongo_db <= MONGODB_34:
|
||||||
assert set(query_op["query"].keys()) == {"_id", "is_mammal"}
|
assert set(query_op["query"].keys()) == set(["_id", "is_mammal"])
|
||||||
else:
|
else:
|
||||||
assert set(query_op["command"]["q"].keys()) == {"_id", "is_mammal"}
|
assert set(query_op["command"]["q"].keys()) == set(["_id", "is_mammal"])
|
||||||
|
|
||||||
Animal.drop_collection()
|
|
||||||
|
|
||||||
def test_save_create_shard_key_routing(self):
|
|
||||||
"""Ensures inserting a doc with a specified shard_key includes it in
|
|
||||||
the query.
|
|
||||||
"""
|
|
||||||
|
|
||||||
class Animal(Document):
|
|
||||||
_id = UUIDField(binary=False, primary_key=True, default=uuid.uuid4)
|
|
||||||
is_mammal = BooleanField()
|
|
||||||
name = StringField()
|
|
||||||
meta = {"shard_key": ("is_mammal",)}
|
|
||||||
|
|
||||||
Animal.drop_collection()
|
|
||||||
doc = Animal(is_mammal=True, name="Dog")
|
|
||||||
|
|
||||||
with query_counter() as q:
|
|
||||||
doc.save()
|
|
||||||
query_op = q.db.system.profile.find({"ns": "mongoenginetest.animal"})[0]
|
|
||||||
assert query_op["op"] == "command"
|
|
||||||
assert query_op["command"]["findAndModify"] == "animal"
|
|
||||||
assert set(query_op["command"]["query"].keys()) == {"_id", "is_mammal"}
|
|
||||||
|
|
||||||
Animal.drop_collection()
|
Animal.drop_collection()
|
||||||
|
|
||||||
@ -613,8 +579,7 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
doc.embedded_field.list_field.append(1)
|
doc.embedded_field.list_field.append(1)
|
||||||
doc.embedded_field.dict_field["woot"] = "woot"
|
doc.embedded_field.dict_field["woot"] = "woot"
|
||||||
|
|
||||||
changed = doc._get_changed_fields()
|
assert doc._get_changed_fields() == [
|
||||||
assert changed == [
|
|
||||||
"list_field",
|
"list_field",
|
||||||
"dict_field.woot",
|
"dict_field.woot",
|
||||||
"embedded_field.list_field",
|
"embedded_field.list_field",
|
||||||
@ -740,11 +705,11 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
Doc.drop_collection()
|
Doc.drop_collection()
|
||||||
|
|
||||||
doc = Doc(embedded_field=Embedded(string="Hi"))
|
doc = Doc(embedded_field=Embedded(string="Hi"))
|
||||||
self._assert_has_instance(doc.embedded_field, doc)
|
self.assertHasInstance(doc.embedded_field, doc)
|
||||||
|
|
||||||
doc.save()
|
doc.save()
|
||||||
doc = Doc.objects.get()
|
doc = Doc.objects.get()
|
||||||
self._assert_has_instance(doc.embedded_field, doc)
|
self.assertHasInstance(doc.embedded_field, doc)
|
||||||
|
|
||||||
def test_embedded_document_complex_instance(self):
|
def test_embedded_document_complex_instance(self):
|
||||||
"""Ensure that embedded documents in complex fields can reference
|
"""Ensure that embedded documents in complex fields can reference
|
||||||
@ -759,11 +724,11 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
|
|
||||||
Doc.drop_collection()
|
Doc.drop_collection()
|
||||||
doc = Doc(embedded_field=[Embedded(string="Hi")])
|
doc = Doc(embedded_field=[Embedded(string="Hi")])
|
||||||
self._assert_has_instance(doc.embedded_field[0], doc)
|
self.assertHasInstance(doc.embedded_field[0], doc)
|
||||||
|
|
||||||
doc.save()
|
doc.save()
|
||||||
doc = Doc.objects.get()
|
doc = Doc.objects.get()
|
||||||
self._assert_has_instance(doc.embedded_field[0], doc)
|
self.assertHasInstance(doc.embedded_field[0], doc)
|
||||||
|
|
||||||
def test_embedded_document_complex_instance_no_use_db_field(self):
|
def test_embedded_document_complex_instance_no_use_db_field(self):
|
||||||
"""Ensure that use_db_field is propagated to list of Emb Docs."""
|
"""Ensure that use_db_field is propagated to list of Emb Docs."""
|
||||||
@ -792,11 +757,11 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
|
|
||||||
acc = Account()
|
acc = Account()
|
||||||
acc.email = Email(email="test@example.com")
|
acc.email = Email(email="test@example.com")
|
||||||
self._assert_has_instance(acc._data["email"], acc)
|
self.assertHasInstance(acc._data["email"], acc)
|
||||||
acc.save()
|
acc.save()
|
||||||
|
|
||||||
acc1 = Account.objects.first()
|
acc1 = Account.objects.first()
|
||||||
self._assert_has_instance(acc1._data["email"], acc1)
|
self.assertHasInstance(acc1._data["email"], acc1)
|
||||||
|
|
||||||
def test_instance_is_set_on_setattr_on_embedded_document_list(self):
|
def test_instance_is_set_on_setattr_on_embedded_document_list(self):
|
||||||
class Email(EmbeddedDocument):
|
class Email(EmbeddedDocument):
|
||||||
@ -808,11 +773,11 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
Account.drop_collection()
|
Account.drop_collection()
|
||||||
acc = Account()
|
acc = Account()
|
||||||
acc.emails = [Email(email="test@example.com")]
|
acc.emails = [Email(email="test@example.com")]
|
||||||
self._assert_has_instance(acc._data["emails"][0], acc)
|
self.assertHasInstance(acc._data["emails"][0], acc)
|
||||||
acc.save()
|
acc.save()
|
||||||
|
|
||||||
acc1 = Account.objects.first()
|
acc1 = Account.objects.first()
|
||||||
self._assert_has_instance(acc1._data["emails"][0], acc1)
|
self.assertHasInstance(acc1._data["emails"][0], acc1)
|
||||||
|
|
||||||
def test_save_checks_that_clean_is_called(self):
|
def test_save_checks_that_clean_is_called(self):
|
||||||
class CustomError(Exception):
|
class CustomError(Exception):
|
||||||
@ -921,7 +886,7 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
with pytest.raises(InvalidDocumentError):
|
with pytest.raises(InvalidDocumentError):
|
||||||
self.Person().modify(set__age=10)
|
self.Person().modify(set__age=10)
|
||||||
|
|
||||||
self._assert_db_equal([dict(doc.to_mongo())])
|
self.assertDbEqual([dict(doc.to_mongo())])
|
||||||
|
|
||||||
def test_modify_invalid_query(self):
|
def test_modify_invalid_query(self):
|
||||||
doc1 = self.Person(name="bob", age=10).save()
|
doc1 = self.Person(name="bob", age=10).save()
|
||||||
@ -931,7 +896,7 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
with pytest.raises(InvalidQueryError):
|
with pytest.raises(InvalidQueryError):
|
||||||
doc1.modify({"id": doc2.id}, set__value=20)
|
doc1.modify({"id": doc2.id}, set__value=20)
|
||||||
|
|
||||||
self._assert_db_equal(docs)
|
self.assertDbEqual(docs)
|
||||||
|
|
||||||
def test_modify_match_another_document(self):
|
def test_modify_match_another_document(self):
|
||||||
doc1 = self.Person(name="bob", age=10).save()
|
doc1 = self.Person(name="bob", age=10).save()
|
||||||
@ -941,7 +906,7 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
n_modified = doc1.modify({"name": doc2.name}, set__age=100)
|
n_modified = doc1.modify({"name": doc2.name}, set__age=100)
|
||||||
assert n_modified == 0
|
assert n_modified == 0
|
||||||
|
|
||||||
self._assert_db_equal(docs)
|
self.assertDbEqual(docs)
|
||||||
|
|
||||||
def test_modify_not_exists(self):
|
def test_modify_not_exists(self):
|
||||||
doc1 = self.Person(name="bob", age=10).save()
|
doc1 = self.Person(name="bob", age=10).save()
|
||||||
@ -951,7 +916,7 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
n_modified = doc2.modify({"name": doc2.name}, set__age=100)
|
n_modified = doc2.modify({"name": doc2.name}, set__age=100)
|
||||||
assert n_modified == 0
|
assert n_modified == 0
|
||||||
|
|
||||||
self._assert_db_equal(docs)
|
self.assertDbEqual(docs)
|
||||||
|
|
||||||
def test_modify_update(self):
|
def test_modify_update(self):
|
||||||
other_doc = self.Person(name="bob", age=10).save()
|
other_doc = self.Person(name="bob", age=10).save()
|
||||||
@ -977,7 +942,7 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
assert doc.to_json() == doc_copy.to_json()
|
assert doc.to_json() == doc_copy.to_json()
|
||||||
assert doc._get_changed_fields() == []
|
assert doc._get_changed_fields() == []
|
||||||
|
|
||||||
self._assert_db_equal([dict(other_doc.to_mongo()), dict(doc.to_mongo())])
|
self.assertDbEqual([dict(other_doc.to_mongo()), dict(doc.to_mongo())])
|
||||||
|
|
||||||
def test_modify_with_positional_push(self):
|
def test_modify_with_positional_push(self):
|
||||||
class Content(EmbeddedDocument):
|
class Content(EmbeddedDocument):
|
||||||
@ -1442,15 +1407,15 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
coll = self.Person._get_collection()
|
coll = self.Person._get_collection()
|
||||||
doc = self.Person(name="John").save()
|
doc = self.Person(name="John").save()
|
||||||
raw_doc = coll.find_one({"_id": doc.pk})
|
raw_doc = coll.find_one({"_id": doc.pk})
|
||||||
assert set(raw_doc.keys()) == {"_id", "_cls", "name"}
|
assert set(raw_doc.keys()) == set(["_id", "_cls", "name"])
|
||||||
|
|
||||||
doc.update(rename__name="first_name")
|
doc.update(rename__name="first_name")
|
||||||
raw_doc = coll.find_one({"_id": doc.pk})
|
raw_doc = coll.find_one({"_id": doc.pk})
|
||||||
assert set(raw_doc.keys()) == {"_id", "_cls", "first_name"}
|
assert set(raw_doc.keys()) == set(["_id", "_cls", "first_name"])
|
||||||
assert raw_doc["first_name"] == "John"
|
assert raw_doc["first_name"] == "John"
|
||||||
|
|
||||||
def test_inserts_if_you_set_the_pk(self):
|
def test_inserts_if_you_set_the_pk(self):
|
||||||
_ = self.Person(name="p1", id=bson.ObjectId()).save()
|
p1 = self.Person(name="p1", id=bson.ObjectId()).save()
|
||||||
p2 = self.Person(name="p2")
|
p2 = self.Person(name="p2")
|
||||||
p2.id = bson.ObjectId()
|
p2.id = bson.ObjectId()
|
||||||
p2.save()
|
p2.save()
|
||||||
@ -1566,7 +1531,8 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
assert site.page.log_message == "Error: Dummy message"
|
assert site.page.log_message == "Error: Dummy message"
|
||||||
|
|
||||||
def test_update_list_field(self):
|
def test_update_list_field(self):
|
||||||
"""Test update on `ListField` with $pull + $in."""
|
"""Test update on `ListField` with $pull + $in.
|
||||||
|
"""
|
||||||
|
|
||||||
class Doc(Document):
|
class Doc(Document):
|
||||||
foo = ListField(StringField())
|
foo = ListField(StringField())
|
||||||
@ -2055,7 +2021,7 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
assert promoted_employee.details is None
|
assert promoted_employee.details is None
|
||||||
|
|
||||||
def test_object_mixins(self):
|
def test_object_mixins(self):
|
||||||
class NameMixin:
|
class NameMixin(object):
|
||||||
name = StringField()
|
name = StringField()
|
||||||
|
|
||||||
class Foo(EmbeddedDocument, NameMixin):
|
class Foo(EmbeddedDocument, NameMixin):
|
||||||
@ -2069,7 +2035,7 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
assert ["id", "name", "widgets"] == sorted(Bar._fields.keys())
|
assert ["id", "name", "widgets"] == sorted(Bar._fields.keys())
|
||||||
|
|
||||||
def test_mixin_inheritance(self):
|
def test_mixin_inheritance(self):
|
||||||
class BaseMixIn:
|
class BaseMixIn(object):
|
||||||
count = IntField()
|
count = IntField()
|
||||||
data = StringField()
|
data = StringField()
|
||||||
|
|
||||||
@ -2230,7 +2196,7 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
|
|
||||||
user = User(name="Mike").save()
|
user = User(name="Mike").save()
|
||||||
reviewer = User(name="John").save()
|
reviewer = User(name="John").save()
|
||||||
_ = Book(author=user, reviewer=reviewer).save()
|
book = Book(author=user, reviewer=reviewer).save()
|
||||||
|
|
||||||
reviewer.delete()
|
reviewer.delete()
|
||||||
assert Book.objects.count() == 1
|
assert Book.objects.count() == 1
|
||||||
@ -2256,7 +2222,7 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
|
|
||||||
user_1 = User(id=1).save()
|
user_1 = User(id=1).save()
|
||||||
user_2 = User(id=2).save()
|
user_2 = User(id=2).save()
|
||||||
_ = Book(id=1, author=user_2).save()
|
book_1 = Book(id=1, author=user_2).save()
|
||||||
book_2 = Book(id=2, author=user_1).save()
|
book_2 = Book(id=2, author=user_1).save()
|
||||||
|
|
||||||
user_2.delete()
|
user_2.delete()
|
||||||
@ -2265,7 +2231,7 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
assert Book.objects.get() == book_2
|
assert Book.objects.get() == book_2
|
||||||
|
|
||||||
user_3 = User(id=3).save()
|
user_3 = User(id=3).save()
|
||||||
_ = Book(id=3, author=user_3).save()
|
book_3 = Book(id=3, author=user_3).save()
|
||||||
|
|
||||||
user_3.delete()
|
user_3.delete()
|
||||||
# Deleting user_3 should also delete book_3
|
# Deleting user_3 should also delete book_3
|
||||||
@ -2828,13 +2794,15 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
register_connection("testdb-2", "mongoenginetest2")
|
register_connection("testdb-2", "mongoenginetest2")
|
||||||
|
|
||||||
class A(Document):
|
class A(Document):
|
||||||
"""Uses default db_alias"""
|
"""Uses default db_alias
|
||||||
|
"""
|
||||||
|
|
||||||
name = StringField()
|
name = StringField()
|
||||||
meta = {"allow_inheritance": True}
|
meta = {"allow_inheritance": True}
|
||||||
|
|
||||||
class B(A):
|
class B(A):
|
||||||
"""Uses testdb-2 db_alias"""
|
"""Uses testdb-2 db_alias
|
||||||
|
"""
|
||||||
|
|
||||||
meta = {"db_alias": "testdb-2"}
|
meta = {"db_alias": "testdb-2"}
|
||||||
|
|
||||||
@ -2914,32 +2882,50 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
# Checks
|
# Checks
|
||||||
assert ",".join([str(b) for b in Book.objects.all()]) == "1,2,3,4,5,6,7,8,9"
|
assert ",".join([str(b) for b in Book.objects.all()]) == "1,2,3,4,5,6,7,8,9"
|
||||||
# bob related books
|
# bob related books
|
||||||
bob_books_qs = Book.objects.filter(
|
assert (
|
||||||
Q(extra__a=bob) | Q(author=bob) | Q(extra__b=bob)
|
",".join(
|
||||||
|
[
|
||||||
|
str(b)
|
||||||
|
for b in Book.objects.filter(
|
||||||
|
Q(extra__a=bob) | Q(author=bob) | Q(extra__b=bob)
|
||||||
|
)
|
||||||
|
]
|
||||||
|
)
|
||||||
|
== "1,2,3,4"
|
||||||
)
|
)
|
||||||
assert [str(b) for b in bob_books_qs] == ["1", "2", "3", "4"]
|
|
||||||
assert bob_books_qs.count() == 4
|
|
||||||
|
|
||||||
# Susan & Karl related books
|
# Susan & Karl related books
|
||||||
susan_karl_books_qs = Book.objects.filter(
|
assert (
|
||||||
Q(extra__a__all=[karl, susan])
|
",".join(
|
||||||
| Q(author__all=[karl, susan])
|
[
|
||||||
| Q(extra__b__all=[karl.to_dbref(), susan.to_dbref()])
|
str(b)
|
||||||
|
for b in Book.objects.filter(
|
||||||
|
Q(extra__a__all=[karl, susan])
|
||||||
|
| Q(author__all=[karl, susan])
|
||||||
|
| Q(extra__b__all=[karl.to_dbref(), susan.to_dbref()])
|
||||||
|
)
|
||||||
|
]
|
||||||
|
)
|
||||||
|
== "1"
|
||||||
)
|
)
|
||||||
assert [str(b) for b in susan_karl_books_qs] == ["1"]
|
|
||||||
assert susan_karl_books_qs.count() == 1
|
|
||||||
|
|
||||||
# $Where
|
# $Where
|
||||||
custom_qs = Book.objects.filter(
|
assert (
|
||||||
__raw__={
|
u",".join(
|
||||||
"$where": """
|
[
|
||||||
|
str(b)
|
||||||
|
for b in Book.objects.filter(
|
||||||
|
__raw__={
|
||||||
|
"$where": """
|
||||||
function(){
|
function(){
|
||||||
return this.name == '1' ||
|
return this.name == '1' ||
|
||||||
this.name == '2';}"""
|
this.name == '2';}"""
|
||||||
}
|
}
|
||||||
|
)
|
||||||
|
]
|
||||||
|
)
|
||||||
|
== "1,2"
|
||||||
)
|
)
|
||||||
assert [str(b) for b in custom_qs] == ["1", "2"]
|
|
||||||
assert custom_qs.count() == 2
|
|
||||||
|
|
||||||
def test_switch_db_instance(self):
|
def test_switch_db_instance(self):
|
||||||
register_connection("testdb-1", "mongoenginetest2")
|
register_connection("testdb-1", "mongoenginetest2")
|
||||||
@ -3219,7 +3205,7 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
def test_positional_creation(self):
|
def test_positional_creation(self):
|
||||||
"""Document cannot be instantiated using positional arguments."""
|
"""Document cannot be instantiated using positional arguments."""
|
||||||
with pytest.raises(TypeError) as exc_info:
|
with pytest.raises(TypeError) as exc_info:
|
||||||
self.Person("Test User", 42)
|
person = self.Person("Test User", 42)
|
||||||
|
|
||||||
expected_msg = (
|
expected_msg = (
|
||||||
"Instantiating a document with positional arguments is not "
|
"Instantiating a document with positional arguments is not "
|
||||||
@ -3288,7 +3274,7 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
|
|
||||||
def expand(self):
|
def expand(self):
|
||||||
self.flattened_parameter = {}
|
self.flattened_parameter = {}
|
||||||
for parameter_name, parameter in self.parameters.items():
|
for parameter_name, parameter in iteritems(self.parameters):
|
||||||
parameter.expand()
|
parameter.expand()
|
||||||
|
|
||||||
class NodesSystem(Document):
|
class NodesSystem(Document):
|
||||||
@ -3296,10 +3282,10 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
nodes = MapField(ReferenceField(Node, dbref=False))
|
nodes = MapField(ReferenceField(Node, dbref=False))
|
||||||
|
|
||||||
def save(self, *args, **kwargs):
|
def save(self, *args, **kwargs):
|
||||||
for node_name, node in self.nodes.items():
|
for node_name, node in iteritems(self.nodes):
|
||||||
node.expand()
|
node.expand()
|
||||||
node.save(*args, **kwargs)
|
node.save(*args, **kwargs)
|
||||||
super().save(*args, **kwargs)
|
super(NodesSystem, self).save(*args, **kwargs)
|
||||||
|
|
||||||
NodesSystem.drop_collection()
|
NodesSystem.drop_collection()
|
||||||
Node.drop_collection()
|
Node.drop_collection()
|
||||||
@ -3426,7 +3412,7 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
assert obj3 != dbref2
|
assert obj3 != dbref2
|
||||||
assert dbref2 != obj3
|
assert dbref2 != obj3
|
||||||
|
|
||||||
def test_default_values_dont_get_override_upon_save_when_only_is_used(self):
|
def test_default_values(self):
|
||||||
class Person(Document):
|
class Person(Document):
|
||||||
created_on = DateTimeField(default=lambda: datetime.utcnow())
|
created_on = DateTimeField(default=lambda: datetime.utcnow())
|
||||||
name = StringField()
|
name = StringField()
|
||||||
@ -3604,7 +3590,8 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
assert u_from_db.height is None
|
assert u_from_db.height is None
|
||||||
|
|
||||||
def test_not_saved_eq(self):
|
def test_not_saved_eq(self):
|
||||||
"""Ensure we can compare documents not saved."""
|
"""Ensure we can compare documents not saved.
|
||||||
|
"""
|
||||||
|
|
||||||
class Person(Document):
|
class Person(Document):
|
||||||
pass
|
pass
|
||||||
@ -3620,13 +3607,13 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
v = StringField()
|
v = StringField()
|
||||||
|
|
||||||
class A(Document):
|
class A(Document):
|
||||||
array = ListField(EmbeddedDocumentField(B))
|
l = ListField(EmbeddedDocumentField(B))
|
||||||
|
|
||||||
A.objects.delete()
|
A.objects.delete()
|
||||||
A(array=[B(v="1"), B(v="2"), B(v="3")]).save()
|
A(l=[B(v="1"), B(v="2"), B(v="3")]).save()
|
||||||
a = A.objects.get()
|
a = A.objects.get()
|
||||||
assert a.array._instance == a
|
assert a.l._instance == a
|
||||||
for idx, b in enumerate(a.array):
|
for idx, b in enumerate(a.l):
|
||||||
assert b._instance == a
|
assert b._instance == a
|
||||||
assert idx == 2
|
assert idx == 2
|
||||||
|
|
||||||
@ -3748,7 +3735,7 @@ class TestDocumentInstance(MongoDBTestCase):
|
|||||||
_ = list(Jedi.objects) # Ensure a proper document loads without errors
|
_ = list(Jedi.objects) # Ensure a proper document loads without errors
|
||||||
|
|
||||||
# Forces a document with a wrong shape (may occur in case of migration)
|
# Forces a document with a wrong shape (may occur in case of migration)
|
||||||
value = "I_should_be_a_dict"
|
value = u"I_should_be_a_dict"
|
||||||
coll.insert_one({"light_saber": value})
|
coll.insert_one({"light_saber": value})
|
||||||
|
|
||||||
with pytest.raises(InvalidDocumentError) as exc_info:
|
with pytest.raises(InvalidDocumentError) as exc_info:
|
||||||
@ -3813,95 +3800,5 @@ class ObjectKeyTestCase(MongoDBTestCase):
|
|||||||
assert book._object_key == {"pk": book.pk, "author__name": "Author"}
|
assert book._object_key == {"pk": book.pk, "author__name": "Author"}
|
||||||
|
|
||||||
|
|
||||||
class DBFieldMappingTest(MongoDBTestCase):
|
|
||||||
def setUp(self):
|
|
||||||
class Fields:
|
|
||||||
w1 = BooleanField(db_field="w2")
|
|
||||||
|
|
||||||
x1 = BooleanField(db_field="x2")
|
|
||||||
x2 = BooleanField(db_field="x3")
|
|
||||||
|
|
||||||
y1 = BooleanField(db_field="y0")
|
|
||||||
y2 = BooleanField(db_field="y1")
|
|
||||||
|
|
||||||
z1 = BooleanField(db_field="z2")
|
|
||||||
z2 = BooleanField(db_field="z1")
|
|
||||||
|
|
||||||
class Doc(Fields, Document):
|
|
||||||
pass
|
|
||||||
|
|
||||||
class DynDoc(Fields, DynamicDocument):
|
|
||||||
pass
|
|
||||||
|
|
||||||
self.Doc = Doc
|
|
||||||
self.DynDoc = DynDoc
|
|
||||||
|
|
||||||
def tearDown(self):
|
|
||||||
for collection in list_collection_names(self.db):
|
|
||||||
self.db.drop_collection(collection)
|
|
||||||
|
|
||||||
def test_setting_fields_in_constructor_of_strict_doc_uses_model_names(self):
|
|
||||||
doc = self.Doc(z1=True, z2=False)
|
|
||||||
assert doc.z1 is True
|
|
||||||
assert doc.z2 is False
|
|
||||||
|
|
||||||
def test_setting_fields_in_constructor_of_dyn_doc_uses_model_names(self):
|
|
||||||
doc = self.DynDoc(z1=True, z2=False)
|
|
||||||
assert doc.z1 is True
|
|
||||||
assert doc.z2 is False
|
|
||||||
|
|
||||||
def test_setting_unknown_field_in_constructor_of_dyn_doc_does_not_overwrite_model_fields(
|
|
||||||
self,
|
|
||||||
):
|
|
||||||
doc = self.DynDoc(w2=True)
|
|
||||||
assert doc.w1 is None
|
|
||||||
assert doc.w2 is True
|
|
||||||
|
|
||||||
def test_unknown_fields_of_strict_doc_do_not_overwrite_dbfields_1(self):
|
|
||||||
doc = self.Doc()
|
|
||||||
doc.w2 = True
|
|
||||||
doc.x3 = True
|
|
||||||
doc.y0 = True
|
|
||||||
doc.save()
|
|
||||||
reloaded = self.Doc.objects.get(id=doc.id)
|
|
||||||
assert reloaded.w1 is None
|
|
||||||
assert reloaded.x1 is None
|
|
||||||
assert reloaded.x2 is None
|
|
||||||
assert reloaded.y1 is None
|
|
||||||
assert reloaded.y2 is None
|
|
||||||
|
|
||||||
def test_dbfields_are_loaded_to_the_right_modelfield_for_strict_doc_2(self):
|
|
||||||
doc = self.Doc()
|
|
||||||
doc.x2 = True
|
|
||||||
doc.y2 = True
|
|
||||||
doc.z2 = True
|
|
||||||
doc.save()
|
|
||||||
reloaded = self.Doc.objects.get(id=doc.id)
|
|
||||||
assert (
|
|
||||||
reloaded.x1,
|
|
||||||
reloaded.x2,
|
|
||||||
reloaded.y1,
|
|
||||||
reloaded.y2,
|
|
||||||
reloaded.z1,
|
|
||||||
reloaded.z2,
|
|
||||||
) == (doc.x1, doc.x2, doc.y1, doc.y2, doc.z1, doc.z2)
|
|
||||||
|
|
||||||
def test_dbfields_are_loaded_to_the_right_modelfield_for_dyn_doc_2(self):
|
|
||||||
doc = self.DynDoc()
|
|
||||||
doc.x2 = True
|
|
||||||
doc.y2 = True
|
|
||||||
doc.z2 = True
|
|
||||||
doc.save()
|
|
||||||
reloaded = self.DynDoc.objects.get(id=doc.id)
|
|
||||||
assert (
|
|
||||||
reloaded.x1,
|
|
||||||
reloaded.x2,
|
|
||||||
reloaded.y1,
|
|
||||||
reloaded.y2,
|
|
||||||
reloaded.z1,
|
|
||||||
reloaded.z2,
|
|
||||||
) == (doc.x1, doc.x2, doc.y1, doc.y2, doc.z1, doc.z2)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
unittest.main()
|
unittest.main()
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
import unittest
|
import unittest
|
||||||
import uuid
|
import uuid
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
from bson import ObjectId
|
from bson import ObjectId
|
||||||
|
|
||||||
from mongoengine import *
|
from mongoengine import *
|
||||||
|
@ -1,3 +1,4 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
import unittest
|
import unittest
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
|
||||||
@ -9,7 +10,8 @@ from tests.utils import MongoDBTestCase
|
|||||||
|
|
||||||
class TestValidatorError(MongoDBTestCase):
|
class TestValidatorError(MongoDBTestCase):
|
||||||
def test_to_dict(self):
|
def test_to_dict(self):
|
||||||
"""Ensure a ValidationError handles error to_dict correctly."""
|
"""Ensure a ValidationError handles error to_dict correctly.
|
||||||
|
"""
|
||||||
error = ValidationError("root")
|
error = ValidationError("root")
|
||||||
assert error.to_dict() == {}
|
assert error.to_dict() == {}
|
||||||
|
|
||||||
@ -89,7 +91,8 @@ class TestValidatorError(MongoDBTestCase):
|
|||||||
p.validate()
|
p.validate()
|
||||||
|
|
||||||
def test_embedded_document_validation(self):
|
def test_embedded_document_validation(self):
|
||||||
"""Ensure that embedded documents may be validated."""
|
"""Ensure that embedded documents may be validated.
|
||||||
|
"""
|
||||||
|
|
||||||
class Comment(EmbeddedDocument):
|
class Comment(EmbeddedDocument):
|
||||||
date = DateTimeField()
|
date = DateTimeField()
|
||||||
@ -210,7 +213,10 @@ class TestValidatorError(MongoDBTestCase):
|
|||||||
child.reference = parent
|
child.reference = parent
|
||||||
|
|
||||||
# Saving the child should not raise a ValidationError
|
# Saving the child should not raise a ValidationError
|
||||||
child.save()
|
try:
|
||||||
|
child.save()
|
||||||
|
except ValidationError as e:
|
||||||
|
self.fail("ValidationError raised: %s" % e.message)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
@ -1,25 +1,28 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
import uuid
|
import uuid
|
||||||
|
|
||||||
import pytest
|
|
||||||
from bson import Binary
|
from bson import Binary
|
||||||
|
import pytest
|
||||||
|
import six
|
||||||
|
|
||||||
from mongoengine import *
|
from mongoengine import *
|
||||||
from tests.utils import MongoDBTestCase
|
from tests.utils import MongoDBTestCase
|
||||||
|
|
||||||
BIN_VALUE = "\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5".encode(
|
BIN_VALUE = six.b(
|
||||||
"latin-1"
|
"\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class TestBinaryField(MongoDBTestCase):
|
class TestBinaryField(MongoDBTestCase):
|
||||||
def test_binary_fields(self):
|
def test_binary_fields(self):
|
||||||
"""Ensure that binary fields can be stored and retrieved."""
|
"""Ensure that binary fields can be stored and retrieved.
|
||||||
|
"""
|
||||||
|
|
||||||
class Attachment(Document):
|
class Attachment(Document):
|
||||||
content_type = StringField()
|
content_type = StringField()
|
||||||
blob = BinaryField()
|
blob = BinaryField()
|
||||||
|
|
||||||
BLOB = b"\xe6\x00\xc4\xff\x07"
|
BLOB = six.b("\xe6\x00\xc4\xff\x07")
|
||||||
MIME_TYPE = "application/octet-stream"
|
MIME_TYPE = "application/octet-stream"
|
||||||
|
|
||||||
Attachment.drop_collection()
|
Attachment.drop_collection()
|
||||||
@ -29,10 +32,11 @@ class TestBinaryField(MongoDBTestCase):
|
|||||||
|
|
||||||
attachment_1 = Attachment.objects().first()
|
attachment_1 = Attachment.objects().first()
|
||||||
assert MIME_TYPE == attachment_1.content_type
|
assert MIME_TYPE == attachment_1.content_type
|
||||||
assert BLOB == bytes(attachment_1.blob)
|
assert BLOB == six.binary_type(attachment_1.blob)
|
||||||
|
|
||||||
def test_validation_succeeds(self):
|
def test_validation_succeeds(self):
|
||||||
"""Ensure that valid values can be assigned to binary fields."""
|
"""Ensure that valid values can be assigned to binary fields.
|
||||||
|
"""
|
||||||
|
|
||||||
class AttachmentRequired(Document):
|
class AttachmentRequired(Document):
|
||||||
blob = BinaryField(required=True)
|
blob = BinaryField(required=True)
|
||||||
@ -43,11 +47,11 @@ class TestBinaryField(MongoDBTestCase):
|
|||||||
attachment_required = AttachmentRequired()
|
attachment_required = AttachmentRequired()
|
||||||
with pytest.raises(ValidationError):
|
with pytest.raises(ValidationError):
|
||||||
attachment_required.validate()
|
attachment_required.validate()
|
||||||
attachment_required.blob = Binary(b"\xe6\x00\xc4\xff\x07")
|
attachment_required.blob = Binary(six.b("\xe6\x00\xc4\xff\x07"))
|
||||||
attachment_required.validate()
|
attachment_required.validate()
|
||||||
|
|
||||||
_5_BYTES = b"\xe6\x00\xc4\xff\x07"
|
_5_BYTES = six.b("\xe6\x00\xc4\xff\x07")
|
||||||
_4_BYTES = b"\xe6\x00\xc4\xff"
|
_4_BYTES = six.b("\xe6\x00\xc4\xff")
|
||||||
with pytest.raises(ValidationError):
|
with pytest.raises(ValidationError):
|
||||||
AttachmentSizeLimit(blob=_5_BYTES).validate()
|
AttachmentSizeLimit(blob=_5_BYTES).validate()
|
||||||
AttachmentSizeLimit(blob=_4_BYTES).validate()
|
AttachmentSizeLimit(blob=_4_BYTES).validate()
|
||||||
@ -58,7 +62,7 @@ class TestBinaryField(MongoDBTestCase):
|
|||||||
class Attachment(Document):
|
class Attachment(Document):
|
||||||
blob = BinaryField()
|
blob = BinaryField()
|
||||||
|
|
||||||
for invalid_data in (2, "Im_a_unicode", ["some_str"]):
|
for invalid_data in (2, u"Im_a_unicode", ["some_str"]):
|
||||||
with pytest.raises(ValidationError):
|
with pytest.raises(ValidationError):
|
||||||
Attachment(blob=invalid_data).validate()
|
Attachment(blob=invalid_data).validate()
|
||||||
|
|
||||||
@ -119,7 +123,10 @@ class TestBinaryField(MongoDBTestCase):
|
|||||||
upsert=True, new=True, set__bin_field=BIN_VALUE
|
upsert=True, new=True, set__bin_field=BIN_VALUE
|
||||||
)
|
)
|
||||||
assert doc.some_field == "test"
|
assert doc.some_field == "test"
|
||||||
assert doc.bin_field == BIN_VALUE
|
if six.PY3:
|
||||||
|
assert doc.bin_field == BIN_VALUE
|
||||||
|
else:
|
||||||
|
assert doc.bin_field == Binary(BIN_VALUE)
|
||||||
|
|
||||||
def test_update_one(self):
|
def test_update_one(self):
|
||||||
"""Ensures no regression of bug #1127"""
|
"""Ensures no regression of bug #1127"""
|
||||||
@ -129,7 +136,7 @@ class TestBinaryField(MongoDBTestCase):
|
|||||||
|
|
||||||
MyDocument.drop_collection()
|
MyDocument.drop_collection()
|
||||||
|
|
||||||
bin_data = b"\xe6\x00\xc4\xff\x07"
|
bin_data = six.b("\xe6\x00\xc4\xff\x07")
|
||||||
doc = MyDocument(bin_field=bin_data).save()
|
doc = MyDocument(bin_field=bin_data).save()
|
||||||
|
|
||||||
n_updated = MyDocument.objects(bin_field=bin_data).update_one(
|
n_updated = MyDocument.objects(bin_field=bin_data).update_one(
|
||||||
@ -137,4 +144,7 @@ class TestBinaryField(MongoDBTestCase):
|
|||||||
)
|
)
|
||||||
assert n_updated == 1
|
assert n_updated == 1
|
||||||
fetched = MyDocument.objects.with_id(doc.id)
|
fetched = MyDocument.objects.with_id(doc.id)
|
||||||
assert fetched.bin_field == BIN_VALUE
|
if six.PY3:
|
||||||
|
assert fetched.bin_field == BIN_VALUE
|
||||||
|
else:
|
||||||
|
assert fetched.bin_field == Binary(BIN_VALUE)
|
||||||
|
@ -1,3 +1,4 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from mongoengine import *
|
from mongoengine import *
|
||||||
@ -13,17 +14,6 @@ class TestBooleanField(MongoDBTestCase):
|
|||||||
person.save()
|
person.save()
|
||||||
assert get_as_pymongo(person) == {"_id": person.id, "admin": True}
|
assert get_as_pymongo(person) == {"_id": person.id, "admin": True}
|
||||||
|
|
||||||
def test_construction_does_not_fail_uncastable_value(self):
|
|
||||||
class BoolFail:
|
|
||||||
def __bool__(self):
|
|
||||||
return "bogus"
|
|
||||||
|
|
||||||
class Person(Document):
|
|
||||||
admin = BooleanField()
|
|
||||||
|
|
||||||
person = Person(admin=BoolFail())
|
|
||||||
person.admin == "bogus"
|
|
||||||
|
|
||||||
def test_validation(self):
|
def test_validation(self):
|
||||||
"""Ensure that invalid values cannot be assigned to boolean
|
"""Ensure that invalid values cannot be assigned to boolean
|
||||||
fields.
|
fields.
|
||||||
|
@ -1,29 +1,13 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
from decimal import Decimal
|
from decimal import Decimal
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from mongoengine import (
|
from mongoengine import *
|
||||||
CachedReferenceField,
|
|
||||||
DecimalField,
|
|
||||||
Document,
|
|
||||||
EmbeddedDocument,
|
|
||||||
EmbeddedDocumentField,
|
|
||||||
InvalidDocumentError,
|
|
||||||
ListField,
|
|
||||||
ReferenceField,
|
|
||||||
StringField,
|
|
||||||
ValidationError,
|
|
||||||
)
|
|
||||||
from tests.utils import MongoDBTestCase
|
from tests.utils import MongoDBTestCase
|
||||||
|
|
||||||
|
|
||||||
class TestCachedReferenceField(MongoDBTestCase):
|
class TestCachedReferenceField(MongoDBTestCase):
|
||||||
def test_constructor_fail_bad_document_type(self):
|
|
||||||
with pytest.raises(
|
|
||||||
ValidationError, match="must be a document class or a string"
|
|
||||||
):
|
|
||||||
CachedReferenceField(document_type=0)
|
|
||||||
|
|
||||||
def test_get_and_save(self):
|
def test_get_and_save(self):
|
||||||
"""
|
"""
|
||||||
Tests #1047: CachedReferenceField creates DBRefs on to_python,
|
Tests #1047: CachedReferenceField creates DBRefs on to_python,
|
||||||
@ -207,9 +191,9 @@ class TestCachedReferenceField(MongoDBTestCase):
|
|||||||
|
|
||||||
assert dict(a2.to_mongo()) == {
|
assert dict(a2.to_mongo()) == {
|
||||||
"_id": a2.pk,
|
"_id": a2.pk,
|
||||||
"name": "Wilson Junior",
|
"name": u"Wilson Junior",
|
||||||
"tp": "pf",
|
"tp": u"pf",
|
||||||
"father": {"_id": a1.pk, "tp": "pj"},
|
"father": {"_id": a1.pk, "tp": u"pj"},
|
||||||
}
|
}
|
||||||
|
|
||||||
assert Person.objects(father=a1)._query == {"father._id": a1.pk}
|
assert Person.objects(father=a1)._query == {"father._id": a1.pk}
|
||||||
@ -221,9 +205,9 @@ class TestCachedReferenceField(MongoDBTestCase):
|
|||||||
a2.reload()
|
a2.reload()
|
||||||
assert dict(a2.to_mongo()) == {
|
assert dict(a2.to_mongo()) == {
|
||||||
"_id": a2.pk,
|
"_id": a2.pk,
|
||||||
"name": "Wilson Junior",
|
"name": u"Wilson Junior",
|
||||||
"tp": "pf",
|
"tp": u"pf",
|
||||||
"father": {"_id": a1.pk, "tp": "pf"},
|
"father": {"_id": a1.pk, "tp": u"pf"},
|
||||||
}
|
}
|
||||||
|
|
||||||
def test_cached_reference_fields_on_embedded_documents(self):
|
def test_cached_reference_fields_on_embedded_documents(self):
|
||||||
|
@ -1,11 +1,11 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
import datetime
|
import datetime
|
||||||
import itertools
|
import itertools
|
||||||
import math
|
import math
|
||||||
import re
|
import re
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
from mongoengine import *
|
from mongoengine import *
|
||||||
|
|
||||||
from tests.utils import MongoDBTestCase
|
from tests.utils import MongoDBTestCase
|
||||||
|
|
||||||
|
|
||||||
@ -59,13 +59,13 @@ class ComplexDateTimeFieldTest(MongoDBTestCase):
|
|||||||
assert log == log1
|
assert log == log1
|
||||||
|
|
||||||
# Test string padding
|
# Test string padding
|
||||||
microsecond = map(int, (math.pow(10, x) for x in range(6)))
|
microsecond = map(int, [math.pow(10, x) for x in range(6)])
|
||||||
mm = dd = hh = ii = ss = [1, 10]
|
mm = dd = hh = ii = ss = [1, 10]
|
||||||
|
|
||||||
for values in itertools.product([2014], mm, dd, hh, ii, ss, microsecond):
|
for values in itertools.product([2014], mm, dd, hh, ii, ss, microsecond):
|
||||||
stored = LogEntry(date=datetime.datetime(*values)).to_mongo()["date"]
|
stored = LogEntry(date=datetime.datetime(*values)).to_mongo()["date"]
|
||||||
assert (
|
assert (
|
||||||
re.match(r"^\d{4},\d{2},\d{2},\d{2},\d{2},\d{2},\d{6}$", stored)
|
re.match("^\d{4},\d{2},\d{2},\d{2},\d{2},\d{2},\d{6}$", stored)
|
||||||
is not None
|
is not None
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -74,7 +74,7 @@ class ComplexDateTimeFieldTest(MongoDBTestCase):
|
|||||||
"date_with_dots"
|
"date_with_dots"
|
||||||
]
|
]
|
||||||
assert (
|
assert (
|
||||||
re.match(r"^\d{4}.\d{2}.\d{2}.\d{2}.\d{2}.\d{2}.\d{6}$", stored) is not None
|
re.match("^\d{4}.\d{2}.\d{2}.\d{2}.\d{2}.\d{2}.\d{6}$", stored) is not None
|
||||||
)
|
)
|
||||||
|
|
||||||
def test_complexdatetime_usage(self):
|
def test_complexdatetime_usage(self):
|
||||||
@ -191,18 +191,3 @@ class ComplexDateTimeFieldTest(MongoDBTestCase):
|
|||||||
|
|
||||||
fetched_log = Log.objects.with_id(log.id)
|
fetched_log = Log.objects.with_id(log.id)
|
||||||
assert fetched_log.timestamp >= NOW
|
assert fetched_log.timestamp >= NOW
|
||||||
|
|
||||||
def test_setting_bad_value_does_not_raise_unless_validate_is_called(self):
|
|
||||||
# test regression of #2253
|
|
||||||
|
|
||||||
class Log(Document):
|
|
||||||
timestamp = ComplexDateTimeField()
|
|
||||||
|
|
||||||
Log.drop_collection()
|
|
||||||
|
|
||||||
log = Log(timestamp="garbage")
|
|
||||||
with pytest.raises(ValidationError):
|
|
||||||
log.validate()
|
|
||||||
|
|
||||||
with pytest.raises(ValidationError):
|
|
||||||
log.save()
|
|
||||||
|
@ -1,6 +1,8 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
import datetime
|
import datetime
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
import six
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import dateutil
|
import dateutil
|
||||||
@ -87,6 +89,17 @@ class TestDateField(MongoDBTestCase):
|
|||||||
assert log.date == d1.date()
|
assert log.date == d1.date()
|
||||||
assert log.date == d2.date()
|
assert log.date == d2.date()
|
||||||
|
|
||||||
|
if not six.PY3:
|
||||||
|
# Pre UTC dates microseconds below 1000 are dropped
|
||||||
|
# This does not seem to be true in PY3
|
||||||
|
d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999)
|
||||||
|
d2 = datetime.datetime(1969, 12, 31, 23, 59, 59)
|
||||||
|
log.date = d1
|
||||||
|
log.save()
|
||||||
|
log.reload()
|
||||||
|
assert log.date == d1.date()
|
||||||
|
assert log.date == d2.date()
|
||||||
|
|
||||||
def test_regular_usage(self):
|
def test_regular_usage(self):
|
||||||
"""Tests for regular datetime fields"""
|
"""Tests for regular datetime fields"""
|
||||||
|
|
||||||
|
@ -1,6 +1,8 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
import datetime as dt
|
import datetime as dt
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
import six
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import dateutil
|
import dateutil
|
||||||
@ -9,6 +11,7 @@ except ImportError:
|
|||||||
|
|
||||||
from mongoengine import *
|
from mongoengine import *
|
||||||
from mongoengine import connection
|
from mongoengine import connection
|
||||||
|
|
||||||
from tests.utils import MongoDBTestCase
|
from tests.utils import MongoDBTestCase
|
||||||
|
|
||||||
|
|
||||||
@ -95,6 +98,17 @@ class TestDateTimeField(MongoDBTestCase):
|
|||||||
assert log.date != d1
|
assert log.date != d1
|
||||||
assert log.date == d2
|
assert log.date == d2
|
||||||
|
|
||||||
|
if not six.PY3:
|
||||||
|
# Pre UTC dates microseconds below 1000 are dropped
|
||||||
|
# This does not seem to be true in PY3
|
||||||
|
d1 = dt.datetime(1969, 12, 31, 23, 59, 59, 999)
|
||||||
|
d2 = dt.datetime(1969, 12, 31, 23, 59, 59)
|
||||||
|
log.date = d1
|
||||||
|
log.save()
|
||||||
|
log.reload()
|
||||||
|
assert log.date != d1
|
||||||
|
assert log.date == d2
|
||||||
|
|
||||||
def test_regular_usage(self):
|
def test_regular_usage(self):
|
||||||
"""Tests for regular datetime fields"""
|
"""Tests for regular datetime fields"""
|
||||||
|
|
||||||
@ -199,7 +213,7 @@ class TestDateTimeField(MongoDBTestCase):
|
|||||||
# make sure that passing a parsable datetime works
|
# make sure that passing a parsable datetime works
|
||||||
dtd = DTDoc()
|
dtd = DTDoc()
|
||||||
dtd.date = date_str
|
dtd.date = date_str
|
||||||
assert isinstance(dtd.date, str)
|
assert isinstance(dtd.date, six.string_types)
|
||||||
dtd.save()
|
dtd.save()
|
||||||
dtd.reload()
|
dtd.reload()
|
||||||
|
|
||||||
|
@ -1,12 +1,62 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
from decimal import Decimal
|
from decimal import Decimal
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from mongoengine import DecimalField, Document, ValidationError
|
from mongoengine import *
|
||||||
from tests.utils import MongoDBTestCase
|
from tests.utils import MongoDBTestCase
|
||||||
|
|
||||||
|
|
||||||
class TestDecimalField(MongoDBTestCase):
|
class TestDecimalField(MongoDBTestCase):
|
||||||
|
def test_validation(self):
|
||||||
|
"""Ensure that invalid values cannot be assigned to decimal fields.
|
||||||
|
"""
|
||||||
|
|
||||||
|
class Person(Document):
|
||||||
|
height = DecimalField(min_value=Decimal("0.1"), max_value=Decimal("3.5"))
|
||||||
|
|
||||||
|
Person.drop_collection()
|
||||||
|
|
||||||
|
Person(height=Decimal("1.89")).save()
|
||||||
|
person = Person.objects.first()
|
||||||
|
assert person.height == Decimal("1.89")
|
||||||
|
|
||||||
|
person.height = "2.0"
|
||||||
|
person.save()
|
||||||
|
person.height = 0.01
|
||||||
|
with pytest.raises(ValidationError):
|
||||||
|
person.validate()
|
||||||
|
person.height = Decimal("0.01")
|
||||||
|
with pytest.raises(ValidationError):
|
||||||
|
person.validate()
|
||||||
|
person.height = Decimal("4.0")
|
||||||
|
with pytest.raises(ValidationError):
|
||||||
|
person.validate()
|
||||||
|
person.height = "something invalid"
|
||||||
|
with pytest.raises(ValidationError):
|
||||||
|
person.validate()
|
||||||
|
|
||||||
|
person_2 = Person(height="something invalid")
|
||||||
|
with pytest.raises(ValidationError):
|
||||||
|
person_2.validate()
|
||||||
|
|
||||||
|
def test_comparison(self):
|
||||||
|
class Person(Document):
|
||||||
|
money = DecimalField()
|
||||||
|
|
||||||
|
Person.drop_collection()
|
||||||
|
|
||||||
|
Person(money=6).save()
|
||||||
|
Person(money=7).save()
|
||||||
|
Person(money=8).save()
|
||||||
|
Person(money=10).save()
|
||||||
|
|
||||||
|
assert 2 == Person.objects(money__gt=Decimal("7")).count()
|
||||||
|
assert 2 == Person.objects(money__gt=7).count()
|
||||||
|
assert 2 == Person.objects(money__gt="7").count()
|
||||||
|
|
||||||
|
assert 3 == Person.objects(money__gte="7").count()
|
||||||
|
|
||||||
def test_storage(self):
|
def test_storage(self):
|
||||||
class Person(Document):
|
class Person(Document):
|
||||||
float_value = DecimalField(precision=4)
|
float_value = DecimalField(precision=4)
|
||||||
@ -58,83 +108,3 @@ class TestDecimalField(MongoDBTestCase):
|
|||||||
for field_name in ["float_value", "string_value"]:
|
for field_name in ["float_value", "string_value"]:
|
||||||
actual = list(Person.objects().scalar(field_name))
|
actual = list(Person.objects().scalar(field_name))
|
||||||
assert expected == actual
|
assert expected == actual
|
||||||
|
|
||||||
def test_save_none(self):
|
|
||||||
class Person(Document):
|
|
||||||
value = DecimalField()
|
|
||||||
|
|
||||||
Person.drop_collection()
|
|
||||||
|
|
||||||
person = Person(value=None)
|
|
||||||
assert person.value is None
|
|
||||||
person.save()
|
|
||||||
fetched_person = Person.objects.first()
|
|
||||||
fetched_person.value is None
|
|
||||||
|
|
||||||
def test_validation(self):
|
|
||||||
"""Ensure that invalid values cannot be assigned to decimal fields."""
|
|
||||||
|
|
||||||
class Person(Document):
|
|
||||||
height = DecimalField(min_value=Decimal("0.1"), max_value=Decimal("3.5"))
|
|
||||||
|
|
||||||
Person.drop_collection()
|
|
||||||
|
|
||||||
Person(height=Decimal("1.89")).save()
|
|
||||||
person = Person.objects.first()
|
|
||||||
assert person.height == Decimal("1.89")
|
|
||||||
|
|
||||||
person.height = "2.0"
|
|
||||||
person.save()
|
|
||||||
person.height = 0.01
|
|
||||||
with pytest.raises(ValidationError):
|
|
||||||
person.validate()
|
|
||||||
person.height = Decimal("0.01")
|
|
||||||
with pytest.raises(ValidationError):
|
|
||||||
person.validate()
|
|
||||||
person.height = Decimal("4.0")
|
|
||||||
with pytest.raises(ValidationError):
|
|
||||||
person.validate()
|
|
||||||
person.height = "something invalid"
|
|
||||||
with pytest.raises(ValidationError):
|
|
||||||
person.validate()
|
|
||||||
|
|
||||||
person_2 = Person(height="something invalid")
|
|
||||||
with pytest.raises(ValidationError):
|
|
||||||
person_2.validate()
|
|
||||||
|
|
||||||
def test_comparison(self):
|
|
||||||
class Person(Document):
|
|
||||||
money = DecimalField()
|
|
||||||
|
|
||||||
Person.drop_collection()
|
|
||||||
|
|
||||||
Person(money=6).save()
|
|
||||||
Person(money=7).save()
|
|
||||||
Person(money=8).save()
|
|
||||||
Person(money=10).save()
|
|
||||||
|
|
||||||
assert 2 == Person.objects(money__gt=Decimal("7")).count()
|
|
||||||
assert 2 == Person.objects(money__gt=7).count()
|
|
||||||
assert 2 == Person.objects(money__gt="7").count()
|
|
||||||
|
|
||||||
assert 3 == Person.objects(money__gte="7").count()
|
|
||||||
|
|
||||||
def test_precision_0(self):
|
|
||||||
"""prevent regression of a bug that was raising an exception when using precision=0"""
|
|
||||||
|
|
||||||
class TestDoc(Document):
|
|
||||||
d = DecimalField(precision=0)
|
|
||||||
|
|
||||||
TestDoc.drop_collection()
|
|
||||||
|
|
||||||
td = TestDoc(d=Decimal("12.00032678131263"))
|
|
||||||
assert td.d == Decimal("12")
|
|
||||||
|
|
||||||
def test_precision_negative_raise(self):
|
|
||||||
"""prevent regression of a bug that was raising an exception when using precision=0"""
|
|
||||||
with pytest.raises(
|
|
||||||
ValidationError, match="precision must be a positive integer"
|
|
||||||
):
|
|
||||||
|
|
||||||
class TestDoc(Document):
|
|
||||||
dneg = DecimalField(precision=-1)
|
|
||||||
|
@ -1,12 +1,10 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
import pytest
|
import pytest
|
||||||
from bson import InvalidDocument
|
|
||||||
|
|
||||||
from mongoengine import *
|
from mongoengine import *
|
||||||
from mongoengine.base import BaseDict
|
from mongoengine.base import BaseDict
|
||||||
from mongoengine.mongodb_support import (
|
from mongoengine.mongodb_support import MONGODB_36, get_mongodb_version
|
||||||
MONGODB_36,
|
|
||||||
get_mongodb_version,
|
|
||||||
)
|
|
||||||
from tests.utils import MongoDBTestCase, get_as_pymongo
|
from tests.utils import MongoDBTestCase, get_as_pymongo
|
||||||
|
|
||||||
|
|
||||||
@ -21,24 +19,22 @@ class TestDictField(MongoDBTestCase):
|
|||||||
post = BlogPost(info=info).save()
|
post = BlogPost(info=info).save()
|
||||||
assert get_as_pymongo(post) == {"_id": post.id, "info": info}
|
assert get_as_pymongo(post) == {"_id": post.id, "info": info}
|
||||||
|
|
||||||
def test_validate_invalid_type(self):
|
def test_general_things(self):
|
||||||
class BlogPost(Document):
|
"""Ensure that dict types work as expected."""
|
||||||
info = DictField()
|
|
||||||
|
|
||||||
BlogPost.drop_collection()
|
|
||||||
|
|
||||||
invalid_infos = ["my post", ["test", "test"], {1: "test"}]
|
|
||||||
for invalid_info in invalid_infos:
|
|
||||||
with pytest.raises(ValidationError):
|
|
||||||
BlogPost(info=invalid_info).validate()
|
|
||||||
|
|
||||||
def test_keys_with_dots_or_dollars(self):
|
|
||||||
class BlogPost(Document):
|
class BlogPost(Document):
|
||||||
info = DictField()
|
info = DictField()
|
||||||
|
|
||||||
BlogPost.drop_collection()
|
BlogPost.drop_collection()
|
||||||
|
|
||||||
post = BlogPost()
|
post = BlogPost()
|
||||||
|
post.info = "my post"
|
||||||
|
with pytest.raises(ValidationError):
|
||||||
|
post.validate()
|
||||||
|
|
||||||
|
post.info = ["test", "test"]
|
||||||
|
with pytest.raises(ValidationError):
|
||||||
|
post.validate()
|
||||||
|
|
||||||
post.info = {"$title": "test"}
|
post.info = {"$title": "test"}
|
||||||
with pytest.raises(ValidationError):
|
with pytest.raises(ValidationError):
|
||||||
@ -52,34 +48,25 @@ class TestDictField(MongoDBTestCase):
|
|||||||
with pytest.raises(ValidationError):
|
with pytest.raises(ValidationError):
|
||||||
post.validate()
|
post.validate()
|
||||||
|
|
||||||
|
post.info = {1: "test"}
|
||||||
|
with pytest.raises(ValidationError):
|
||||||
|
post.validate()
|
||||||
|
|
||||||
post.info = {"nested": {"the.title": "test"}}
|
post.info = {"nested": {"the.title": "test"}}
|
||||||
if get_mongodb_version() < MONGODB_36:
|
if get_mongodb_version() < MONGODB_36:
|
||||||
# MongoDB < 3.6 rejects dots
|
with pytest.raises(ValidationError):
|
||||||
# To avoid checking the mongodb version from the DictField class
|
post.validate()
|
||||||
# we rely on MongoDB to reject the data during the save
|
|
||||||
post.validate()
|
|
||||||
with pytest.raises(InvalidDocument):
|
|
||||||
post.save()
|
|
||||||
else:
|
else:
|
||||||
post.validate()
|
post.validate()
|
||||||
|
|
||||||
post.info = {"dollar_and_dot": {"te$st.test": "test"}}
|
post.info = {"dollar_and_dot": {"te$st.test": "test"}}
|
||||||
if get_mongodb_version() < MONGODB_36:
|
if get_mongodb_version() < MONGODB_36:
|
||||||
post.validate()
|
with pytest.raises(ValidationError):
|
||||||
with pytest.raises(InvalidDocument):
|
post.validate()
|
||||||
post.save()
|
|
||||||
else:
|
else:
|
||||||
post.validate()
|
post.validate()
|
||||||
|
|
||||||
def test_general_things(self):
|
post.info = {"title": "test"}
|
||||||
"""Ensure that dict types work as expected."""
|
|
||||||
|
|
||||||
class BlogPost(Document):
|
|
||||||
info = DictField()
|
|
||||||
|
|
||||||
BlogPost.drop_collection()
|
|
||||||
|
|
||||||
post = BlogPost(info={"title": "test"})
|
|
||||||
post.save()
|
post.save()
|
||||||
|
|
||||||
post = BlogPost()
|
post = BlogPost()
|
||||||
@ -114,7 +101,7 @@ class TestDictField(MongoDBTestCase):
|
|||||||
post.info.setdefault("authors", [])
|
post.info.setdefault("authors", [])
|
||||||
post.save()
|
post.save()
|
||||||
post.reload()
|
post.reload()
|
||||||
assert post.info["authors"] == []
|
assert [] == post.info["authors"]
|
||||||
|
|
||||||
def test_dictfield_dump_document(self):
|
def test_dictfield_dump_document(self):
|
||||||
"""Ensure a DictField can handle another document's dump."""
|
"""Ensure a DictField can handle another document's dump."""
|
||||||
|
@ -1,6 +1,9 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
import sys
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from mongoengine import Document, EmailField, ValidationError
|
from mongoengine import *
|
||||||
from tests.utils import MongoDBTestCase
|
from tests.utils import MongoDBTestCase
|
||||||
|
|
||||||
|
|
||||||
@ -28,11 +31,11 @@ class TestEmailField(MongoDBTestCase):
|
|||||||
user.validate()
|
user.validate()
|
||||||
|
|
||||||
# unicode domain
|
# unicode domain
|
||||||
user = User(email="user@пример.рф")
|
user = User(email=u"user@пример.рф")
|
||||||
user.validate()
|
user.validate()
|
||||||
|
|
||||||
# invalid unicode domain
|
# invalid unicode domain
|
||||||
user = User(email="user@пример")
|
user = User(email=u"user@пример")
|
||||||
with pytest.raises(ValidationError):
|
with pytest.raises(ValidationError):
|
||||||
user.validate()
|
user.validate()
|
||||||
|
|
||||||
@ -46,7 +49,7 @@ class TestEmailField(MongoDBTestCase):
|
|||||||
email = EmailField()
|
email = EmailField()
|
||||||
|
|
||||||
# unicode user shouldn't validate by default...
|
# unicode user shouldn't validate by default...
|
||||||
user = User(email="Dörte@Sörensen.example.com")
|
user = User(email=u"Dörte@Sörensen.example.com")
|
||||||
with pytest.raises(ValidationError):
|
with pytest.raises(ValidationError):
|
||||||
user.validate()
|
user.validate()
|
||||||
|
|
||||||
@ -54,7 +57,7 @@ class TestEmailField(MongoDBTestCase):
|
|||||||
class User(Document):
|
class User(Document):
|
||||||
email = EmailField(allow_utf8_user=True)
|
email = EmailField(allow_utf8_user=True)
|
||||||
|
|
||||||
user = User(email="Dörte@Sörensen.example.com")
|
user = User(email=u"Dörte@Sörensen.example.com")
|
||||||
user.validate()
|
user.validate()
|
||||||
|
|
||||||
def test_email_field_domain_whitelist(self):
|
def test_email_field_domain_whitelist(self):
|
||||||
|
@ -1,7 +1,5 @@
|
|||||||
from copy import deepcopy
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
from bson import ObjectId
|
|
||||||
|
|
||||||
from mongoengine import (
|
from mongoengine import (
|
||||||
Document,
|
Document,
|
||||||
@ -12,10 +10,10 @@ from mongoengine import (
|
|||||||
InvalidQueryError,
|
InvalidQueryError,
|
||||||
ListField,
|
ListField,
|
||||||
LookUpError,
|
LookUpError,
|
||||||
MapField,
|
|
||||||
StringField,
|
StringField,
|
||||||
ValidationError,
|
ValidationError,
|
||||||
)
|
)
|
||||||
|
|
||||||
from tests.utils import MongoDBTestCase
|
from tests.utils import MongoDBTestCase
|
||||||
|
|
||||||
|
|
||||||
@ -77,7 +75,7 @@ class TestEmbeddedDocumentField(MongoDBTestCase):
|
|||||||
# Test non exiting attribute
|
# Test non exiting attribute
|
||||||
with pytest.raises(InvalidQueryError) as exc_info:
|
with pytest.raises(InvalidQueryError) as exc_info:
|
||||||
Person.objects(settings__notexist="bar").first()
|
Person.objects(settings__notexist="bar").first()
|
||||||
assert str(exc_info.value) == 'Cannot resolve field "notexist"'
|
assert unicode(exc_info.value) == u'Cannot resolve field "notexist"'
|
||||||
|
|
||||||
with pytest.raises(LookUpError):
|
with pytest.raises(LookUpError):
|
||||||
Person.objects.only("settings.notexist")
|
Person.objects.only("settings.notexist")
|
||||||
@ -113,7 +111,7 @@ class TestEmbeddedDocumentField(MongoDBTestCase):
|
|||||||
# Test non exiting attribute
|
# Test non exiting attribute
|
||||||
with pytest.raises(InvalidQueryError) as exc_info:
|
with pytest.raises(InvalidQueryError) as exc_info:
|
||||||
assert Person.objects(settings__notexist="bar").first().id == p.id
|
assert Person.objects(settings__notexist="bar").first().id == p.id
|
||||||
assert str(exc_info.value) == 'Cannot resolve field "notexist"'
|
assert unicode(exc_info.value) == u'Cannot resolve field "notexist"'
|
||||||
|
|
||||||
# Test existing attribute
|
# Test existing attribute
|
||||||
assert Person.objects(settings__base_foo="basefoo").first().id == p.id
|
assert Person.objects(settings__base_foo="basefoo").first().id == p.id
|
||||||
@ -321,7 +319,7 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase):
|
|||||||
# Test non exiting attribute
|
# Test non exiting attribute
|
||||||
with pytest.raises(InvalidQueryError) as exc_info:
|
with pytest.raises(InvalidQueryError) as exc_info:
|
||||||
Person.objects(settings__notexist="bar").first()
|
Person.objects(settings__notexist="bar").first()
|
||||||
assert str(exc_info.value) == 'Cannot resolve field "notexist"'
|
assert unicode(exc_info.value) == u'Cannot resolve field "notexist"'
|
||||||
|
|
||||||
with pytest.raises(LookUpError):
|
with pytest.raises(LookUpError):
|
||||||
Person.objects.only("settings.notexist")
|
Person.objects.only("settings.notexist")
|
||||||
@ -349,35 +347,8 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase):
|
|||||||
# Test non exiting attribute
|
# Test non exiting attribute
|
||||||
with pytest.raises(InvalidQueryError) as exc_info:
|
with pytest.raises(InvalidQueryError) as exc_info:
|
||||||
assert Person.objects(settings__notexist="bar").first().id == p.id
|
assert Person.objects(settings__notexist="bar").first().id == p.id
|
||||||
assert str(exc_info.value) == 'Cannot resolve field "notexist"'
|
assert unicode(exc_info.value) == u'Cannot resolve field "notexist"'
|
||||||
|
|
||||||
# Test existing attribute
|
# Test existing attribute
|
||||||
assert Person.objects(settings__base_foo="basefoo").first().id == p.id
|
assert Person.objects(settings__base_foo="basefoo").first().id == p.id
|
||||||
assert Person.objects(settings__sub_foo="subfoo").first().id == p.id
|
assert Person.objects(settings__sub_foo="subfoo").first().id == p.id
|
||||||
|
|
||||||
def test_deepcopy_set__instance(self):
|
|
||||||
"""Ensure that the _instance attribute on EmbeddedDocument exists after a deepcopy"""
|
|
||||||
|
|
||||||
class Wallet(EmbeddedDocument):
|
|
||||||
money = IntField()
|
|
||||||
|
|
||||||
class Person(Document):
|
|
||||||
wallet = EmbeddedDocumentField(Wallet)
|
|
||||||
wallet_map = MapField(EmbeddedDocumentField(Wallet))
|
|
||||||
|
|
||||||
# Test on fresh EmbeddedDoc
|
|
||||||
emb_doc = Wallet(money=1)
|
|
||||||
assert emb_doc._instance is None
|
|
||||||
copied_emb_doc = deepcopy(emb_doc)
|
|
||||||
assert copied_emb_doc._instance is None
|
|
||||||
|
|
||||||
# Test on attached EmbeddedDoc
|
|
||||||
doc = Person(
|
|
||||||
id=ObjectId(), wallet=Wallet(money=2), wallet_map={"test": Wallet(money=2)}
|
|
||||||
)
|
|
||||||
assert doc.wallet._instance == doc
|
|
||||||
copied_emb_doc = deepcopy(doc.wallet)
|
|
||||||
assert copied_emb_doc._instance is None
|
|
||||||
|
|
||||||
copied_map_emb_doc = deepcopy(doc.wallet_map)
|
|
||||||
assert copied_map_emb_doc["test"]._instance is None
|
|
||||||
|
@ -1,145 +0,0 @@
|
|||||||
from enum import Enum
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
from bson import InvalidDocument
|
|
||||||
|
|
||||||
from mongoengine import Document, EnumField, ValidationError
|
|
||||||
from tests.utils import MongoDBTestCase, get_as_pymongo
|
|
||||||
|
|
||||||
|
|
||||||
class Status(Enum):
|
|
||||||
NEW = "new"
|
|
||||||
DONE = "done"
|
|
||||||
|
|
||||||
|
|
||||||
class Color(Enum):
|
|
||||||
RED = 1
|
|
||||||
BLUE = 2
|
|
||||||
|
|
||||||
|
|
||||||
class ModelWithEnum(Document):
|
|
||||||
status = EnumField(Status)
|
|
||||||
|
|
||||||
|
|
||||||
class TestStringEnumField(MongoDBTestCase):
|
|
||||||
def test_storage(self):
|
|
||||||
model = ModelWithEnum(status=Status.NEW).save()
|
|
||||||
assert get_as_pymongo(model) == {"_id": model.id, "status": "new"}
|
|
||||||
|
|
||||||
def test_set_enum(self):
|
|
||||||
ModelWithEnum.drop_collection()
|
|
||||||
ModelWithEnum(status=Status.NEW).save()
|
|
||||||
assert ModelWithEnum.objects(status=Status.NEW).count() == 1
|
|
||||||
assert ModelWithEnum.objects.first().status == Status.NEW
|
|
||||||
|
|
||||||
def test_set_by_value(self):
|
|
||||||
ModelWithEnum.drop_collection()
|
|
||||||
ModelWithEnum(status="new").save()
|
|
||||||
assert ModelWithEnum.objects.first().status == Status.NEW
|
|
||||||
|
|
||||||
def test_filter(self):
|
|
||||||
ModelWithEnum.drop_collection()
|
|
||||||
ModelWithEnum(status="new").save()
|
|
||||||
assert ModelWithEnum.objects(status="new").count() == 1
|
|
||||||
assert ModelWithEnum.objects(status=Status.NEW).count() == 1
|
|
||||||
assert ModelWithEnum.objects(status=Status.DONE).count() == 0
|
|
||||||
|
|
||||||
def test_change_value(self):
|
|
||||||
m = ModelWithEnum(status="new")
|
|
||||||
m.status = Status.DONE
|
|
||||||
m.save()
|
|
||||||
assert m.status == Status.DONE
|
|
||||||
|
|
||||||
m.status = "wrong"
|
|
||||||
assert m.status == "wrong"
|
|
||||||
with pytest.raises(ValidationError):
|
|
||||||
m.validate()
|
|
||||||
|
|
||||||
def test_set_default(self):
|
|
||||||
class ModelWithDefault(Document):
|
|
||||||
status = EnumField(Status, default=Status.DONE)
|
|
||||||
|
|
||||||
m = ModelWithDefault().save()
|
|
||||||
assert m.status == Status.DONE
|
|
||||||
|
|
||||||
def test_enum_field_can_be_empty(self):
|
|
||||||
ModelWithEnum.drop_collection()
|
|
||||||
m = ModelWithEnum().save()
|
|
||||||
assert m.status is None
|
|
||||||
assert ModelWithEnum.objects()[0].status is None
|
|
||||||
assert ModelWithEnum.objects(status=None).count() == 1
|
|
||||||
|
|
||||||
def test_set_none_explicitly(self):
|
|
||||||
ModelWithEnum.drop_collection()
|
|
||||||
ModelWithEnum(status=None).save()
|
|
||||||
assert ModelWithEnum.objects.first().status is None
|
|
||||||
|
|
||||||
def test_cannot_create_model_with_wrong_enum_value(self):
|
|
||||||
m = ModelWithEnum(status="wrong_one")
|
|
||||||
with pytest.raises(ValidationError):
|
|
||||||
m.validate()
|
|
||||||
|
|
||||||
def test_partial_choices(self):
|
|
||||||
partial = [Status.DONE]
|
|
||||||
enum_field = EnumField(Status, choices=partial)
|
|
||||||
assert enum_field.choices == partial
|
|
||||||
|
|
||||||
class FancyDoc(Document):
|
|
||||||
z = enum_field
|
|
||||||
|
|
||||||
FancyDoc(z=Status.DONE).validate()
|
|
||||||
with pytest.raises(
|
|
||||||
ValidationError, match=r"Value must be one of .*Status.DONE"
|
|
||||||
):
|
|
||||||
FancyDoc(z=Status.NEW).validate()
|
|
||||||
|
|
||||||
def test_wrong_choices(self):
|
|
||||||
with pytest.raises(ValueError, match="Invalid choices"):
|
|
||||||
EnumField(Status, choices=["my", "custom", "options"])
|
|
||||||
with pytest.raises(ValueError, match="Invalid choices"):
|
|
||||||
EnumField(Status, choices=[Color.RED])
|
|
||||||
with pytest.raises(ValueError, match="Invalid choices"):
|
|
||||||
EnumField(Status, choices=[Status.DONE, Color.RED])
|
|
||||||
|
|
||||||
|
|
||||||
class ModelWithColor(Document):
|
|
||||||
color = EnumField(Color, default=Color.RED)
|
|
||||||
|
|
||||||
|
|
||||||
class TestIntEnumField(MongoDBTestCase):
|
|
||||||
def test_enum_with_int(self):
|
|
||||||
ModelWithColor.drop_collection()
|
|
||||||
m = ModelWithColor().save()
|
|
||||||
assert m.color == Color.RED
|
|
||||||
assert ModelWithColor.objects(color=Color.RED).count() == 1
|
|
||||||
assert ModelWithColor.objects(color=1).count() == 1
|
|
||||||
assert ModelWithColor.objects(color=2).count() == 0
|
|
||||||
|
|
||||||
def test_create_int_enum_by_value(self):
|
|
||||||
model = ModelWithColor(color=2).save()
|
|
||||||
assert model.color == Color.BLUE
|
|
||||||
|
|
||||||
def test_storage_enum_with_int(self):
|
|
||||||
model = ModelWithColor(color=Color.BLUE).save()
|
|
||||||
assert get_as_pymongo(model) == {"_id": model.id, "color": 2}
|
|
||||||
|
|
||||||
def test_validate_model(self):
|
|
||||||
with pytest.raises(ValidationError, match="Value must be one of"):
|
|
||||||
ModelWithColor(color=3).validate()
|
|
||||||
|
|
||||||
with pytest.raises(ValidationError, match="Value must be one of"):
|
|
||||||
ModelWithColor(color="wrong_type").validate()
|
|
||||||
|
|
||||||
|
|
||||||
class TestFunkyEnumField(MongoDBTestCase):
|
|
||||||
def test_enum_incompatible_bson_type_fails_during_save(self):
|
|
||||||
class FunkyColor(Enum):
|
|
||||||
YELLOW = object()
|
|
||||||
|
|
||||||
class ModelWithFunkyColor(Document):
|
|
||||||
color = EnumField(FunkyColor)
|
|
||||||
|
|
||||||
m = ModelWithFunkyColor(color=FunkyColor.YELLOW)
|
|
||||||
|
|
||||||
with pytest.raises(InvalidDocument, match="[cC]annot encode object"):
|
|
||||||
m.save()
|
|
@ -1,8 +1,9 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
import datetime
|
import datetime
|
||||||
import unittest
|
import unittest
|
||||||
|
|
||||||
|
from bson import DBRef, ObjectId, SON
|
||||||
import pytest
|
import pytest
|
||||||
from bson import SON, DBRef, ObjectId
|
|
||||||
|
|
||||||
from mongoengine import (
|
from mongoengine import (
|
||||||
BooleanField,
|
BooleanField,
|
||||||
@ -34,12 +35,9 @@ from mongoengine import (
|
|||||||
StringField,
|
StringField,
|
||||||
ValidationError,
|
ValidationError,
|
||||||
)
|
)
|
||||||
from mongoengine.base import (
|
from mongoengine.base import BaseField, EmbeddedDocumentList, _document_registry
|
||||||
BaseField,
|
|
||||||
EmbeddedDocumentList,
|
|
||||||
_document_registry,
|
|
||||||
)
|
|
||||||
from mongoengine.errors import DeprecatedError
|
from mongoengine.errors import DeprecatedError
|
||||||
|
|
||||||
from tests.utils import MongoDBTestCase
|
from tests.utils import MongoDBTestCase
|
||||||
|
|
||||||
|
|
||||||
@ -295,7 +293,7 @@ class TestField(MongoDBTestCase):
|
|||||||
HandleNoneFields.drop_collection()
|
HandleNoneFields.drop_collection()
|
||||||
|
|
||||||
doc = HandleNoneFields()
|
doc = HandleNoneFields()
|
||||||
doc.str_fld = "spam ham egg"
|
doc.str_fld = u"spam ham egg"
|
||||||
doc.int_fld = 42
|
doc.int_fld = 42
|
||||||
doc.flt_fld = 4.2
|
doc.flt_fld = 4.2
|
||||||
doc.com_dt_fld = datetime.datetime.utcnow()
|
doc.com_dt_fld = datetime.datetime.utcnow()
|
||||||
@ -309,7 +307,7 @@ class TestField(MongoDBTestCase):
|
|||||||
)
|
)
|
||||||
assert res == 1
|
assert res == 1
|
||||||
|
|
||||||
# Retrieve data from db and verify it.
|
# Retrive data from db and verify it.
|
||||||
ret = HandleNoneFields.objects.all()[0]
|
ret = HandleNoneFields.objects.all()[0]
|
||||||
assert ret.str_fld is None
|
assert ret.str_fld is None
|
||||||
assert ret.int_fld is None
|
assert ret.int_fld is None
|
||||||
@ -331,19 +329,19 @@ class TestField(MongoDBTestCase):
|
|||||||
HandleNoneFields.drop_collection()
|
HandleNoneFields.drop_collection()
|
||||||
|
|
||||||
doc = HandleNoneFields()
|
doc = HandleNoneFields()
|
||||||
doc.str_fld = "spam ham egg"
|
doc.str_fld = u"spam ham egg"
|
||||||
doc.int_fld = 42
|
doc.int_fld = 42
|
||||||
doc.flt_fld = 4.2
|
doc.flt_fld = 4.2
|
||||||
doc.comp_dt_fld = datetime.datetime.utcnow()
|
doc.comp_dt_fld = datetime.datetime.utcnow()
|
||||||
doc.save()
|
doc.save()
|
||||||
|
|
||||||
# Unset all the fields
|
# Unset all the fields
|
||||||
HandleNoneFields._get_collection().update_one(
|
HandleNoneFields._get_collection().update(
|
||||||
{"_id": doc.id},
|
{"_id": doc.id},
|
||||||
{"$unset": {"str_fld": 1, "int_fld": 1, "flt_fld": 1, "comp_dt_fld": 1}},
|
{"$unset": {"str_fld": 1, "int_fld": 1, "flt_fld": 1, "comp_dt_fld": 1}},
|
||||||
)
|
)
|
||||||
|
|
||||||
# Retrieve data from db and verify it.
|
# Retrive data from db and verify it.
|
||||||
ret = HandleNoneFields.objects.first()
|
ret = HandleNoneFields.objects.first()
|
||||||
assert ret.str_fld is None
|
assert ret.str_fld is None
|
||||||
assert ret.int_fld is None
|
assert ret.int_fld is None
|
||||||
@ -377,6 +375,34 @@ class TestField(MongoDBTestCase):
|
|||||||
person.id = str(ObjectId())
|
person.id = str(ObjectId())
|
||||||
person.validate()
|
person.validate()
|
||||||
|
|
||||||
|
def test_string_validation(self):
|
||||||
|
"""Ensure that invalid values cannot be assigned to string fields."""
|
||||||
|
|
||||||
|
class Person(Document):
|
||||||
|
name = StringField(max_length=20)
|
||||||
|
userid = StringField(r"[0-9a-z_]+$")
|
||||||
|
|
||||||
|
person = Person(name=34)
|
||||||
|
with pytest.raises(ValidationError):
|
||||||
|
person.validate()
|
||||||
|
|
||||||
|
# Test regex validation on userid
|
||||||
|
person = Person(userid="test.User")
|
||||||
|
with pytest.raises(ValidationError):
|
||||||
|
person.validate()
|
||||||
|
|
||||||
|
person.userid = "test_user"
|
||||||
|
assert person.userid == "test_user"
|
||||||
|
person.validate()
|
||||||
|
|
||||||
|
# Test max length validation on name
|
||||||
|
person = Person(name="Name that is more than twenty characters")
|
||||||
|
with pytest.raises(ValidationError):
|
||||||
|
person.validate()
|
||||||
|
|
||||||
|
person.name = "Shorter name"
|
||||||
|
person.validate()
|
||||||
|
|
||||||
def test_db_field_validation(self):
|
def test_db_field_validation(self):
|
||||||
"""Ensure that db_field doesn't accept invalid values."""
|
"""Ensure that db_field doesn't accept invalid values."""
|
||||||
|
|
||||||
@ -401,9 +427,9 @@ class TestField(MongoDBTestCase):
|
|||||||
def test_list_validation(self):
|
def test_list_validation(self):
|
||||||
"""Ensure that a list field only accepts lists with valid elements."""
|
"""Ensure that a list field only accepts lists with valid elements."""
|
||||||
access_level_choices = (
|
access_level_choices = (
|
||||||
("a", "Administration"),
|
("a", u"Administration"),
|
||||||
("b", "Manager"),
|
("b", u"Manager"),
|
||||||
("c", "Staff"),
|
("c", u"Staff"),
|
||||||
)
|
)
|
||||||
|
|
||||||
class User(Document):
|
class User(Document):
|
||||||
@ -451,7 +477,7 @@ class TestField(MongoDBTestCase):
|
|||||||
post.access_list = ["a", "b"]
|
post.access_list = ["a", "b"]
|
||||||
post.validate()
|
post.validate()
|
||||||
|
|
||||||
assert post.get_access_list_display() == "Administration, Manager"
|
assert post.get_access_list_display() == u"Administration, Manager"
|
||||||
|
|
||||||
post.comments = ["a"]
|
post.comments = ["a"]
|
||||||
with pytest.raises(ValidationError):
|
with pytest.raises(ValidationError):
|
||||||
@ -519,7 +545,8 @@ class TestField(MongoDBTestCase):
|
|||||||
post.validate()
|
post.validate()
|
||||||
|
|
||||||
def test_sorted_list_sorting(self):
|
def test_sorted_list_sorting(self):
|
||||||
"""Ensure that a sorted list field properly sorts values."""
|
"""Ensure that a sorted list field properly sorts values.
|
||||||
|
"""
|
||||||
|
|
||||||
class Comment(EmbeddedDocument):
|
class Comment(EmbeddedDocument):
|
||||||
order = IntField()
|
order = IntField()
|
||||||
@ -635,7 +662,8 @@ class TestField(MongoDBTestCase):
|
|||||||
)
|
)
|
||||||
|
|
||||||
def test_list_field_manipulative_operators(self):
|
def test_list_field_manipulative_operators(self):
|
||||||
"""Ensure that ListField works with standard list operators that manipulate the list."""
|
"""Ensure that ListField works with standard list operators that manipulate the list.
|
||||||
|
"""
|
||||||
|
|
||||||
class BlogPost(Document):
|
class BlogPost(Document):
|
||||||
ref = StringField()
|
ref = StringField()
|
||||||
@ -1056,7 +1084,7 @@ class TestField(MongoDBTestCase):
|
|||||||
|
|
||||||
e = Simple().save()
|
e = Simple().save()
|
||||||
e.mapping = []
|
e.mapping = []
|
||||||
assert e._changed_fields == []
|
assert [] == e._changed_fields
|
||||||
|
|
||||||
class Simple(Document):
|
class Simple(Document):
|
||||||
mapping = DictField()
|
mapping = DictField()
|
||||||
@ -1065,7 +1093,7 @@ class TestField(MongoDBTestCase):
|
|||||||
|
|
||||||
e = Simple().save()
|
e = Simple().save()
|
||||||
e.mapping = {}
|
e.mapping = {}
|
||||||
assert e._changed_fields == []
|
assert [] == e._changed_fields
|
||||||
|
|
||||||
def test_slice_marks_field_as_changed(self):
|
def test_slice_marks_field_as_changed(self):
|
||||||
class Simple(Document):
|
class Simple(Document):
|
||||||
@ -1332,9 +1360,9 @@ class TestField(MongoDBTestCase):
|
|||||||
foo.delete()
|
foo.delete()
|
||||||
bar = Bar.objects.get()
|
bar = Bar.objects.get()
|
||||||
with pytest.raises(DoesNotExist):
|
with pytest.raises(DoesNotExist):
|
||||||
bar.ref
|
getattr(bar, "ref")
|
||||||
with pytest.raises(DoesNotExist):
|
with pytest.raises(DoesNotExist):
|
||||||
bar.generic_ref
|
getattr(bar, "generic_ref")
|
||||||
|
|
||||||
# When auto_dereference is disabled, there is no trouble returning DBRef
|
# When auto_dereference is disabled, there is no trouble returning DBRef
|
||||||
bar = Bar.objects.get()
|
bar = Bar.objects.get()
|
||||||
@ -1345,7 +1373,8 @@ class TestField(MongoDBTestCase):
|
|||||||
assert bar.generic_ref == {"_ref": expected, "_cls": "Foo"}
|
assert bar.generic_ref == {"_ref": expected, "_cls": "Foo"}
|
||||||
|
|
||||||
def test_list_item_dereference(self):
|
def test_list_item_dereference(self):
|
||||||
"""Ensure that DBRef items in ListFields are dereferenced."""
|
"""Ensure that DBRef items in ListFields are dereferenced.
|
||||||
|
"""
|
||||||
|
|
||||||
class User(Document):
|
class User(Document):
|
||||||
name = StringField()
|
name = StringField()
|
||||||
@ -1370,7 +1399,8 @@ class TestField(MongoDBTestCase):
|
|||||||
assert group_obj.members[1].name == user2.name
|
assert group_obj.members[1].name == user2.name
|
||||||
|
|
||||||
def test_recursive_reference(self):
|
def test_recursive_reference(self):
|
||||||
"""Ensure that ReferenceFields can reference their own documents."""
|
"""Ensure that ReferenceFields can reference their own documents.
|
||||||
|
"""
|
||||||
|
|
||||||
class Employee(Document):
|
class Employee(Document):
|
||||||
name = StringField()
|
name = StringField()
|
||||||
@ -1397,7 +1427,8 @@ class TestField(MongoDBTestCase):
|
|||||||
assert peter.friends == friends
|
assert peter.friends == friends
|
||||||
|
|
||||||
def test_recursive_embedding(self):
|
def test_recursive_embedding(self):
|
||||||
"""Ensure that EmbeddedDocumentFields can contain their own documents."""
|
"""Ensure that EmbeddedDocumentFields can contain their own documents.
|
||||||
|
"""
|
||||||
|
|
||||||
class TreeNode(EmbeddedDocument):
|
class TreeNode(EmbeddedDocument):
|
||||||
name = StringField()
|
name = StringField()
|
||||||
@ -1473,7 +1504,8 @@ class TestField(MongoDBTestCase):
|
|||||||
AbstractDoc.drop_collection()
|
AbstractDoc.drop_collection()
|
||||||
|
|
||||||
def test_reference_class_with_abstract_parent(self):
|
def test_reference_class_with_abstract_parent(self):
|
||||||
"""Ensure that a class with an abstract parent can be referenced."""
|
"""Ensure that a class with an abstract parent can be referenced.
|
||||||
|
"""
|
||||||
|
|
||||||
class Sibling(Document):
|
class Sibling(Document):
|
||||||
name = StringField()
|
name = StringField()
|
||||||
@ -1543,7 +1575,8 @@ class TestField(MongoDBTestCase):
|
|||||||
brother.save()
|
brother.save()
|
||||||
|
|
||||||
def test_generic_reference(self):
|
def test_generic_reference(self):
|
||||||
"""Ensure that a GenericReferenceField properly dereferences items."""
|
"""Ensure that a GenericReferenceField properly dereferences items.
|
||||||
|
"""
|
||||||
|
|
||||||
class Link(Document):
|
class Link(Document):
|
||||||
title = StringField()
|
title = StringField()
|
||||||
@ -1582,7 +1615,8 @@ class TestField(MongoDBTestCase):
|
|||||||
assert isinstance(bm.bookmark_object, Link)
|
assert isinstance(bm.bookmark_object, Link)
|
||||||
|
|
||||||
def test_generic_reference_list(self):
|
def test_generic_reference_list(self):
|
||||||
"""Ensure that a ListField properly dereferences generic references."""
|
"""Ensure that a ListField properly dereferences generic references.
|
||||||
|
"""
|
||||||
|
|
||||||
class Link(Document):
|
class Link(Document):
|
||||||
title = StringField()
|
title = StringField()
|
||||||
@ -1685,7 +1719,8 @@ class TestField(MongoDBTestCase):
|
|||||||
assert bm.bookmark_object == post_1
|
assert bm.bookmark_object == post_1
|
||||||
|
|
||||||
def test_generic_reference_string_choices(self):
|
def test_generic_reference_string_choices(self):
|
||||||
"""Ensure that a GenericReferenceField can handle choices as strings"""
|
"""Ensure that a GenericReferenceField can handle choices as strings
|
||||||
|
"""
|
||||||
|
|
||||||
class Link(Document):
|
class Link(Document):
|
||||||
title = StringField()
|
title = StringField()
|
||||||
@ -1777,7 +1812,8 @@ class TestField(MongoDBTestCase):
|
|||||||
assert user.bookmarks == [post_1]
|
assert user.bookmarks == [post_1]
|
||||||
|
|
||||||
def test_generic_reference_list_item_modification(self):
|
def test_generic_reference_list_item_modification(self):
|
||||||
"""Ensure that modifications of related documents (through generic reference) don't influence on querying"""
|
"""Ensure that modifications of related documents (through generic reference) don't influence on querying
|
||||||
|
"""
|
||||||
|
|
||||||
class Post(Document):
|
class Post(Document):
|
||||||
title = StringField()
|
title = StringField()
|
||||||
@ -1865,7 +1901,8 @@ class TestField(MongoDBTestCase):
|
|||||||
assert doc == doc2
|
assert doc == doc2
|
||||||
|
|
||||||
def test_choices_allow_using_sets_as_choices(self):
|
def test_choices_allow_using_sets_as_choices(self):
|
||||||
"""Ensure that sets can be used when setting choices"""
|
"""Ensure that sets can be used when setting choices
|
||||||
|
"""
|
||||||
|
|
||||||
class Shirt(Document):
|
class Shirt(Document):
|
||||||
size = StringField(choices={"M", "L"})
|
size = StringField(choices={"M", "L"})
|
||||||
@ -1884,7 +1921,8 @@ class TestField(MongoDBTestCase):
|
|||||||
shirt.validate()
|
shirt.validate()
|
||||||
|
|
||||||
def test_choices_validation_accept_possible_value(self):
|
def test_choices_validation_accept_possible_value(self):
|
||||||
"""Ensure that value is in a container of allowed values."""
|
"""Ensure that value is in a container of allowed values.
|
||||||
|
"""
|
||||||
|
|
||||||
class Shirt(Document):
|
class Shirt(Document):
|
||||||
size = StringField(choices=("S", "M"))
|
size = StringField(choices=("S", "M"))
|
||||||
@ -1893,7 +1931,8 @@ class TestField(MongoDBTestCase):
|
|||||||
shirt.validate()
|
shirt.validate()
|
||||||
|
|
||||||
def test_choices_validation_reject_unknown_value(self):
|
def test_choices_validation_reject_unknown_value(self):
|
||||||
"""Ensure that unallowed value are rejected upon validation"""
|
"""Ensure that unallowed value are rejected upon validation
|
||||||
|
"""
|
||||||
|
|
||||||
class Shirt(Document):
|
class Shirt(Document):
|
||||||
size = StringField(choices=("S", "M"))
|
size = StringField(choices=("S", "M"))
|
||||||
@ -1951,7 +1990,8 @@ class TestField(MongoDBTestCase):
|
|||||||
shirt1.validate()
|
shirt1.validate()
|
||||||
|
|
||||||
def test_simple_choices_validation(self):
|
def test_simple_choices_validation(self):
|
||||||
"""Ensure that value is in a container of allowed values."""
|
"""Ensure that value is in a container of allowed values.
|
||||||
|
"""
|
||||||
|
|
||||||
class Shirt(Document):
|
class Shirt(Document):
|
||||||
size = StringField(max_length=3, choices=("S", "M", "L", "XL", "XXL"))
|
size = StringField(max_length=3, choices=("S", "M", "L", "XL", "XXL"))
|
||||||
@ -2000,11 +2040,12 @@ class TestField(MongoDBTestCase):
|
|||||||
shirt.validate()
|
shirt.validate()
|
||||||
|
|
||||||
def test_simple_choices_validation_invalid_value(self):
|
def test_simple_choices_validation_invalid_value(self):
|
||||||
"""Ensure that error messages are correct."""
|
"""Ensure that error messages are correct.
|
||||||
|
"""
|
||||||
SIZES = ("S", "M", "L", "XL", "XXL")
|
SIZES = ("S", "M", "L", "XL", "XXL")
|
||||||
COLORS = (("R", "Red"), ("B", "Blue"))
|
COLORS = (("R", "Red"), ("B", "Blue"))
|
||||||
SIZE_MESSAGE = "Value must be one of ('S', 'M', 'L', 'XL', 'XXL')"
|
SIZE_MESSAGE = u"Value must be one of ('S', 'M', 'L', 'XL', 'XXL')"
|
||||||
COLOR_MESSAGE = "Value must be one of ['R', 'B']"
|
COLOR_MESSAGE = u"Value must be one of ['R', 'B']"
|
||||||
|
|
||||||
class Shirt(Document):
|
class Shirt(Document):
|
||||||
size = StringField(max_length=3, choices=SIZES)
|
size = StringField(max_length=3, choices=SIZES)
|
||||||
@ -2067,7 +2108,7 @@ class TestField(MongoDBTestCase):
|
|||||||
assert "comments" in error_dict
|
assert "comments" in error_dict
|
||||||
assert 1 in error_dict["comments"]
|
assert 1 in error_dict["comments"]
|
||||||
assert "content" in error_dict["comments"][1]
|
assert "content" in error_dict["comments"][1]
|
||||||
assert error_dict["comments"][1]["content"] == "Field is required"
|
assert error_dict["comments"][1]["content"] == u"Field is required"
|
||||||
|
|
||||||
post.comments[1].content = "here we go"
|
post.comments[1].content = "here we go"
|
||||||
post.validate()
|
post.validate()
|
||||||
@ -2077,9 +2118,9 @@ class TestField(MongoDBTestCase):
|
|||||||
a ComplexBaseField.
|
a ComplexBaseField.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
class SomeField(BaseField):
|
class EnumField(BaseField):
|
||||||
def __init__(self, **kwargs):
|
def __init__(self, **kwargs):
|
||||||
super().__init__(**kwargs)
|
super(EnumField, self).__init__(**kwargs)
|
||||||
|
|
||||||
def to_mongo(self, value):
|
def to_mongo(self, value):
|
||||||
return value
|
return value
|
||||||
@ -2088,7 +2129,7 @@ class TestField(MongoDBTestCase):
|
|||||||
return tuple(value)
|
return tuple(value)
|
||||||
|
|
||||||
class TestDoc(Document):
|
class TestDoc(Document):
|
||||||
items = ListField(SomeField())
|
items = ListField(EnumField())
|
||||||
|
|
||||||
TestDoc.drop_collection()
|
TestDoc.drop_collection()
|
||||||
|
|
||||||
@ -2232,13 +2273,6 @@ class TestField(MongoDBTestCase):
|
|||||||
with pytest.raises(FieldDoesNotExist):
|
with pytest.raises(FieldDoesNotExist):
|
||||||
Doc(bar="test")
|
Doc(bar="test")
|
||||||
|
|
||||||
def test_undefined_field_works_no_confusion_with_db_field(self):
|
|
||||||
class Doc(Document):
|
|
||||||
foo = StringField(db_field="bar")
|
|
||||||
|
|
||||||
with pytest.raises(FieldDoesNotExist):
|
|
||||||
Doc(bar="test")
|
|
||||||
|
|
||||||
|
|
||||||
class TestEmbeddedDocumentListField(MongoDBTestCase):
|
class TestEmbeddedDocumentListField(MongoDBTestCase):
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
@ -2581,11 +2615,11 @@ class TestEmbeddedDocumentListField(MongoDBTestCase):
|
|||||||
"""
|
"""
|
||||||
post = self.BlogPost(
|
post = self.BlogPost(
|
||||||
comments=[
|
comments=[
|
||||||
self.Comments(author="user1", message="сообщение"),
|
self.Comments(author="user1", message=u"сообщение"),
|
||||||
self.Comments(author="user2", message="хабарлама"),
|
self.Comments(author="user2", message=u"хабарлама"),
|
||||||
]
|
]
|
||||||
).save()
|
).save()
|
||||||
assert post.comments.get(message="сообщение").author == "user1"
|
assert post.comments.get(message=u"сообщение").author == "user1"
|
||||||
|
|
||||||
def test_save(self):
|
def test_save(self):
|
||||||
"""
|
"""
|
||||||
|
@ -1,17 +1,19 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
import copy
|
import copy
|
||||||
import os
|
import os
|
||||||
import tempfile
|
import tempfile
|
||||||
import unittest
|
import unittest
|
||||||
from io import BytesIO
|
|
||||||
|
|
||||||
import gridfs
|
import gridfs
|
||||||
import pytest
|
import pytest
|
||||||
|
import six
|
||||||
|
|
||||||
from mongoengine import *
|
from mongoengine import *
|
||||||
from mongoengine.connection import get_db
|
from mongoengine.connection import get_db
|
||||||
|
from mongoengine.python_support import StringIO
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from PIL import Image # noqa: F401
|
from PIL import Image
|
||||||
|
|
||||||
HAS_PIL = True
|
HAS_PIL = True
|
||||||
except ImportError:
|
except ImportError:
|
||||||
@ -28,7 +30,7 @@ TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), "mongodb_leaf.png")
|
|||||||
def get_file(path):
|
def get_file(path):
|
||||||
"""Use a BytesIO instead of a file to allow
|
"""Use a BytesIO instead of a file to allow
|
||||||
to have a one-liner and avoid that the file remains opened"""
|
to have a one-liner and avoid that the file remains opened"""
|
||||||
bytes_io = BytesIO()
|
bytes_io = StringIO()
|
||||||
with open(path, "rb") as f:
|
with open(path, "rb") as f:
|
||||||
bytes_io.write(f.read())
|
bytes_io.write(f.read())
|
||||||
bytes_io.seek(0)
|
bytes_io.seek(0)
|
||||||
@ -48,14 +50,15 @@ class TestFileField(MongoDBTestCase):
|
|||||||
DemoFile.objects.create()
|
DemoFile.objects.create()
|
||||||
|
|
||||||
def test_file_fields(self):
|
def test_file_fields(self):
|
||||||
"""Ensure that file fields can be written to and their data retrieved"""
|
"""Ensure that file fields can be written to and their data retrieved
|
||||||
|
"""
|
||||||
|
|
||||||
class PutFile(Document):
|
class PutFile(Document):
|
||||||
the_file = FileField()
|
the_file = FileField()
|
||||||
|
|
||||||
PutFile.drop_collection()
|
PutFile.drop_collection()
|
||||||
|
|
||||||
text = b"Hello, World!"
|
text = six.b("Hello, World!")
|
||||||
content_type = "text/plain"
|
content_type = "text/plain"
|
||||||
|
|
||||||
putfile = PutFile()
|
putfile = PutFile()
|
||||||
@ -77,7 +80,7 @@ class TestFileField(MongoDBTestCase):
|
|||||||
PutFile.drop_collection()
|
PutFile.drop_collection()
|
||||||
|
|
||||||
putfile = PutFile()
|
putfile = PutFile()
|
||||||
putstring = BytesIO()
|
putstring = StringIO()
|
||||||
putstring.write(text)
|
putstring.write(text)
|
||||||
putstring.seek(0)
|
putstring.seek(0)
|
||||||
putfile.the_file.put(putstring, content_type=content_type)
|
putfile.the_file.put(putstring, content_type=content_type)
|
||||||
@ -90,15 +93,16 @@ class TestFileField(MongoDBTestCase):
|
|||||||
result.the_file.delete()
|
result.the_file.delete()
|
||||||
|
|
||||||
def test_file_fields_stream(self):
|
def test_file_fields_stream(self):
|
||||||
"""Ensure that file fields can be written to and their data retrieved"""
|
"""Ensure that file fields can be written to and their data retrieved
|
||||||
|
"""
|
||||||
|
|
||||||
class StreamFile(Document):
|
class StreamFile(Document):
|
||||||
the_file = FileField()
|
the_file = FileField()
|
||||||
|
|
||||||
StreamFile.drop_collection()
|
StreamFile.drop_collection()
|
||||||
|
|
||||||
text = b"Hello, World!"
|
text = six.b("Hello, World!")
|
||||||
more_text = b"Foo Bar"
|
more_text = six.b("Foo Bar")
|
||||||
content_type = "text/plain"
|
content_type = "text/plain"
|
||||||
|
|
||||||
streamfile = StreamFile()
|
streamfile = StreamFile()
|
||||||
@ -133,8 +137,8 @@ class TestFileField(MongoDBTestCase):
|
|||||||
|
|
||||||
StreamFile.drop_collection()
|
StreamFile.drop_collection()
|
||||||
|
|
||||||
text = b"Hello, World!"
|
text = six.b("Hello, World!")
|
||||||
more_text = b"Foo Bar"
|
more_text = six.b("Foo Bar")
|
||||||
|
|
||||||
streamfile = StreamFile()
|
streamfile = StreamFile()
|
||||||
streamfile.save()
|
streamfile.save()
|
||||||
@ -163,8 +167,8 @@ class TestFileField(MongoDBTestCase):
|
|||||||
class SetFile(Document):
|
class SetFile(Document):
|
||||||
the_file = FileField()
|
the_file = FileField()
|
||||||
|
|
||||||
text = b"Hello, World!"
|
text = six.b("Hello, World!")
|
||||||
more_text = b"Foo Bar"
|
more_text = six.b("Foo Bar")
|
||||||
|
|
||||||
SetFile.drop_collection()
|
SetFile.drop_collection()
|
||||||
|
|
||||||
@ -192,7 +196,7 @@ class TestFileField(MongoDBTestCase):
|
|||||||
GridDocument.drop_collection()
|
GridDocument.drop_collection()
|
||||||
|
|
||||||
with tempfile.TemporaryFile() as f:
|
with tempfile.TemporaryFile() as f:
|
||||||
f.write(b"Hello World!")
|
f.write(six.b("Hello World!"))
|
||||||
f.flush()
|
f.flush()
|
||||||
|
|
||||||
# Test without default
|
# Test without default
|
||||||
@ -209,7 +213,7 @@ class TestFileField(MongoDBTestCase):
|
|||||||
assert doc_b.the_file.grid_id == doc_c.the_file.grid_id
|
assert doc_b.the_file.grid_id == doc_c.the_file.grid_id
|
||||||
|
|
||||||
# Test with default
|
# Test with default
|
||||||
doc_d = GridDocument(the_file=b"")
|
doc_d = GridDocument(the_file=six.b(""))
|
||||||
doc_d.save()
|
doc_d.save()
|
||||||
|
|
||||||
doc_e = GridDocument.objects.with_id(doc_d.id)
|
doc_e = GridDocument.objects.with_id(doc_d.id)
|
||||||
@ -226,7 +230,8 @@ class TestFileField(MongoDBTestCase):
|
|||||||
assert ["doc_b", "doc_e"] == grid_fs.list()
|
assert ["doc_b", "doc_e"] == grid_fs.list()
|
||||||
|
|
||||||
def test_file_uniqueness(self):
|
def test_file_uniqueness(self):
|
||||||
"""Ensure that each instance of a FileField is unique"""
|
"""Ensure that each instance of a FileField is unique
|
||||||
|
"""
|
||||||
|
|
||||||
class TestFile(Document):
|
class TestFile(Document):
|
||||||
name = StringField()
|
name = StringField()
|
||||||
@ -235,7 +240,7 @@ class TestFileField(MongoDBTestCase):
|
|||||||
# First instance
|
# First instance
|
||||||
test_file = TestFile()
|
test_file = TestFile()
|
||||||
test_file.name = "Hello, World!"
|
test_file.name = "Hello, World!"
|
||||||
test_file.the_file.put(b"Hello, World!")
|
test_file.the_file.put(six.b("Hello, World!"))
|
||||||
test_file.save()
|
test_file.save()
|
||||||
|
|
||||||
# Second instance
|
# Second instance
|
||||||
@ -282,7 +287,8 @@ class TestFileField(MongoDBTestCase):
|
|||||||
assert test_file.the_file.get().length == 4971
|
assert test_file.the_file.get().length == 4971
|
||||||
|
|
||||||
def test_file_boolean(self):
|
def test_file_boolean(self):
|
||||||
"""Ensure that a boolean test of a FileField indicates its presence"""
|
"""Ensure that a boolean test of a FileField indicates its presence
|
||||||
|
"""
|
||||||
|
|
||||||
class TestFile(Document):
|
class TestFile(Document):
|
||||||
the_file = FileField()
|
the_file = FileField()
|
||||||
@ -291,7 +297,7 @@ class TestFileField(MongoDBTestCase):
|
|||||||
|
|
||||||
test_file = TestFile()
|
test_file = TestFile()
|
||||||
assert not bool(test_file.the_file)
|
assert not bool(test_file.the_file)
|
||||||
test_file.the_file.put(b"Hello, World!", content_type="text/plain")
|
test_file.the_file.put(six.b("Hello, World!"), content_type="text/plain")
|
||||||
test_file.save()
|
test_file.save()
|
||||||
assert bool(test_file.the_file)
|
assert bool(test_file.the_file)
|
||||||
|
|
||||||
@ -308,12 +314,12 @@ class TestFileField(MongoDBTestCase):
|
|||||||
assert test_file.the_file not in [{"test": 1}]
|
assert test_file.the_file not in [{"test": 1}]
|
||||||
|
|
||||||
def test_file_disk_space(self):
|
def test_file_disk_space(self):
|
||||||
"""Test disk space usage when we delete/replace a file"""
|
""" Test disk space usage when we delete/replace a file """
|
||||||
|
|
||||||
class TestFile(Document):
|
class TestFile(Document):
|
||||||
the_file = FileField()
|
the_file = FileField()
|
||||||
|
|
||||||
text = b"Hello, World!"
|
text = six.b("Hello, World!")
|
||||||
content_type = "text/plain"
|
content_type = "text/plain"
|
||||||
|
|
||||||
testfile = TestFile()
|
testfile = TestFile()
|
||||||
@ -357,7 +363,7 @@ class TestFileField(MongoDBTestCase):
|
|||||||
testfile.the_file.put(text, content_type=content_type, filename="hello")
|
testfile.the_file.put(text, content_type=content_type, filename="hello")
|
||||||
testfile.save()
|
testfile.save()
|
||||||
|
|
||||||
text = b"Bonjour, World!"
|
text = six.b("Bonjour, World!")
|
||||||
testfile.the_file.replace(text, content_type=content_type, filename="hello")
|
testfile.the_file.replace(text, content_type=content_type, filename="hello")
|
||||||
testfile.save()
|
testfile.save()
|
||||||
|
|
||||||
@ -381,7 +387,7 @@ class TestFileField(MongoDBTestCase):
|
|||||||
TestImage.drop_collection()
|
TestImage.drop_collection()
|
||||||
|
|
||||||
with tempfile.TemporaryFile() as f:
|
with tempfile.TemporaryFile() as f:
|
||||||
f.write(b"Hello World!")
|
f.write(six.b("Hello World!"))
|
||||||
f.flush()
|
f.flush()
|
||||||
|
|
||||||
t = TestImage()
|
t = TestImage()
|
||||||
@ -423,7 +429,7 @@ class TestFileField(MongoDBTestCase):
|
|||||||
@require_pil
|
@require_pil
|
||||||
def test_image_field_resize(self):
|
def test_image_field_resize(self):
|
||||||
class TestImage(Document):
|
class TestImage(Document):
|
||||||
image = ImageField(size=(185, 37, True))
|
image = ImageField(size=(185, 37))
|
||||||
|
|
||||||
TestImage.drop_collection()
|
TestImage.drop_collection()
|
||||||
|
|
||||||
@ -465,7 +471,7 @@ class TestFileField(MongoDBTestCase):
|
|||||||
@require_pil
|
@require_pil
|
||||||
def test_image_field_thumbnail(self):
|
def test_image_field_thumbnail(self):
|
||||||
class TestImage(Document):
|
class TestImage(Document):
|
||||||
image = ImageField(thumbnail_size=(92, 18, True))
|
image = ImageField(thumbnail_size=(92, 18))
|
||||||
|
|
||||||
TestImage.drop_collection()
|
TestImage.drop_collection()
|
||||||
|
|
||||||
@ -497,21 +503,21 @@ class TestFileField(MongoDBTestCase):
|
|||||||
# First instance
|
# First instance
|
||||||
test_file = TestFile()
|
test_file = TestFile()
|
||||||
test_file.name = "Hello, World!"
|
test_file.name = "Hello, World!"
|
||||||
test_file.the_file.put(b"Hello, World!", name="hello.txt")
|
test_file.the_file.put(six.b("Hello, World!"), name="hello.txt")
|
||||||
test_file.save()
|
test_file.save()
|
||||||
|
|
||||||
data = get_db("test_files").macumba.files.find_one()
|
data = get_db("test_files").macumba.files.find_one()
|
||||||
assert data.get("name") == "hello.txt"
|
assert data.get("name") == "hello.txt"
|
||||||
|
|
||||||
test_file = TestFile.objects.first()
|
test_file = TestFile.objects.first()
|
||||||
assert test_file.the_file.read() == b"Hello, World!"
|
assert test_file.the_file.read() == six.b("Hello, World!")
|
||||||
|
|
||||||
test_file = TestFile.objects.first()
|
test_file = TestFile.objects.first()
|
||||||
test_file.the_file = b"Hello, World!"
|
test_file.the_file = six.b("HELLO, WORLD!")
|
||||||
test_file.save()
|
test_file.save()
|
||||||
|
|
||||||
test_file = TestFile.objects.first()
|
test_file = TestFile.objects.first()
|
||||||
assert test_file.the_file.read() == b"Hello, World!"
|
assert test_file.the_file.read() == six.b("HELLO, WORLD!")
|
||||||
|
|
||||||
def test_copyable(self):
|
def test_copyable(self):
|
||||||
class PutFile(Document):
|
class PutFile(Document):
|
||||||
@ -519,7 +525,7 @@ class TestFileField(MongoDBTestCase):
|
|||||||
|
|
||||||
PutFile.drop_collection()
|
PutFile.drop_collection()
|
||||||
|
|
||||||
text = b"Hello, World!"
|
text = six.b("Hello, World!")
|
||||||
content_type = "text/plain"
|
content_type = "text/plain"
|
||||||
|
|
||||||
putfile = PutFile()
|
putfile = PutFile()
|
||||||
|
@ -1,6 +1,9 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
import pytest
|
import pytest
|
||||||
|
import six
|
||||||
|
|
||||||
from mongoengine import *
|
from mongoengine import *
|
||||||
|
|
||||||
from tests.utils import MongoDBTestCase
|
from tests.utils import MongoDBTestCase
|
||||||
|
|
||||||
|
|
||||||
@ -18,7 +21,8 @@ class TestFloatField(MongoDBTestCase):
|
|||||||
assert 1 == TestDocument.objects(float_fld__ne=1).count()
|
assert 1 == TestDocument.objects(float_fld__ne=1).count()
|
||||||
|
|
||||||
def test_validation(self):
|
def test_validation(self):
|
||||||
"""Ensure that invalid values cannot be assigned to float fields."""
|
"""Ensure that invalid values cannot be assigned to float fields.
|
||||||
|
"""
|
||||||
|
|
||||||
class Person(Document):
|
class Person(Document):
|
||||||
height = FloatField(min_value=0.1, max_value=3.5)
|
height = FloatField(min_value=0.1, max_value=3.5)
|
||||||
@ -48,8 +52,9 @@ class TestFloatField(MongoDBTestCase):
|
|||||||
|
|
||||||
big_person = BigPerson()
|
big_person = BigPerson()
|
||||||
|
|
||||||
big_person.height = int(0)
|
for value, value_type in enumerate(six.integer_types):
|
||||||
big_person.validate()
|
big_person.height = value_type(value)
|
||||||
|
big_person.validate()
|
||||||
|
|
||||||
big_person.height = 2 ** 500
|
big_person.height = 2 ** 500
|
||||||
big_person.validate()
|
big_person.validate()
|
||||||
|
@ -1,3 +1,4 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
import unittest
|
import unittest
|
||||||
|
|
||||||
from mongoengine import *
|
from mongoengine import *
|
||||||
@ -8,7 +9,7 @@ class TestGeoField(MongoDBTestCase):
|
|||||||
def _test_for_expected_error(self, Cls, loc, expected):
|
def _test_for_expected_error(self, Cls, loc, expected):
|
||||||
try:
|
try:
|
||||||
Cls(loc=loc).validate()
|
Cls(loc=loc).validate()
|
||||||
self.fail(f"Should not validate the location {loc}")
|
self.fail("Should not validate the location {0}".format(loc))
|
||||||
except ValidationError as e:
|
except ValidationError as e:
|
||||||
assert expected == e.to_dict()["loc"]
|
assert expected == e.to_dict()["loc"]
|
||||||
|
|
||||||
@ -290,7 +291,8 @@ class TestGeoField(MongoDBTestCase):
|
|||||||
Location(loc=[[[[1, 2], [3, 4], [5, 6], [1, 2]]]]).validate()
|
Location(loc=[[[[1, 2], [3, 4], [5, 6], [1, 2]]]]).validate()
|
||||||
|
|
||||||
def test_indexes_geopoint(self):
|
def test_indexes_geopoint(self):
|
||||||
"""Ensure that indexes are created automatically for GeoPointFields."""
|
"""Ensure that indexes are created automatically for GeoPointFields.
|
||||||
|
"""
|
||||||
|
|
||||||
class Event(Document):
|
class Event(Document):
|
||||||
title = StringField()
|
title = StringField()
|
||||||
@ -316,7 +318,8 @@ class TestGeoField(MongoDBTestCase):
|
|||||||
assert geo_indicies == [{"fields": [("venue.location", "2d")]}]
|
assert geo_indicies == [{"fields": [("venue.location", "2d")]}]
|
||||||
|
|
||||||
def test_indexes_2dsphere(self):
|
def test_indexes_2dsphere(self):
|
||||||
"""Ensure that indexes are created automatically for GeoPointFields."""
|
"""Ensure that indexes are created automatically for GeoPointFields.
|
||||||
|
"""
|
||||||
|
|
||||||
class Event(Document):
|
class Event(Document):
|
||||||
title = StringField()
|
title = StringField()
|
||||||
@ -330,7 +333,8 @@ class TestGeoField(MongoDBTestCase):
|
|||||||
assert {"fields": [("point", "2dsphere")]} in geo_indicies
|
assert {"fields": [("point", "2dsphere")]} in geo_indicies
|
||||||
|
|
||||||
def test_indexes_2dsphere_embedded(self):
|
def test_indexes_2dsphere_embedded(self):
|
||||||
"""Ensure that indexes are created automatically for GeoPointFields."""
|
"""Ensure that indexes are created automatically for GeoPointFields.
|
||||||
|
"""
|
||||||
|
|
||||||
class Venue(EmbeddedDocument):
|
class Venue(EmbeddedDocument):
|
||||||
name = StringField()
|
name = StringField()
|
||||||
@ -377,7 +381,7 @@ class TestGeoField(MongoDBTestCase):
|
|||||||
|
|
||||||
meta = {"indexes": [[("location", "2dsphere"), ("datetime", 1)]]}
|
meta = {"indexes": [[("location", "2dsphere"), ("datetime", 1)]]}
|
||||||
|
|
||||||
assert Log._geo_indices() == []
|
assert [] == Log._geo_indices()
|
||||||
|
|
||||||
Log.drop_collection()
|
Log.drop_collection()
|
||||||
Log.ensure_indexes()
|
Log.ensure_indexes()
|
||||||
@ -397,7 +401,7 @@ class TestGeoField(MongoDBTestCase):
|
|||||||
"indexes": [{"fields": [("location", "2dsphere"), ("datetime", 1)]}]
|
"indexes": [{"fields": [("location", "2dsphere"), ("datetime", 1)]}]
|
||||||
}
|
}
|
||||||
|
|
||||||
assert Log._geo_indices() == []
|
assert [] == Log._geo_indices()
|
||||||
|
|
||||||
Log.drop_collection()
|
Log.drop_collection()
|
||||||
Log.ensure_indexes()
|
Log.ensure_indexes()
|
||||||
|
@ -1,12 +1,15 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from mongoengine import *
|
from mongoengine import *
|
||||||
|
|
||||||
from tests.utils import MongoDBTestCase
|
from tests.utils import MongoDBTestCase
|
||||||
|
|
||||||
|
|
||||||
class TestIntField(MongoDBTestCase):
|
class TestIntField(MongoDBTestCase):
|
||||||
def test_int_validation(self):
|
def test_int_validation(self):
|
||||||
"""Ensure that invalid values cannot be assigned to int fields."""
|
"""Ensure that invalid values cannot be assigned to int fields.
|
||||||
|
"""
|
||||||
|
|
||||||
class Person(Document):
|
class Person(Document):
|
||||||
age = IntField(min_value=0, max_value=110)
|
age = IntField(min_value=0, max_value=110)
|
||||||
|
@ -1,9 +1,10 @@
|
|||||||
import pytest
|
# -*- coding: utf-8 -*-
|
||||||
from bson import DBRef, ObjectId
|
from bson import DBRef, ObjectId
|
||||||
|
import pytest
|
||||||
|
|
||||||
from mongoengine import *
|
from mongoengine import *
|
||||||
from mongoengine.base import LazyReference
|
from mongoengine.base import LazyReference
|
||||||
from mongoengine.context_managers import query_counter
|
|
||||||
from tests.utils import MongoDBTestCase
|
from tests.utils import MongoDBTestCase
|
||||||
|
|
||||||
|
|
||||||
@ -151,7 +152,7 @@ class TestLazyReferenceField(MongoDBTestCase):
|
|||||||
LazyReference(BadDoc, animal.pk),
|
LazyReference(BadDoc, animal.pk),
|
||||||
):
|
):
|
||||||
with pytest.raises(ValidationError):
|
with pytest.raises(ValidationError):
|
||||||
Ocurrence(person="test", animal=bad).save()
|
p = Ocurrence(person="test", animal=bad).save()
|
||||||
|
|
||||||
def test_lazy_reference_query_conversion(self):
|
def test_lazy_reference_query_conversion(self):
|
||||||
"""Ensure that LazyReferenceFields can be queried using objects and values
|
"""Ensure that LazyReferenceFields can be queried using objects and values
|
||||||
@ -330,70 +331,6 @@ class TestLazyReferenceField(MongoDBTestCase):
|
|||||||
occ.in_embedded.in_list = [animal1.id, animal2.id]
|
occ.in_embedded.in_list = [animal1.id, animal2.id]
|
||||||
check_fields_type(occ)
|
check_fields_type(occ)
|
||||||
|
|
||||||
def test_lazy_reference_embedded_dereferencing(self):
|
|
||||||
# Test case for #2375
|
|
||||||
|
|
||||||
# -- Test documents
|
|
||||||
|
|
||||||
class Author(Document):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
class AuthorReference(EmbeddedDocument):
|
|
||||||
author = LazyReferenceField(Author)
|
|
||||||
|
|
||||||
class Book(Document):
|
|
||||||
authors = EmbeddedDocumentListField(AuthorReference)
|
|
||||||
|
|
||||||
# -- Cleanup
|
|
||||||
|
|
||||||
Author.drop_collection()
|
|
||||||
Book.drop_collection()
|
|
||||||
|
|
||||||
# -- Create test data
|
|
||||||
|
|
||||||
author_1 = Author(name="A1").save()
|
|
||||||
author_2 = Author(name="A2").save()
|
|
||||||
author_3 = Author(name="A3").save()
|
|
||||||
book = Book(
|
|
||||||
authors=[
|
|
||||||
AuthorReference(author=author_1),
|
|
||||||
AuthorReference(author=author_2),
|
|
||||||
AuthorReference(author=author_3),
|
|
||||||
]
|
|
||||||
).save()
|
|
||||||
|
|
||||||
with query_counter() as qc:
|
|
||||||
book = Book.objects.first()
|
|
||||||
# Accessing the list must not trigger dereferencing.
|
|
||||||
book.authors
|
|
||||||
assert qc == 1
|
|
||||||
|
|
||||||
for ref in book.authors:
|
|
||||||
with pytest.raises(AttributeError):
|
|
||||||
ref["author"].name
|
|
||||||
assert isinstance(ref.author, LazyReference)
|
|
||||||
assert isinstance(ref.author.id, ObjectId)
|
|
||||||
|
|
||||||
def test_lazy_reference_in_list_with_changed_element(self):
|
|
||||||
class Animal(Document):
|
|
||||||
name = StringField()
|
|
||||||
tag = StringField()
|
|
||||||
|
|
||||||
class Ocurrence(Document):
|
|
||||||
in_list = ListField(LazyReferenceField(Animal))
|
|
||||||
|
|
||||||
Animal.drop_collection()
|
|
||||||
Ocurrence.drop_collection()
|
|
||||||
|
|
||||||
animal1 = Animal(name="doggo").save()
|
|
||||||
|
|
||||||
animal1.tag = "blue"
|
|
||||||
|
|
||||||
occ = Ocurrence(in_list=[animal1]).save()
|
|
||||||
animal1.save()
|
|
||||||
assert isinstance(occ.in_list[0], LazyReference)
|
|
||||||
assert occ.in_list[0].pk == animal1.pk
|
|
||||||
|
|
||||||
|
|
||||||
class TestGenericLazyReferenceField(MongoDBTestCase):
|
class TestGenericLazyReferenceField(MongoDBTestCase):
|
||||||
def test_generic_lazy_reference_simple(self):
|
def test_generic_lazy_reference_simple(self):
|
||||||
@ -449,7 +386,7 @@ class TestGenericLazyReferenceField(MongoDBTestCase):
|
|||||||
mineral = Mineral(name="Granite").save()
|
mineral = Mineral(name="Granite").save()
|
||||||
|
|
||||||
occ_animal = Ocurrence(living_thing=animal, thing=animal).save()
|
occ_animal = Ocurrence(living_thing=animal, thing=animal).save()
|
||||||
_ = Ocurrence(living_thing=vegetal, thing=vegetal).save()
|
occ_vegetal = Ocurrence(living_thing=vegetal, thing=vegetal).save()
|
||||||
with pytest.raises(ValidationError):
|
with pytest.raises(ValidationError):
|
||||||
Ocurrence(living_thing=mineral).save()
|
Ocurrence(living_thing=mineral).save()
|
||||||
|
|
||||||
@ -521,7 +458,7 @@ class TestGenericLazyReferenceField(MongoDBTestCase):
|
|||||||
baddoc = BadDoc().save()
|
baddoc = BadDoc().save()
|
||||||
for bad in (42, "foo", baddoc, LazyReference(BadDoc, animal.pk)):
|
for bad in (42, "foo", baddoc, LazyReference(BadDoc, animal.pk)):
|
||||||
with pytest.raises(ValidationError):
|
with pytest.raises(ValidationError):
|
||||||
Ocurrence(person="test", animal=bad).save()
|
p = Ocurrence(person="test", animal=bad).save()
|
||||||
|
|
||||||
def test_generic_lazy_reference_query_conversion(self):
|
def test_generic_lazy_reference_query_conversion(self):
|
||||||
class Member(Document):
|
class Member(Document):
|
||||||
|
@ -1,28 +1,19 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
import pytest
|
import pytest
|
||||||
from bson.int64 import Int64
|
import six
|
||||||
|
|
||||||
|
try:
|
||||||
|
from bson.int64 import Int64
|
||||||
|
except ImportError:
|
||||||
|
Int64 = long
|
||||||
|
|
||||||
from mongoengine import *
|
from mongoengine import *
|
||||||
from mongoengine.connection import get_db
|
from mongoengine.connection import get_db
|
||||||
from tests.utils import MongoDBTestCase, get_as_pymongo
|
|
||||||
|
from tests.utils import MongoDBTestCase
|
||||||
|
|
||||||
|
|
||||||
class TestLongField(MongoDBTestCase):
|
class TestLongField(MongoDBTestCase):
|
||||||
def test_storage(self):
|
|
||||||
class Person(Document):
|
|
||||||
value = LongField()
|
|
||||||
|
|
||||||
Person.drop_collection()
|
|
||||||
person = Person(value=5000)
|
|
||||||
person.save()
|
|
||||||
assert get_as_pymongo(person) == {"_id": person.id, "value": 5000}
|
|
||||||
|
|
||||||
def test_construction_does_not_fail_with_invalid_value(self):
|
|
||||||
class Person(Document):
|
|
||||||
value = LongField()
|
|
||||||
|
|
||||||
person = Person(value="not_an_int")
|
|
||||||
assert person.value == "not_an_int"
|
|
||||||
|
|
||||||
def test_long_field_is_considered_as_int64(self):
|
def test_long_field_is_considered_as_int64(self):
|
||||||
"""
|
"""
|
||||||
Tests that long fields are stored as long in mongo, even if long
|
Tests that long fields are stored as long in mongo, even if long
|
||||||
@ -37,24 +28,28 @@ class TestLongField(MongoDBTestCase):
|
|||||||
assert isinstance(
|
assert isinstance(
|
||||||
db.test_long_field_considered_as_int64.find()[0]["some_long"], Int64
|
db.test_long_field_considered_as_int64.find()[0]["some_long"], Int64
|
||||||
)
|
)
|
||||||
assert isinstance(doc.some_long, int)
|
assert isinstance(doc.some_long, six.integer_types)
|
||||||
|
|
||||||
def test_long_validation(self):
|
def test_long_validation(self):
|
||||||
"""Ensure that invalid values cannot be assigned to long fields."""
|
"""Ensure that invalid values cannot be assigned to long fields.
|
||||||
|
"""
|
||||||
|
|
||||||
class TestDocument(Document):
|
class TestDocument(Document):
|
||||||
value = LongField(min_value=0, max_value=110)
|
value = LongField(min_value=0, max_value=110)
|
||||||
|
|
||||||
TestDocument(value=50).validate()
|
doc = TestDocument()
|
||||||
|
doc.value = 50
|
||||||
|
doc.validate()
|
||||||
|
|
||||||
|
doc.value = -1
|
||||||
with pytest.raises(ValidationError):
|
with pytest.raises(ValidationError):
|
||||||
TestDocument(value=-1).validate()
|
doc.validate()
|
||||||
|
doc.value = 120
|
||||||
with pytest.raises(ValidationError):
|
with pytest.raises(ValidationError):
|
||||||
TestDocument(value=120).validate()
|
doc.validate()
|
||||||
|
doc.value = "ten"
|
||||||
with pytest.raises(ValidationError):
|
with pytest.raises(ValidationError):
|
||||||
TestDocument(value="ten").validate()
|
doc.validate()
|
||||||
|
|
||||||
def test_long_ne_operator(self):
|
def test_long_ne_operator(self):
|
||||||
class TestDocument(Document):
|
class TestDocument(Document):
|
||||||
@ -65,5 +60,4 @@ class TestLongField(MongoDBTestCase):
|
|||||||
TestDocument(long_fld=None).save()
|
TestDocument(long_fld=None).save()
|
||||||
TestDocument(long_fld=1).save()
|
TestDocument(long_fld=1).save()
|
||||||
|
|
||||||
assert TestDocument.objects(long_fld__ne=None).count() == 1
|
assert 1 == TestDocument.objects(long_fld__ne=None).count()
|
||||||
assert TestDocument.objects(long_fld__ne=1).count() == 1
|
|
||||||
|
@ -1,3 +1,4 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
import datetime
|
import datetime
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
@ -135,11 +136,11 @@ class TestMapField(MongoDBTestCase):
|
|||||||
|
|
||||||
BlogPost.drop_collection()
|
BlogPost.drop_collection()
|
||||||
|
|
||||||
tree = BlogPost(info_dict={"éééé": {"description": "VALUE: éééé"}})
|
tree = BlogPost(info_dict={u"éééé": {"description": u"VALUE: éééé"}})
|
||||||
|
|
||||||
tree.save()
|
tree.save()
|
||||||
|
|
||||||
assert (
|
assert (
|
||||||
BlogPost.objects.get(id=tree.id).info_dict["éééé"].description
|
BlogPost.objects.get(id=tree.id).info_dict[u"éééé"].description
|
||||||
== "VALUE: éééé"
|
== u"VALUE: éééé"
|
||||||
)
|
)
|
||||||
|
@ -1,5 +1,6 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
from bson import DBRef, SON
|
||||||
import pytest
|
import pytest
|
||||||
from bson import SON, DBRef
|
|
||||||
|
|
||||||
from mongoengine import *
|
from mongoengine import *
|
||||||
from tests.utils import MongoDBTestCase
|
from tests.utils import MongoDBTestCase
|
||||||
@ -87,7 +88,7 @@ class TestReferenceField(MongoDBTestCase):
|
|||||||
parent = ReferenceField("self", dbref=False)
|
parent = ReferenceField("self", dbref=False)
|
||||||
|
|
||||||
p = Person(name="Steve", parent=DBRef("person", "abcdefghijklmnop"))
|
p = Person(name="Steve", parent=DBRef("person", "abcdefghijklmnop"))
|
||||||
assert p.to_mongo() == SON([("name", "Steve"), ("parent", "abcdefghijklmnop")])
|
assert p.to_mongo() == SON([("name", u"Steve"), ("parent", "abcdefghijklmnop")])
|
||||||
|
|
||||||
def test_objectid_reference_fields(self):
|
def test_objectid_reference_fields(self):
|
||||||
class Person(Document):
|
class Person(Document):
|
||||||
@ -107,7 +108,8 @@ class TestReferenceField(MongoDBTestCase):
|
|||||||
assert p.parent == p1
|
assert p.parent == p1
|
||||||
|
|
||||||
def test_undefined_reference(self):
|
def test_undefined_reference(self):
|
||||||
"""Ensure that ReferenceFields may reference undefined Documents."""
|
"""Ensure that ReferenceFields may reference undefined Documents.
|
||||||
|
"""
|
||||||
|
|
||||||
class Product(Document):
|
class Product(Document):
|
||||||
name = StringField()
|
name = StringField()
|
||||||
|
@ -1,4 +1,7 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
from mongoengine import *
|
from mongoengine import *
|
||||||
|
|
||||||
from tests.utils import MongoDBTestCase
|
from tests.utils import MongoDBTestCase
|
||||||
|
|
||||||
|
|
||||||
@ -18,7 +21,7 @@ class TestSequenceField(MongoDBTestCase):
|
|||||||
assert c["next"] == 10
|
assert c["next"] == 10
|
||||||
|
|
||||||
ids = [i.id for i in Person.objects]
|
ids = [i.id for i in Person.objects]
|
||||||
assert ids == list(range(1, 11))
|
assert ids == range(1, 11)
|
||||||
|
|
||||||
c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
|
c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
|
||||||
assert c["next"] == 10
|
assert c["next"] == 10
|
||||||
@ -73,7 +76,7 @@ class TestSequenceField(MongoDBTestCase):
|
|||||||
assert c["next"] == 10
|
assert c["next"] == 10
|
||||||
|
|
||||||
ids = [i.id for i in Person.objects]
|
ids = [i.id for i in Person.objects]
|
||||||
assert ids == list(range(1, 11))
|
assert ids == range(1, 11)
|
||||||
|
|
||||||
c = self.db["mongoengine.counters"].find_one({"_id": "jelly.id"})
|
c = self.db["mongoengine.counters"].find_one({"_id": "jelly.id"})
|
||||||
assert c["next"] == 10
|
assert c["next"] == 10
|
||||||
@ -98,10 +101,10 @@ class TestSequenceField(MongoDBTestCase):
|
|||||||
assert c["next"] == 10
|
assert c["next"] == 10
|
||||||
|
|
||||||
ids = [i.id for i in Person.objects]
|
ids = [i.id for i in Person.objects]
|
||||||
assert ids == list(range(1, 11))
|
assert ids == range(1, 11)
|
||||||
|
|
||||||
counters = [i.counter for i in Person.objects]
|
counters = [i.counter for i in Person.objects]
|
||||||
assert counters == list(range(1, 11))
|
assert counters == range(1, 11)
|
||||||
|
|
||||||
c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
|
c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
|
||||||
assert c["next"] == 10
|
assert c["next"] == 10
|
||||||
@ -163,10 +166,10 @@ class TestSequenceField(MongoDBTestCase):
|
|||||||
assert c["next"] == 10
|
assert c["next"] == 10
|
||||||
|
|
||||||
ids = [i.id for i in Person.objects]
|
ids = [i.id for i in Person.objects]
|
||||||
assert ids == list(range(1, 11))
|
assert ids == range(1, 11)
|
||||||
|
|
||||||
_id = [i.id for i in Animal.objects]
|
id = [i.id for i in Animal.objects]
|
||||||
assert _id == list(range(1, 11))
|
assert id == range(1, 11)
|
||||||
|
|
||||||
c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
|
c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
|
||||||
assert c["next"] == 10
|
assert c["next"] == 10
|
||||||
@ -190,7 +193,7 @@ class TestSequenceField(MongoDBTestCase):
|
|||||||
assert c["next"] == 10
|
assert c["next"] == 10
|
||||||
|
|
||||||
ids = [i.id for i in Person.objects]
|
ids = [i.id for i in Person.objects]
|
||||||
assert ids == [str(i) for i in range(1, 11)]
|
assert ids == map(str, range(1, 11))
|
||||||
|
|
||||||
c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
|
c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
|
||||||
assert c["next"] == 10
|
assert c["next"] == 10
|
||||||
@ -264,34 +267,12 @@ class TestSequenceField(MongoDBTestCase):
|
|||||||
foo = Foo(name="Foo")
|
foo = Foo(name="Foo")
|
||||||
foo.save()
|
foo.save()
|
||||||
|
|
||||||
assert "base.counter" not in self.db["mongoengine.counters"].find().distinct(
|
assert not (
|
||||||
"_id"
|
"base.counter" in self.db["mongoengine.counters"].find().distinct("_id")
|
||||||
)
|
)
|
||||||
existing_counters = self.db["mongoengine.counters"].find().distinct("_id")
|
assert ("foo.counter" and "bar.counter") in self.db[
|
||||||
assert "foo.counter" in existing_counters
|
"mongoengine.counters"
|
||||||
assert "bar.counter" in existing_counters
|
].find().distinct("_id")
|
||||||
assert foo.counter == bar.counter
|
assert foo.counter == bar.counter
|
||||||
assert foo._fields["counter"].owner_document == Foo
|
assert foo._fields["counter"].owner_document == Foo
|
||||||
assert bar._fields["counter"].owner_document == Bar
|
assert bar._fields["counter"].owner_document == Bar
|
||||||
|
|
||||||
def test_sequence_setattr_not_incrementing_counter(self):
|
|
||||||
class Person(DynamicDocument):
|
|
||||||
id = SequenceField(primary_key=True)
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
self.db["mongoengine.counters"].drop()
|
|
||||||
Person.drop_collection()
|
|
||||||
|
|
||||||
for x in range(10):
|
|
||||||
Person(name="Person %s" % x).save()
|
|
||||||
|
|
||||||
c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
|
|
||||||
assert c["next"] == 10
|
|
||||||
|
|
||||||
# Setting SequenceField field value should not increment counter:
|
|
||||||
new_person = Person()
|
|
||||||
new_person.id = 1100
|
|
||||||
|
|
||||||
# Counter should still be at 10
|
|
||||||
c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
|
|
||||||
assert c["next"] == 10
|
|
||||||
|
@ -1,43 +0,0 @@
|
|||||||
import pytest
|
|
||||||
|
|
||||||
from mongoengine import *
|
|
||||||
from tests.utils import MongoDBTestCase, get_as_pymongo
|
|
||||||
|
|
||||||
|
|
||||||
class TestStringField(MongoDBTestCase):
|
|
||||||
def test_storage(self):
|
|
||||||
class Person(Document):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
Person.drop_collection()
|
|
||||||
person = Person(name="test123")
|
|
||||||
person.save()
|
|
||||||
assert get_as_pymongo(person) == {"_id": person.id, "name": "test123"}
|
|
||||||
|
|
||||||
def test_validation(self):
|
|
||||||
class Person(Document):
|
|
||||||
name = StringField(max_length=20, min_length=2)
|
|
||||||
userid = StringField(r"[0-9a-z_]+$")
|
|
||||||
|
|
||||||
with pytest.raises(ValidationError, match="only accepts string values"):
|
|
||||||
Person(name=34).validate()
|
|
||||||
|
|
||||||
with pytest.raises(ValidationError, match="value is too short"):
|
|
||||||
Person(name="s").validate()
|
|
||||||
|
|
||||||
# Test regex validation on userid
|
|
||||||
person = Person(userid="test.User")
|
|
||||||
with pytest.raises(ValidationError):
|
|
||||||
person.validate()
|
|
||||||
|
|
||||||
person.userid = "test_user"
|
|
||||||
assert person.userid == "test_user"
|
|
||||||
person.validate()
|
|
||||||
|
|
||||||
# Test max length validation on name
|
|
||||||
person = Person(name="Name that is more than twenty characters")
|
|
||||||
with pytest.raises(ValidationError):
|
|
||||||
person.validate()
|
|
||||||
|
|
||||||
person = Person(name="a friendl name", userid="7a757668sqjdkqlsdkq")
|
|
||||||
person.validate()
|
|
@ -1,6 +1,8 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from mongoengine import *
|
from mongoengine import *
|
||||||
|
|
||||||
from tests.utils import MongoDBTestCase
|
from tests.utils import MongoDBTestCase
|
||||||
|
|
||||||
|
|
||||||
@ -26,19 +28,20 @@ class TestURLField(MongoDBTestCase):
|
|||||||
url = URLField()
|
url = URLField()
|
||||||
|
|
||||||
link = Link()
|
link = Link()
|
||||||
link.url = "http://привет.com"
|
link.url = u"http://привет.com"
|
||||||
|
|
||||||
# TODO fix URL validation - this *IS* a valid URL
|
# TODO fix URL validation - this *IS* a valid URL
|
||||||
# For now we just want to make sure that the error message is correct
|
# For now we just want to make sure that the error message is correct
|
||||||
with pytest.raises(ValidationError) as exc_info:
|
with pytest.raises(ValidationError) as exc_info:
|
||||||
link.validate()
|
link.validate()
|
||||||
assert (
|
assert (
|
||||||
str(exc_info.value)
|
unicode(exc_info.value)
|
||||||
== "ValidationError (Link:None) (Invalid URL: http://\u043f\u0440\u0438\u0432\u0435\u0442.com: ['url'])"
|
== u"ValidationError (Link:None) (Invalid URL: http://\u043f\u0440\u0438\u0432\u0435\u0442.com: ['url'])"
|
||||||
)
|
)
|
||||||
|
|
||||||
def test_url_scheme_validation(self):
|
def test_url_scheme_validation(self):
|
||||||
"""Ensure that URLFields validate urls with specific schemes properly."""
|
"""Ensure that URLFields validate urls with specific schemes properly.
|
||||||
|
"""
|
||||||
|
|
||||||
class Link(Document):
|
class Link(Document):
|
||||||
url = URLField()
|
url = URLField()
|
||||||
|
@ -1,3 +1,4 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
import uuid
|
import uuid
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
@ -17,7 +18,8 @@ class TestUUIDField(MongoDBTestCase):
|
|||||||
assert get_as_pymongo(person) == {"_id": person.id, "api_key": str(uid)}
|
assert get_as_pymongo(person) == {"_id": person.id, "api_key": str(uid)}
|
||||||
|
|
||||||
def test_field_string(self):
|
def test_field_string(self):
|
||||||
"""Test UUID fields storing as String"""
|
"""Test UUID fields storing as String
|
||||||
|
"""
|
||||||
Person.drop_collection()
|
Person.drop_collection()
|
||||||
|
|
||||||
uu = uuid.uuid4()
|
uu = uuid.uuid4()
|
||||||
|
@ -53,7 +53,7 @@ signals.post_save.connect(PickleSignalsTest.post_save, sender=PickleSignalsTest)
|
|||||||
signals.post_delete.connect(PickleSignalsTest.post_delete, sender=PickleSignalsTest)
|
signals.post_delete.connect(PickleSignalsTest.post_delete, sender=PickleSignalsTest)
|
||||||
|
|
||||||
|
|
||||||
class Mixin:
|
class Mixin(object):
|
||||||
name = StringField()
|
name = StringField()
|
||||||
|
|
||||||
|
|
||||||
|
@ -148,7 +148,8 @@ class TestOnlyExcludeAll(unittest.TestCase):
|
|||||||
assert qs._loaded_fields.as_dict() == {"c": {"$slice": 2}, "a": 1}
|
assert qs._loaded_fields.as_dict() == {"c": {"$slice": 2}, "a": 1}
|
||||||
|
|
||||||
def test_only(self):
|
def test_only(self):
|
||||||
"""Ensure that QuerySet.only only returns the requested fields."""
|
"""Ensure that QuerySet.only only returns the requested fields.
|
||||||
|
"""
|
||||||
person = self.Person(name="test", age=25)
|
person = self.Person(name="test", age=25)
|
||||||
person.save()
|
person.save()
|
||||||
|
|
||||||
@ -364,7 +365,8 @@ class TestOnlyExcludeAll(unittest.TestCase):
|
|||||||
Email.drop_collection()
|
Email.drop_collection()
|
||||||
|
|
||||||
def test_slicing_fields(self):
|
def test_slicing_fields(self):
|
||||||
"""Ensure that query slicing an array works."""
|
"""Ensure that query slicing an array works.
|
||||||
|
"""
|
||||||
|
|
||||||
class Numbers(Document):
|
class Numbers(Document):
|
||||||
n = ListField(IntField())
|
n = ListField(IntField())
|
||||||
@ -399,7 +401,8 @@ class TestOnlyExcludeAll(unittest.TestCase):
|
|||||||
assert numbers.n == [-5, -4, -3, -2, -1]
|
assert numbers.n == [-5, -4, -3, -2, -1]
|
||||||
|
|
||||||
def test_slicing_nested_fields(self):
|
def test_slicing_nested_fields(self):
|
||||||
"""Ensure that query slicing an embedded array works."""
|
"""Ensure that query slicing an embedded array works.
|
||||||
|
"""
|
||||||
|
|
||||||
class EmbeddedNumber(EmbeddedDocument):
|
class EmbeddedNumber(EmbeddedDocument):
|
||||||
n = ListField(IntField())
|
n = ListField(IntField())
|
||||||
|
@ -2,6 +2,7 @@ import datetime
|
|||||||
import unittest
|
import unittest
|
||||||
|
|
||||||
from mongoengine import *
|
from mongoengine import *
|
||||||
|
|
||||||
from tests.utils import MongoDBTestCase
|
from tests.utils import MongoDBTestCase
|
||||||
|
|
||||||
|
|
||||||
@ -495,8 +496,8 @@ class TestGeoQueries(MongoDBTestCase):
|
|||||||
p.save()
|
p.save()
|
||||||
qs = Place.objects().only("location")
|
qs = Place.objects().only("location")
|
||||||
assert qs.as_pymongo()[0]["location"] == {
|
assert qs.as_pymongo()[0]["location"] == {
|
||||||
"type": "Point",
|
u"type": u"Point",
|
||||||
"coordinates": [24.946861267089844, 60.16311983618494],
|
u"coordinates": [24.946861267089844, 60.16311983618494],
|
||||||
}
|
}
|
||||||
|
|
||||||
def test_2dsphere_point_sets_correctly(self):
|
def test_2dsphere_point_sets_correctly(self):
|
||||||
|
@ -1,12 +1,6 @@
|
|||||||
import unittest
|
import unittest
|
||||||
|
|
||||||
from mongoengine import (
|
from mongoengine import Document, IntField, ListField, StringField, connect
|
||||||
Document,
|
|
||||||
IntField,
|
|
||||||
ListField,
|
|
||||||
StringField,
|
|
||||||
connect,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class Doc(Document):
|
class Doc(Document):
|
||||||
@ -19,7 +13,7 @@ class TestFindAndModify(unittest.TestCase):
|
|||||||
connect(db="mongoenginetest")
|
connect(db="mongoenginetest")
|
||||||
Doc.drop_collection()
|
Doc.drop_collection()
|
||||||
|
|
||||||
def _assert_db_equal(self, docs):
|
def assertDbEqual(self, docs):
|
||||||
assert list(Doc._collection.find().sort("id")) == docs
|
assert list(Doc._collection.find().sort("id")) == docs
|
||||||
|
|
||||||
def test_modify(self):
|
def test_modify(self):
|
||||||
@ -28,7 +22,7 @@ class TestFindAndModify(unittest.TestCase):
|
|||||||
|
|
||||||
old_doc = Doc.objects(id=1).modify(set__value=-1)
|
old_doc = Doc.objects(id=1).modify(set__value=-1)
|
||||||
assert old_doc.to_json() == doc.to_json()
|
assert old_doc.to_json() == doc.to_json()
|
||||||
self._assert_db_equal([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}])
|
self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}])
|
||||||
|
|
||||||
def test_modify_with_new(self):
|
def test_modify_with_new(self):
|
||||||
Doc(id=0, value=0).save()
|
Doc(id=0, value=0).save()
|
||||||
@ -37,18 +31,18 @@ class TestFindAndModify(unittest.TestCase):
|
|||||||
new_doc = Doc.objects(id=1).modify(set__value=-1, new=True)
|
new_doc = Doc.objects(id=1).modify(set__value=-1, new=True)
|
||||||
doc.value = -1
|
doc.value = -1
|
||||||
assert new_doc.to_json() == doc.to_json()
|
assert new_doc.to_json() == doc.to_json()
|
||||||
self._assert_db_equal([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}])
|
self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}])
|
||||||
|
|
||||||
def test_modify_not_existing(self):
|
def test_modify_not_existing(self):
|
||||||
Doc(id=0, value=0).save()
|
Doc(id=0, value=0).save()
|
||||||
assert Doc.objects(id=1).modify(set__value=-1) is None
|
assert Doc.objects(id=1).modify(set__value=-1) is None
|
||||||
self._assert_db_equal([{"_id": 0, "value": 0}])
|
self.assertDbEqual([{"_id": 0, "value": 0}])
|
||||||
|
|
||||||
def test_modify_with_upsert(self):
|
def test_modify_with_upsert(self):
|
||||||
Doc(id=0, value=0).save()
|
Doc(id=0, value=0).save()
|
||||||
old_doc = Doc.objects(id=1).modify(set__value=1, upsert=True)
|
old_doc = Doc.objects(id=1).modify(set__value=1, upsert=True)
|
||||||
assert old_doc is None
|
assert old_doc is None
|
||||||
self._assert_db_equal([{"_id": 0, "value": 0}, {"_id": 1, "value": 1}])
|
self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": 1}])
|
||||||
|
|
||||||
def test_modify_with_upsert_existing(self):
|
def test_modify_with_upsert_existing(self):
|
||||||
Doc(id=0, value=0).save()
|
Doc(id=0, value=0).save()
|
||||||
@ -56,13 +50,13 @@ class TestFindAndModify(unittest.TestCase):
|
|||||||
|
|
||||||
old_doc = Doc.objects(id=1).modify(set__value=-1, upsert=True)
|
old_doc = Doc.objects(id=1).modify(set__value=-1, upsert=True)
|
||||||
assert old_doc.to_json() == doc.to_json()
|
assert old_doc.to_json() == doc.to_json()
|
||||||
self._assert_db_equal([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}])
|
self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}])
|
||||||
|
|
||||||
def test_modify_with_upsert_with_new(self):
|
def test_modify_with_upsert_with_new(self):
|
||||||
Doc(id=0, value=0).save()
|
Doc(id=0, value=0).save()
|
||||||
new_doc = Doc.objects(id=1).modify(upsert=True, new=True, set__value=1)
|
new_doc = Doc.objects(id=1).modify(upsert=True, new=True, set__value=1)
|
||||||
assert new_doc.to_mongo() == {"_id": 1, "value": 1}
|
assert new_doc.to_mongo() == {"_id": 1, "value": 1}
|
||||||
self._assert_db_equal([{"_id": 0, "value": 0}, {"_id": 1, "value": 1}])
|
self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": 1}])
|
||||||
|
|
||||||
def test_modify_with_remove(self):
|
def test_modify_with_remove(self):
|
||||||
Doc(id=0, value=0).save()
|
Doc(id=0, value=0).save()
|
||||||
@ -70,12 +64,12 @@ class TestFindAndModify(unittest.TestCase):
|
|||||||
|
|
||||||
old_doc = Doc.objects(id=1).modify(remove=True)
|
old_doc = Doc.objects(id=1).modify(remove=True)
|
||||||
assert old_doc.to_json() == doc.to_json()
|
assert old_doc.to_json() == doc.to_json()
|
||||||
self._assert_db_equal([{"_id": 0, "value": 0}])
|
self.assertDbEqual([{"_id": 0, "value": 0}])
|
||||||
|
|
||||||
def test_find_and_modify_with_remove_not_existing(self):
|
def test_find_and_modify_with_remove_not_existing(self):
|
||||||
Doc(id=0, value=0).save()
|
Doc(id=0, value=0).save()
|
||||||
assert Doc.objects(id=1).modify(remove=True) is None
|
assert Doc.objects(id=1).modify(remove=True) is None
|
||||||
self._assert_db_equal([{"_id": 0, "value": 0}])
|
self.assertDbEqual([{"_id": 0, "value": 0}])
|
||||||
|
|
||||||
def test_modify_with_order_by(self):
|
def test_modify_with_order_by(self):
|
||||||
Doc(id=0, value=3).save()
|
Doc(id=0, value=3).save()
|
||||||
@ -85,7 +79,7 @@ class TestFindAndModify(unittest.TestCase):
|
|||||||
|
|
||||||
old_doc = Doc.objects().order_by("-id").modify(set__value=-1)
|
old_doc = Doc.objects().order_by("-id").modify(set__value=-1)
|
||||||
assert old_doc.to_json() == doc.to_json()
|
assert old_doc.to_json() == doc.to_json()
|
||||||
self._assert_db_equal(
|
self.assertDbEqual(
|
||||||
[
|
[
|
||||||
{"_id": 0, "value": 3},
|
{"_id": 0, "value": 3},
|
||||||
{"_id": 1, "value": 2},
|
{"_id": 1, "value": 2},
|
||||||
@ -100,7 +94,7 @@ class TestFindAndModify(unittest.TestCase):
|
|||||||
|
|
||||||
old_doc = Doc.objects(id=1).only("id").modify(set__value=-1)
|
old_doc = Doc.objects(id=1).only("id").modify(set__value=-1)
|
||||||
assert old_doc.to_mongo() == {"_id": 1}
|
assert old_doc.to_mongo() == {"_id": 1}
|
||||||
self._assert_db_equal([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}])
|
self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}])
|
||||||
|
|
||||||
def test_modify_with_push(self):
|
def test_modify_with_push(self):
|
||||||
class BlogPost(Document):
|
class BlogPost(Document):
|
||||||
|
@ -1,6 +1,8 @@
|
|||||||
import pickle
|
import pickle
|
||||||
|
import unittest
|
||||||
|
|
||||||
from mongoengine import Document, IntField, StringField
|
from mongoengine import Document, IntField, StringField
|
||||||
|
from mongoengine.connection import connect
|
||||||
from tests.utils import MongoDBTestCase
|
from tests.utils import MongoDBTestCase
|
||||||
|
|
||||||
|
|
||||||
@ -16,15 +18,18 @@ class TestQuerysetPickable(MongoDBTestCase):
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
super().setUp()
|
super(TestQuerysetPickable, self).setUp()
|
||||||
self.john = Person.objects.create(name="John", age=21)
|
self.john = Person.objects.create(name="John", age=21)
|
||||||
|
|
||||||
def test_picke_simple_qs(self):
|
def test_picke_simple_qs(self):
|
||||||
|
|
||||||
qs = Person.objects.all()
|
qs = Person.objects.all()
|
||||||
|
|
||||||
pickle.dumps(qs)
|
pickle.dumps(qs)
|
||||||
|
|
||||||
def _get_loaded(self, qs):
|
def _get_loaded(self, qs):
|
||||||
s = pickle.dumps(qs)
|
s = pickle.dumps(qs)
|
||||||
|
|
||||||
return pickle.loads(s)
|
return pickle.loads(s)
|
||||||
|
|
||||||
def test_unpickle(self):
|
def test_unpickle(self):
|
||||||
|
File diff suppressed because it is too large
Load Diff
@ -1,3 +1,5 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
import unittest
|
import unittest
|
||||||
import warnings
|
import warnings
|
||||||
|
|
||||||
@ -64,7 +66,7 @@ class TestQuerysetAggregate(MongoDBTestCase):
|
|||||||
|
|
||||||
pipeline = [{"$match": {"name": "Isabella Luanna"}}]
|
pipeline = [{"$match": {"name": "Isabella Luanna"}}]
|
||||||
data = Person.objects().aggregate(pipeline)
|
data = Person.objects().aggregate(pipeline)
|
||||||
assert list(data) == [{"_id": p1.pk, "age": 16, "name": "Isabella Luanna"}]
|
assert list(data) == [{u"_id": p1.pk, u"age": 16, u"name": u"Isabella Luanna"}]
|
||||||
|
|
||||||
def test_queryset_aggregation_with_skip(self):
|
def test_queryset_aggregation_with_skip(self):
|
||||||
class Person(Document):
|
class Person(Document):
|
||||||
@ -248,34 +250,6 @@ class TestQuerysetAggregate(MongoDBTestCase):
|
|||||||
|
|
||||||
assert list(data) == [{"_id": p1.pk, "name": "ISABELLA LUANNA"}]
|
assert list(data) == [{"_id": p1.pk, "name": "ISABELLA LUANNA"}]
|
||||||
|
|
||||||
def test_queryset_aggregation_geonear_aggregation_on_pointfield(self):
|
|
||||||
"""test ensures that $geonear can be used as a 1-stage pipeline and that
|
|
||||||
MongoEngine does not interfer with such pipeline (#2473)
|
|
||||||
"""
|
|
||||||
|
|
||||||
class Aggr(Document):
|
|
||||||
name = StringField()
|
|
||||||
c = PointField()
|
|
||||||
|
|
||||||
Aggr.drop_collection()
|
|
||||||
|
|
||||||
agg1 = Aggr(name="X", c=[10.634584, 35.8245029]).save()
|
|
||||||
agg2 = Aggr(name="Y", c=[10.634584, 35.8245029]).save()
|
|
||||||
|
|
||||||
pipeline = [
|
|
||||||
{
|
|
||||||
"$geoNear": {
|
|
||||||
"near": {"type": "Point", "coordinates": [10.634584, 35.8245029]},
|
|
||||||
"distanceField": "c",
|
|
||||||
"spherical": True,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
]
|
|
||||||
assert list(Aggr.objects.aggregate(*pipeline)) == [
|
|
||||||
{"_id": agg1.id, "c": 0.0, "name": "X"},
|
|
||||||
{"_id": agg2.id, "c": 0.0, "name": "Y"},
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
unittest.main()
|
unittest.main()
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
import unittest
|
import unittest
|
||||||
|
|
||||||
import pytest
|
|
||||||
from bson.son import SON
|
from bson.son import SON
|
||||||
|
import pytest
|
||||||
|
|
||||||
from mongoengine import *
|
from mongoengine import *
|
||||||
from mongoengine.queryset import Q, transform
|
from mongoengine.queryset import Q, transform
|
||||||
@ -12,7 +12,8 @@ class TestTransform(unittest.TestCase):
|
|||||||
connect(db="mongoenginetest")
|
connect(db="mongoenginetest")
|
||||||
|
|
||||||
def test_transform_query(self):
|
def test_transform_query(self):
|
||||||
"""Ensure that the _transform_query function operates correctly."""
|
"""Ensure that the _transform_query function operates correctly.
|
||||||
|
"""
|
||||||
assert transform.query(name="test", age=30) == {"name": "test", "age": 30}
|
assert transform.query(name="test", age=30) == {"name": "test", "age": 30}
|
||||||
assert transform.query(age__lt=30) == {"age": {"$lt": 30}}
|
assert transform.query(age__lt=30) == {"age": {"$lt": 30}}
|
||||||
assert transform.query(age__gt=20, age__lt=50) == {
|
assert transform.query(age__gt=20, age__lt=50) == {
|
||||||
@ -23,12 +24,6 @@ class TestTransform(unittest.TestCase):
|
|||||||
}
|
}
|
||||||
assert transform.query(friend__age__gte=30) == {"friend.age": {"$gte": 30}}
|
assert transform.query(friend__age__gte=30) == {"friend.age": {"$gte": 30}}
|
||||||
assert transform.query(name__exists=True) == {"name": {"$exists": True}}
|
assert transform.query(name__exists=True) == {"name": {"$exists": True}}
|
||||||
assert transform.query(name=["Mark"], __raw__={"name": {"$in": "Tom"}}) == {
|
|
||||||
"$and": [{"name": ["Mark"]}, {"name": {"$in": "Tom"}}]
|
|
||||||
}
|
|
||||||
assert transform.query(name__in=["Tom"], __raw__={"name": "Mark"}) == {
|
|
||||||
"$and": [{"name": {"$in": ["Tom"]}}, {"name": "Mark"}]
|
|
||||||
}
|
|
||||||
|
|
||||||
def test_transform_update(self):
|
def test_transform_update(self):
|
||||||
class LisDoc(Document):
|
class LisDoc(Document):
|
||||||
@ -87,7 +82,8 @@ class TestTransform(unittest.TestCase):
|
|||||||
assert update == {"$set": {"tags": ["mongo", "db"]}}
|
assert update == {"$set": {"tags": ["mongo", "db"]}}
|
||||||
|
|
||||||
def test_query_field_name(self):
|
def test_query_field_name(self):
|
||||||
"""Ensure that the correct field name is used when querying."""
|
"""Ensure that the correct field name is used when querying.
|
||||||
|
"""
|
||||||
|
|
||||||
class Comment(EmbeddedDocument):
|
class Comment(EmbeddedDocument):
|
||||||
content = StringField(db_field="commentContent")
|
content = StringField(db_field="commentContent")
|
||||||
@ -104,17 +100,18 @@ class TestTransform(unittest.TestCase):
|
|||||||
post = BlogPost(**data)
|
post = BlogPost(**data)
|
||||||
post.save()
|
post.save()
|
||||||
|
|
||||||
qs = BlogPost.objects(title=data["title"])
|
assert "postTitle" in BlogPost.objects(title=data["title"])._query
|
||||||
assert qs._query == {"postTitle": data["title"]}
|
assert not ("title" in BlogPost.objects(title=data["title"])._query)
|
||||||
assert qs.count() == 1
|
assert BlogPost.objects(title=data["title"]).count() == 1
|
||||||
|
|
||||||
qs = BlogPost.objects(pk=post.id)
|
assert "_id" in BlogPost.objects(pk=post.id)._query
|
||||||
assert qs._query == {"_id": post.id}
|
assert BlogPost.objects(pk=post.id).count() == 1
|
||||||
assert qs.count() == 1
|
|
||||||
|
|
||||||
qs = BlogPost.objects(comments__content="test")
|
assert (
|
||||||
assert qs._query == {"postComments.commentContent": "test"}
|
"postComments.commentContent"
|
||||||
assert qs.count() == 1
|
in BlogPost.objects(comments__content="test")._query
|
||||||
|
)
|
||||||
|
assert BlogPost.objects(comments__content="test").count() == 1
|
||||||
|
|
||||||
BlogPost.drop_collection()
|
BlogPost.drop_collection()
|
||||||
|
|
||||||
@ -327,7 +324,7 @@ class TestTransform(unittest.TestCase):
|
|||||||
word = Word(word="abc", index=1)
|
word = Word(word="abc", index=1)
|
||||||
update = transform.update(MainDoc, pull__content__text=word)
|
update = transform.update(MainDoc, pull__content__text=word)
|
||||||
assert update == {
|
assert update == {
|
||||||
"$pull": {"content.text": SON([("word", "abc"), ("index", 1)])}
|
"$pull": {"content.text": SON([("word", u"abc"), ("index", 1)])}
|
||||||
}
|
}
|
||||||
|
|
||||||
update = transform.update(MainDoc, pull__content__heading="xyz")
|
update = transform.update(MainDoc, pull__content__heading="xyz")
|
||||||
@ -341,31 +338,6 @@ class TestTransform(unittest.TestCase):
|
|||||||
)
|
)
|
||||||
assert update == {"$pull": {"content.text": {"word": {"$nin": ["foo", "bar"]}}}}
|
assert update == {"$pull": {"content.text": {"word": {"$nin": ["foo", "bar"]}}}}
|
||||||
|
|
||||||
def test_transform_embedded_document_list_fields(self):
|
|
||||||
"""
|
|
||||||
Test added to check filtering
|
|
||||||
EmbeddedDocumentListField which is inside a EmbeddedDocumentField
|
|
||||||
"""
|
|
||||||
|
|
||||||
class Drink(EmbeddedDocument):
|
|
||||||
id = StringField()
|
|
||||||
meta = {"strict": False}
|
|
||||||
|
|
||||||
class Shop(Document):
|
|
||||||
drinks = EmbeddedDocumentListField(Drink)
|
|
||||||
|
|
||||||
Shop.drop_collection()
|
|
||||||
drinks = [Drink(id="drink_1"), Drink(id="drink_2")]
|
|
||||||
Shop.objects.create(drinks=drinks)
|
|
||||||
q_obj = transform.query(
|
|
||||||
Shop, drinks__all=[{"$elemMatch": {"_id": x.id}} for x in drinks]
|
|
||||||
)
|
|
||||||
assert q_obj == {
|
|
||||||
"drinks": {"$all": [{"$elemMatch": {"_id": x.id}} for x in drinks]}
|
|
||||||
}
|
|
||||||
|
|
||||||
Shop.drop_collection()
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
unittest.main()
|
unittest.main()
|
||||||
|
@ -2,8 +2,8 @@ import datetime
|
|||||||
import re
|
import re
|
||||||
import unittest
|
import unittest
|
||||||
|
|
||||||
import pytest
|
|
||||||
from bson import ObjectId
|
from bson import ObjectId
|
||||||
|
import pytest
|
||||||
|
|
||||||
from mongoengine import *
|
from mongoengine import *
|
||||||
from mongoengine.errors import InvalidQueryError
|
from mongoengine.errors import InvalidQueryError
|
||||||
@ -23,7 +23,8 @@ class TestQ(unittest.TestCase):
|
|||||||
self.Person = Person
|
self.Person = Person
|
||||||
|
|
||||||
def test_empty_q(self):
|
def test_empty_q(self):
|
||||||
"""Ensure that empty Q objects won't hurt."""
|
"""Ensure that empty Q objects won't hurt.
|
||||||
|
"""
|
||||||
q1 = Q()
|
q1 = Q()
|
||||||
q2 = Q(age__gte=18)
|
q2 = Q(age__gte=18)
|
||||||
q3 = Q()
|
q3 = Q()
|
||||||
@ -57,7 +58,8 @@ class TestQ(unittest.TestCase):
|
|||||||
assert Post.objects.filter(Q(created_user=user)).count() == 1
|
assert Post.objects.filter(Q(created_user=user)).count() == 1
|
||||||
|
|
||||||
def test_and_combination(self):
|
def test_and_combination(self):
|
||||||
"""Ensure that Q-objects correctly AND together."""
|
"""Ensure that Q-objects correctly AND together.
|
||||||
|
"""
|
||||||
|
|
||||||
class TestDoc(Document):
|
class TestDoc(Document):
|
||||||
x = IntField()
|
x = IntField()
|
||||||
@ -87,7 +89,8 @@ class TestQ(unittest.TestCase):
|
|||||||
assert query.to_query(TestDoc) == mongo_query
|
assert query.to_query(TestDoc) == mongo_query
|
||||||
|
|
||||||
def test_or_combination(self):
|
def test_or_combination(self):
|
||||||
"""Ensure that Q-objects correctly OR together."""
|
"""Ensure that Q-objects correctly OR together.
|
||||||
|
"""
|
||||||
|
|
||||||
class TestDoc(Document):
|
class TestDoc(Document):
|
||||||
x = IntField()
|
x = IntField()
|
||||||
@ -98,7 +101,8 @@ class TestQ(unittest.TestCase):
|
|||||||
assert query == {"$or": [{"x": {"$lt": 3}}, {"x": {"$gt": 7}}]}
|
assert query == {"$or": [{"x": {"$lt": 3}}, {"x": {"$gt": 7}}]}
|
||||||
|
|
||||||
def test_and_or_combination(self):
|
def test_and_or_combination(self):
|
||||||
"""Ensure that Q-objects handle ANDing ORed components."""
|
"""Ensure that Q-objects handle ANDing ORed components.
|
||||||
|
"""
|
||||||
|
|
||||||
class TestDoc(Document):
|
class TestDoc(Document):
|
||||||
x = IntField()
|
x = IntField()
|
||||||
@ -132,7 +136,8 @@ class TestQ(unittest.TestCase):
|
|||||||
assert 2 == TestDoc.objects(q1 & q2).count()
|
assert 2 == TestDoc.objects(q1 & q2).count()
|
||||||
|
|
||||||
def test_or_and_or_combination(self):
|
def test_or_and_or_combination(self):
|
||||||
"""Ensure that Q-objects handle ORing ANDed ORed components. :)"""
|
"""Ensure that Q-objects handle ORing ANDed ORed components. :)
|
||||||
|
"""
|
||||||
|
|
||||||
class TestDoc(Document):
|
class TestDoc(Document):
|
||||||
x = IntField()
|
x = IntField()
|
||||||
@ -203,7 +208,8 @@ class TestQ(unittest.TestCase):
|
|||||||
assert test.count() == 3
|
assert test.count() == 3
|
||||||
|
|
||||||
def test_q(self):
|
def test_q(self):
|
||||||
"""Ensure that Q objects may be used to query for documents."""
|
"""Ensure that Q objects may be used to query for documents.
|
||||||
|
"""
|
||||||
|
|
||||||
class BlogPost(Document):
|
class BlogPost(Document):
|
||||||
title = StringField()
|
title = StringField()
|
||||||
@ -280,7 +286,8 @@ class TestQ(unittest.TestCase):
|
|||||||
self.Person.objects.filter("user1")
|
self.Person.objects.filter("user1")
|
||||||
|
|
||||||
def test_q_regex(self):
|
def test_q_regex(self):
|
||||||
"""Ensure that Q objects can be queried using regexes."""
|
"""Ensure that Q objects can be queried using regexes.
|
||||||
|
"""
|
||||||
person = self.Person(name="Guido van Rossum")
|
person = self.Person(name="Guido van Rossum")
|
||||||
person.save()
|
person.save()
|
||||||
|
|
||||||
@ -313,7 +320,8 @@ class TestQ(unittest.TestCase):
|
|||||||
)
|
)
|
||||||
|
|
||||||
def test_q_lists(self):
|
def test_q_lists(self):
|
||||||
"""Ensure that Q objects query ListFields correctly."""
|
"""Ensure that Q objects query ListFields correctly.
|
||||||
|
"""
|
||||||
|
|
||||||
class BlogPost(Document):
|
class BlogPost(Document):
|
||||||
tags = ListField(StringField())
|
tags = ListField(StringField())
|
||||||
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user