Compare commits
No commits in common. "master" and "v0.7rc1" have entirely different histories.
.github/workflows/github-actions.yml  (vendored; 143 deletions)
@@ -1,143 +0,0 @@
-name: MongoengineCI
-on:
-  # All PR
-  pull_request:
-  # master branch merge
-  push:
-    branches:
-      - master
-  # release tags
-  create:
-    tags:
-      - 'v[0-9]+\.[0-9]+\.[0-9]+*'
-env:
-  MONGODB_3_6: 3.6.14
-  MONGODB_4_0: 4.0.23
-  MONGODB_4_2: 4.2
-  MONGODB_4_4: 4.4
-
-  PYMONGO_3_4: 3.4
-  PYMONGO_3_6: 3.6
-  PYMONGO_3_9: 3.9
-  PYMONGO_3_11: 3.11
-
-  MAIN_PYTHON_VERSION: 3.7
-
-jobs:
-  linting:
-    # Run pre-commit (https://pre-commit.com/)
-    # which runs pre-configured linter & autoformatter
-    runs-on: ubuntu-latest
-    steps:
-    - uses: actions/checkout@v2
-    - name: Set up Python 3.7
-      uses: actions/setup-python@v2
-      with:
-        python-version: 3.7
-    - run: bash .github/workflows/install_ci_python_dep.sh
-    - run: pre-commit run -a
-
-  test:
-    # Test suite run against recent python versions
-    # and against a few combination of MongoDB and pymongo
-    runs-on: ubuntu-latest
-    strategy:
-      fail-fast: false
-      matrix:
-        python-version: [3.6, 3.7, 3.8, 3.9, "3.10", pypy3]
-        MONGODB: [$MONGODB_4_0]
-        PYMONGO: [$PYMONGO_3_11]
-        include:
-          - python-version: 3.7
-            MONGODB: $MONGODB_3_6
-            PYMONGO: $PYMONGO_3_9
-          - python-version: 3.7
-            MONGODB: $MONGODB_4_2
-            PYMONGO: $PYMONGO_3_6
-          - python-version: 3.7
-            MONGODB: $MONGODB_4_4
-            PYMONGO: $PYMONGO_3_11
-    steps:
-    - uses: actions/checkout@v2
-    - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v2
-      with:
-        python-version: ${{ matrix.python-version }}
-    - name: install mongo and ci dependencies
-      run: |
-        bash .github/workflows/install_mongo.sh ${{ matrix.MONGODB }}
-        bash .github/workflows/install_ci_python_dep.sh
-        bash .github/workflows/start_mongo.sh ${{ matrix.MONGODB }}
-    - name: tox dry-run (to pre-install venv)
-      run: tox -e $(echo py${{ matrix.python-version }}-mg${{ matrix.PYMONGO }} | tr -d . | sed -e 's/pypypy/pypy/') -- -a "-k=test_ci_placeholder"
-    - name: Run test suite
-      run: tox -e $(echo py${{ matrix.python-version }}-mg${{ matrix.PYMONGO }} | tr -d . | sed -e 's/pypypy/pypy/') -- -a "--cov=mongoengine"
-    - name: Send coverage to Coveralls
-      env:
-        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        COVERALLS_SERVICE_NAME: github
-      if: ${{ matrix.python-version == env.MAIN_PYTHON_VERSION }}
-      run: coveralls
-
-  build_doc_dryrun:
-    # ensures that readthedocs can be built continuously
-    # to avoid that it breaks when new releases are being created
-    runs-on: ubuntu-latest
-    steps:
-    - uses: actions/checkout@v2
-    - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v2
-      with:
-        python-version: 3.7
-    - name: install python dep
-      run: |
-        pip install -e .
-        pip install -r docs/requirements.txt
-    - name: build doc
-      run: |
-        cd docs
-        make html-readthedocs
-
-  build-n-publish-dummy:
-    runs-on: ubuntu-latest
-    needs: [linting, test, build_doc_dryrun]
-    if: github.event_name != 'pull_request'
-    steps:
-    - uses: actions/checkout@master
-    - name: Set up Python 3.7
-      uses: actions/setup-python@v1
-      with:
-        python-version: 3.7
-    - name: build dummy wheel for test-pypi
-      run: |
-        pip install wheel
-        python setup.py egg_info -b ".dev`date '+%Y%m%d%H%M%S'`" build sdist bdist_wheel
-#    - name: publish test-pypi
-#      # Although working and recommended, test-pypi has a limit
-#      # in the size of projects so it's better to avoid publishing
-#      # until there is a way to garbage collect these dummy releases
-#      uses: pypa/gh-action-pypi-publish@master
-#      with:
-#        password: ${{ secrets.test_pypi_token }}
-#        repository_url: https://test.pypi.org/legacy/
-
-  build-n-publish:
-    runs-on: ubuntu-latest
-    needs: [linting, test, build_doc_dryrun, build-n-publish-dummy]
-    if: github.event_name == 'create' && startsWith(github.ref, 'refs/tags/v')
-    steps:
-    - uses: actions/checkout@master
-    - name: Set up Python 3.7
-      uses: actions/setup-python@v1
-      with:
-        python-version: 3.7
-    # todo separate build from publish
-    # https://stackoverflow.com/questions/59349905/which-properties-does-github-event-in-a-github-workflow-have
-    - name: build dummy wheel for test-pypi
-      run: |
-        pip install wheel
-        python setup.py sdist bdist_wheel
-    - name: publish pypi
-      uses: pypa/gh-action-pypi-publish@master
-      with:
-        password: ${{ secrets.pypi_token }}
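
The trickiest lines in the deleted workflow are the two ``tox -e $(echo py${{ matrix.python-version }}-mg${{ matrix.PYMONGO }} | tr -d . | sed -e 's/pypypy/pypy/')`` invocations, which turn the matrix values into a tox environment name. A minimal Python sketch of the same transformation (the version strings are illustrative examples; the shell pipeline in the workflow is the reference):

.. code-block:: python

    def tox_env_name(python_version: str, pymongo_version: str) -> str:
        # "py" + versions, e.g. "py3.7-mg3.11"
        name = "py%s-mg%s" % (python_version, pymongo_version)
        name = name.replace(".", "")              # tr -d .  -> "py37-mg311"
        return name.replace("pypypy", "pypy", 1)  # sed 's/pypypy/pypy/' fixes "pypypy3"

    assert tox_env_name("3.7", "3.11") == "py37-mg311"
    assert tox_env_name("pypy3", "3.11") == "pypy3-mg311"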

.github/workflows/install_ci_python_dep.sh  (vendored; 5 deletions)
@@ -1,5 +0,0 @@
-#!/bin/bash
-pip install --upgrade pip
-pip install coveralls
-pip install pre-commit
-pip install tox

.github/workflows/install_mongo.sh  (vendored; 18 deletions)
@@ -1,18 +0,0 @@
-#!/bin/bash
-
-MONGODB=$1
-
-# Mongo > 4.0 follows different name convention for download links
-mongo_build=mongodb-linux-x86_64-${MONGODB}
-
-if [[ "$MONGODB" == *"4.2"* ]]; then
-    mongo_build=mongodb-linux-x86_64-ubuntu1804-v${MONGODB}-latest
-elif [[ "$MONGODB" == *"4.4"* ]]; then
-    mongo_build=mongodb-linux-x86_64-ubuntu1804-v${MONGODB}-latest
-fi
-
-wget http://fastdl.mongodb.org/linux/$mongo_build.tgz
-tar xzf $mongo_build.tgz
-
-mongodb_dir=$(find ${PWD}/ -type d -name "mongodb-linux-x86_64*")
-$mongodb_dir/bin/mongod --version
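
install_mongo.sh switches to an Ubuntu-specific ``-latest`` archive name for the 4.2 and 4.4 series, since those matrix entries are bare series numbers rather than exact point releases. A hedged Python sketch of the same naming rule (the version strings are examples; the script above is authoritative):

.. code-block:: python

    def mongo_build_name(version: str) -> str:
        # Mirrors the naming logic of install_mongo.sh (illustrative only).
        if "4.2" in version or "4.4" in version:
            # bare series numbers map to rolling distro-specific builds
            return "mongodb-linux-x86_64-ubuntu1804-v%s-latest" % version
        return "mongodb-linux-x86_64-%s" % version

    assert mongo_build_name("4.0.23") == "mongodb-linux-x86_64-4.0.23"
    assert mongo_build_name("4.4") == "mongodb-linux-x86_64-ubuntu1804-v4.4-latest"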

.github/workflows/start_mongo.sh  (vendored; 9 deletions)
@@ -1,9 +0,0 @@
-#!/bin/bash
-
-MONGODB=$1
-
-mongodb_dir=$(find ${PWD}/ -type d -name "mongodb-linux-x86_64*")
-
-mkdir $mongodb_dir/data
-$mongodb_dir/bin/mongod --dbpath $mongodb_dir/data --logpath $mongodb_dir/mongodb.log --fork
-mongo --eval 'db.version();' # Make sure mongo is awake
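
start_mongo.sh forks ``mongod`` and then uses the ``mongo`` shell's ``db.version()`` call as a liveness probe. The same check can be done from Python with pymongo; a sketch, not part of the CI scripts, with an arbitrary timeout value:

.. code-block:: python

    from pymongo import MongoClient

    # Ping the default localhost:27017 instance started by start_mongo.sh.
    # A short server selection timeout makes the failure fast if mongod never came up.
    client = MongoClient(serverSelectionTimeoutMS=2000)
    client.admin.command("ping")  # raises ServerSelectionTimeoutError if unreachable
    print(client.server_info()["version"])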

.gitignore  (vendored; 12 changes)
@@ -1,15 +1,8 @@
+.*
 !.gitignore
 *~
 *.py[co]
 .*.sw[po]
-.cache/
-.coverage
-.coveragerc
-.env
-.idea/
-.pytest_cache/
-.tox/
-.eggs/
 *.egg
 docs/.build
 docs/_build
@@ -20,6 +13,5 @@ env/
 .settings
 .project
 .pydevproject
+tests/test_bugfix.py
 htmlcov/
-venv
-venv3

.pre-commit-config.yaml  (26 deletions)
@@ -1,26 +0,0 @@
-fail_fast: false
-repos:
-  - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.0.1
-    hooks:
-      - id: check-merge-conflict
-      - id: debug-statements
-      - id: trailing-whitespace
-      - id: end-of-file-fixer
-  - repo: https://github.com/ambv/black
-    rev: 21.5b2
-    hooks:
-      - id: black
-  - repo: https://gitlab.com/pycqa/flake8
-    rev: 3.9.2
-    hooks:
-      - id: flake8
-  - repo: https://github.com/asottile/pyupgrade
-    rev: v2.19.1
-    hooks:
-      - id: pyupgrade
-        args: [--py36-plus]
-  - repo: https://github.com/pycqa/isort
-    rev: 5.8.0
-    hooks:
-      - id: isort

.readthedocs.yml  (20 deletions)
@@ -1,20 +0,0 @@
-# .readthedocs.yml
-# Read the Docs configuration file
-# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
-
-# Required
-version: 2
-
-# Build documentation in the docs/ directory with Sphinx
-sphinx:
-  configuration: docs/conf.py
-
-# Optionally set the version of Python and requirements required to build your docs
-python:
-  version: 3.7
-  install:
-    - requirements: docs/requirements.txt
-    # docs/conf.py is importing mongoengine
-    # so mongoengine needs to be installed as well
-    - method: setuptools
-      path: .

.travis.yml  (new file; 15 additions)
@@ -0,0 +1,15 @@
+# http://travis-ci.org/#!/MongoEngine/mongoengine
+language: python
+python:
+    - 2.5
+    - 2.6
+    - 2.7
+    - 3.1
+    - 3.2
+install:
+    - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo apt-get install zlib1g zlib1g-dev; fi
+    - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo ln -s /usr/lib/i386-linux-gnu/libz.so /usr/lib/; fi
+    - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install PIL --use-mirrors ; true; fi
+    - python setup.py install
+script:
+    - python setup.py test

AUTHORS  (155 changes)
@@ -12,10 +12,12 @@ Laine Herron https://github.com/LaineHerron
 
 CONTRIBUTORS
 
-Derived from the git logs, inevitably incomplete but all of whom and others
+Dervived from the git logs, inevitably incomplete but all of whom and others
 have submitted patches, reported bugs and generally helped make MongoEngine
 that much better:
 
+* Harry Marr
+* Ross Lawley
 * blackbrrr
 * Florian Schlachter
 * Vincent Driessen
@@ -23,7 +25,7 @@ that much better:
 * flosch
 * Deepak Thukral
 * Colin Howe
-* Wilson Júnior (https://github.com/wpjunior)
+* Wilson Júnior
 * Alistair Roche
 * Dan Crosta
 * Viktor Kerkez
@@ -75,7 +77,7 @@ that much better:
 * Adam Parrish
 * jpfarias
 * jonrscott
-* Alice Zoë Bevan-McGregor (https://github.com/amcgregor/)
+* Alice Zoë Bevan-McGregor
 * Stephen Young
 * tkloc
 * aid
@@ -104,7 +106,7 @@ that much better:
 * Adam Reeve
 * Anthony Nemitz
 * deignacio
-* Shaun Duncan
+* shaunduncan
 * Meir Kriheli
 * Andrey Fedoseev
 * aparajita
@@ -119,147 +121,4 @@ that much better:
 * Anton Kolechkin
 * Sergey Nikitin
 * psychogenic
-* Stefan Wójcik (https://github.com/wojcikstefan)
+* Stefan Wójcik
-* dimonb
-* Garry Polley
-* James Slagle
-* Adrian Scott
-* Peter Teichman
-* Jakub Kot
-* Jorge Bastida
-* Aleksandr Sorokoumov
-* Yohan Graterol
-* bool-dev
-* Russ Weeks
-* Paul Swartz
-* Sundar Raman
-* Benoit Louy
-* Loic Raucy (https://github.com/lraucy)
-* hellysmile
-* Jaepil Jeong
-* Daniil Sharou
-* Pete Campton
-* Martyn Smith
-* Marcelo Anton
-* Aleksey Porfirov (https://github.com/lexqt)
-* Nicolas Trippar
-* Manuel Hermann
-* Gustavo Gawryszewski
-* Max Countryman
-* caitifbrito
-* lcya86 刘春洋
-* Martin Alderete (https://github.com/malderete)
-* Nick Joyce
-* Jared Forsyth
-* Kenneth Falck
-* Lukasz Balcerzak
-* Nicolas Cortot
-* Alex (https://github.com/kelsta)
-* Jin Zhang
-* Daniel Axtens
-* Leo-Naeka
-* Ryan Witt (https://github.com/ryanwitt)
-* Jiequan (https://github.com/Jiequan)
-* hensom (https://github.com/hensom)
-* zhy0216 (https://github.com/zhy0216)
-* istinspring (https://github.com/istinspring)
-* Massimo Santini (https://github.com/mapio)
-* Nigel McNie (https://github.com/nigelmcnie)
-* ygbourhis (https://github.com/ygbourhis)
-* Bob Dickinson (https://github.com/BobDickinson)
-* Michael Bartnett (https://github.com/michaelbartnett)
-* Alon Horev (https://github.com/alonho)
-* Kelvin Hammond (https://github.com/kelvinhammond)
-* Jatin Chopra (https://github.com/jatin)
-* Paul Uithol (https://github.com/PaulUithol)
-* Thom Knowles (https://github.com/fleat)
-* Paul (https://github.com/squamous)
-* Olivier Cortès (https://github.com/Karmak23)
-* crazyzubr (https://github.com/crazyzubr)
-* FrankSomething (https://github.com/FrankSomething)
-* Alexandr Morozov (https://github.com/LK4D4)
-* mishudark (https://github.com/mishudark)
-* Joe Friedl (https://github.com/grampajoe)
-* Daniel Ward (https://github.com/danielward)
-* Aniket Deshpande (https://github.com/anicake)
-* rfkrocktk (https://github.com/rfkrocktk)
-* Gustavo Andrés Angulo (https://github.com/woakas)
-* Dmytro Popovych (https://github.com/drudim)
-* Tom (https://github.com/tomprimozic)
-* j0hnsmith (https://github.com/j0hnsmith)
-* Damien Churchill (https://github.com/damoxc)
-* Jonathan Simon Prates (https://github.com/jonathansp)
-* Thiago Papageorgiou (https://github.com/tmpapageorgiou)
-* Omer Katz (https://github.com/thedrow)
-* Falcon Dai (https://github.com/falcondai)
-* Polyrabbit (https://github.com/polyrabbit)
-* Sagiv Malihi (https://github.com/sagivmalihi)
-* Dmitry Konishchev (https://github.com/KonishchevDmitry)
-* Martyn Smith (https://github.com/martynsmith)
-* Andrei Zbikowski (https://github.com/b1naryth1ef)
-* Ronald van Rij (https://github.com/ronaldvanrij)
-* François Schmidts (https://github.com/jaesivsm)
-* Eric Plumb (https://github.com/professorplumb)
-* Damien Churchill (https://github.com/damoxc)
-* Aleksandr Sorokoumov (https://github.com/Gerrrr)
-* Clay McClure (https://github.com/claymation)
-* Bruno Rocha (https://github.com/rochacbruno)
-* Norberto Leite (https://github.com/nleite)
-* Bob Cribbs (https://github.com/bocribbz)
-* Jay Shirley (https://github.com/jshirley)
-* David Bordeynik (https://github.com/DavidBord)
-* Axel Haustant (https://github.com/noirbizarre)
-* David Czarnecki (https://github.com/czarneckid)
-* Vyacheslav Murashkin (https://github.com/a4tunado)
-* André Ericson https://github.com/aericson)
-* Mikhail Moshnogorsky (https://github.com/mikhailmoshnogorsky)
-* Diego Berrocal (https://github.com/cestdiego)
-* Matthew Ellison (https://github.com/seglberg)
-* Jimmy Shen (https://github.com/jimmyshen)
-* J. Fernando Sánchez (https://github.com/balkian)
-* Michael Chase (https://github.com/rxsegrxup)
-* Eremeev Danil (https://github.com/elephanter)
-* Catstyle Lee (https://github.com/Catstyle)
-* Kiryl Yermakou (https://github.com/rma4ok)
-* Matthieu Rigal (https://github.com/MRigal)
-* Charanpal Dhanjal (https://github.com/charanpald)
-* Emmanuel Leblond (https://github.com/touilleMan)
-* Breeze.Kay (https://github.com/9nix00)
-* Vicki Donchenko (https://github.com/kivistein)
-* Emile Caron (https://github.com/emilecaron)
-* Amit Lichtenberg (https://github.com/amitlicht)
-* Gang Li (https://github.com/iici-gli)
-* Lars Butler (https://github.com/larsbutler)
-* George Macon (https://github.com/gmacon)
-* Ashley Whetter (https://github.com/AWhetter)
-* Paul-Armand Verhaegen (https://github.com/paularmand)
-* Steven Rossiter (https://github.com/BeardedSteve)
-* Luo Peng (https://github.com/RussellLuo)
-* Bryan Bennett (https://github.com/bbenne10)
-* Gilb's Gilb's (https://github.com/gilbsgilbs)
-* Joshua Nedrud (https://github.com/Neurostack)
-* Shu Shen (https://github.com/shushen)
-* xiaost7 (https://github.com/xiaost7)
-* Victor Varvaryuk
-* Stanislav Kaledin (https://github.com/sallyruthstruik)
-* Dmitry Yantsen (https://github.com/mrTable)
-* Renjianxin (https://github.com/Davidrjx)
-* Erdenezul Batmunkh (https://github.com/erdenezul)
-* Andy Yankovsky (https://github.com/werat)
-* Bastien Gérard (https://github.com/bagerard)
-* Trevor Hall (https://github.com/tjhall13)
-* Gleb Voropaev (https://github.com/buggyspace)
-* Paulo Amaral (https://github.com/pauloAmaral)
-* Gaurav Dadhania (https://github.com/GVRV)
-* Yurii Andrieiev (https://github.com/yandrieiev)
-* Filip Kucharczyk (https://github.com/Pacu2)
-* Eric Timmons (https://github.com/daewok)
-* Matthew Simpson (https://github.com/mcsimps2)
-* Leonardo Domingues (https://github.com/leodmgs)
-* Agustin Barto (https://github.com/abarto)
-* Stankiewicz Mateusz (https://github.com/mas15)
-* Felix Schultheiß (https://github.com/felix-smashdocs)
-* Jan Stein (https://github.com/janste63)
-* Timothé Perez (https://github.com/AchilleAsh)
-* oleksandr-l5 (https://github.com/oleksandr-l5)
-* Ido Shraga (https://github.com/idoshr)

CONTRIBUTING.rst  (109 deletions)
@@ -1,109 +0,0 @@
-Contributing to MongoEngine
-===========================
-
-MongoEngine has a large `community
-<https://raw.github.com/MongoEngine/mongoengine/master/AUTHORS>`_ and
-contributions are always encouraged. Contributions can be as simple as
-minor tweaks to the documentation. Please read these guidelines before
-sending a pull request.
-
-Bugfixes and New Features
--------------------------
-
-Before starting to write code, look for existing `tickets
-<https://github.com/MongoEngine/mongoengine/issues?state=open>`_ or `create one
-<https://github.com/MongoEngine/mongoengine/issues>`_ for your specific
-issue or feature request. That way you avoid working on something
-that might not be of interest or that has already been addressed. If in doubt
-post to the `user group <http://groups.google.com/group/mongoengine-users>`
-
-Supported Interpreters
-----------------------
-
-MongoEngine supports CPython 3.5 and newer as well as Pypy3.
-Language features not supported by all interpreters can not be used.
-
-Python3 codebase
-----------------------
-
-Since 0.20, the codebase is exclusively Python 3.
-
-Earlier versions were exclusively Python2, and were relying on 2to3 to support Python3 installs.
-Travis runs the tests against the main Python 3.x versions.
-
-
-Style Guide
------------
-
-MongoEngine's codebase is auto-formatted with `black <https://github.com/python/black>`_, imports are ordered with `isort <https://pycqa.github.io/isort/>`_
-and other tools like flake8 are also used. Those tools will run as part of the CI and will fail in case the code is not formatted properly.
-
-To install all development tools, simply run the following commands:
-
-.. code-block:: console
-
-    $ python -m pip install -r requirements-dev.txt
-
-
-You can install `pre-commit <https://pre-commit.com/>`_ into your git hooks,
-to automatically check and fix any formatting issue before creating a
-git commit.
-
-To enable ``pre-commit`` simply run:
-
-.. code-block:: console
-
-    $ pre-commit install
-
-See the ``.pre-commit-config.yaml`` configuration file for more information
-on how it works.
-
-pre-commit will now run upon every commit and will reject anything that doesn't comply.
-
-You can also run all the checks with ``pre-commit run -a``, this is what is used in the CI.
-
-Testing
--------
-
-All tests are run on `Travis <http://travis-ci.org/MongoEngine/mongoengine>`_
-and any pull requests are automatically tested. Any pull requests without
-tests will take longer to be integrated and might be refused.
-
-You may also submit a simple failing test as a pull request if you don't know
-how to fix it, it will be easier for other people to work on it and it may get
-fixed faster.
-
-General Guidelines
-------------------
-
-- Avoid backward breaking changes if at all possible.
-- If you *have* to introduce a breaking change, make it very clear in your
-  pull request's description. Also, describe how users of this package
-  should adapt to the breaking change in docs/upgrade.rst.
-- Write inline documentation for new classes and methods.
-- Write tests and make sure they pass (make sure you have a mongod
-  running on the default port, then execute ``python setup.py test``
-  from the cmd line to run the test suite).
-- Ensure tests pass on all supported Python, PyMongo, and MongoDB versions.
-  You can test various Python and PyMongo versions locally by executing
-  ``tox``. For different MongoDB versions, you can rely on our automated
-  Travis tests.
-- Add enhancements or problematic bug fixes to docs/changelog.rst.
-- Add yourself to AUTHORS :)
-
-Documentation
--------------
-
-To contribute to the `API documentation
-<http://docs.mongoengine.org/en/latest/apireference.html>`_
-just make your changes to the inline documentation of the appropriate
-`source code <https://github.com/MongoEngine/mongoengine>`_ or `rst file
-<https://github.com/MongoEngine/mongoengine/tree/master/docs>`_ in a
-branch and submit a `pull request <https://help.github.com/articles/using-pull-requests>`_.
-You might also use the github `Edit <https://github.com/blog/844-forking-with-the-edit-button>`_
-button.
-
-If you want to test your documentation changes locally, you need to install
-the ``sphinx`` and ``sphinx_rtd_theme`` packages. Once these are installed,
-go to the ``docs`` directory, run ``make html`` and inspect the updated docs
-by running ``open _build/html/index.html``.

LICENSE  (2 changes)
@@ -1,4 +1,4 @@
-Copyright (c) 2009 See AUTHORS
+Copyright (c) 2009-2012 See AUTHORS
 
 Permission is hereby granted, free of charge, to any person
 obtaining a copy of this software and associated documentation

README.rst  (123 changes)
@@ -4,78 +4,39 @@ MongoEngine
 :Info: MongoEngine is an ORM-like layer on top of PyMongo.
 :Repository: https://github.com/MongoEngine/mongoengine
 :Author: Harry Marr (http://github.com/hmarr)
-:Maintainer: Stefan Wójcik (http://github.com/wojcikstefan)
+:Maintainer: Ross Lawley (http://github.com/rozza)
 
-.. image:: https://travis-ci.org/MongoEngine/mongoengine.svg?branch=master
+.. image:: https://secure.travis-ci.org/MongoEngine/mongoengine.png?branch=master
-   :target: https://travis-ci.org/MongoEngine/mongoengine
+   :target: http://travis-ci.org/MongoEngine/mongoengine
 
-.. image:: https://coveralls.io/repos/github/MongoEngine/mongoengine/badge.svg?branch=master
-   :target: https://coveralls.io/github/MongoEngine/mongoengine?branch=master
-
-.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
-   :target: https://github.com/ambv/black
-
 About
 =====
 MongoEngine is a Python Object-Document Mapper for working with MongoDB.
-Documentation is available at https://mongoengine-odm.readthedocs.io - there
+Documentation available at http://mongoengine-odm.rtfd.org - there is currently
-is currently a `tutorial <https://mongoengine-odm.readthedocs.io/tutorial.html>`_,
+a `tutorial <http://readthedocs.org/docs/mongoengine-odm/en/latest/tutorial.html>`_, a `user guide
-a `user guide <https://mongoengine-odm.readthedocs.io/guide/index.html>`_, and
+<http://readthedocs.org/docs/mongoengine-odm/en/latest/userguide.html>`_ and an `API reference
-an `API reference <https://mongoengine-odm.readthedocs.io/apireference.html>`_.
+<http://readthedocs.org/docs/mongoengine-odm/en/latest/apireference.html>`_.
 
-Supported MongoDB Versions
-==========================
-MongoEngine is currently tested against MongoDB v3.4, v3.6 and v4.0. Future versions
-should be supported as well, but aren't actively tested at the moment. Make
-sure to open an issue or submit a pull request if you experience any problems
-with MongoDB version > 4.0.
-
 Installation
 ============
-We recommend the use of `virtualenv <https://virtualenv.pypa.io/>`_ and of
+If you have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_
-`pip <https://pip.pypa.io/>`_. You can then use ``python -m pip install -U mongoengine``.
+you can use ``easy_install -U mongoengine``. Otherwise, you can download the
-You may also have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_
+source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and run ``python
-and thus you can use ``easy_install -U mongoengine``. Another option is
+setup.py install``.
-`pipenv <https://docs.pipenv.org/>`_. You can then use ``pipenv install mongoengine``
-to both create the virtual environment and install the package. Otherwise, you can
-download the source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and
-run ``python setup.py install``.
-
-The support for Python2 was dropped with MongoEngine 0.20.0
 
 Dependencies
 ============
-All of the dependencies can easily be installed via `python -m pip <https://pip.pypa.io/>`_.
+- pymongo 2.1.1+
-At the very least, you'll need these two packages to use MongoEngine:
+- sphinx (optional - for documentation generation)
-
-- pymongo>=3.4
-
-If you utilize a ``DateTimeField``, you might also use a more flexible date parser:
-
-- dateutil>=2.1.0
-
-If you need to use an ``ImageField`` or ``ImageGridFsProxy``:
-
-- Pillow>=2.0.0
-
-If you need to use signals:
-
-- blinker>=1.3
 
 Examples
 ========
-Some simple examples of what MongoEngine code looks like:
+Some simple examples of what MongoEngine code looks like::
-
-.. code :: python
-
-    from mongoengine import *
-    connect('mydb')
 
     class BlogPost(Document):
         title = StringField(required=True, max_length=200)
-        posted = DateTimeField(default=datetime.datetime.utcnow)
+        posted = DateTimeField(default=datetime.datetime.now)
         tags = ListField(StringField(max_length=50))
-        meta = {'allow_inheritance': True}
 
     class TextPost(BlogPost):
         content = StringField(required=True)
@@ -95,52 +56,36 @@ Some simple examples of what MongoEngine code looks like:
 
     # Iterate over all posts using the BlogPost superclass
    >>> for post in BlogPost.objects:
-    ...     print('===', post.title, '===')
+    ...     print '===', post.title, '==='
     ...     if isinstance(post, TextPost):
-    ...         print(post.content)
+    ...         print post.content
     ...     elif isinstance(post, LinkPost):
-    ...         print('Link:', post.url)
+    ...         print 'Link:', post.url
+    ...     print
     ...
+    === Using MongoEngine ===
+    See the tutorial
 
-    # Count all blog posts and its subtypes
+    === MongoEngine Docs ===
-    >>> BlogPost.objects.count()
+    Link: hmarr.com/mongoengine
 
+    >>> len(BlogPost.objects)
     2
-    >>> TextPost.objects.count()
+    >>> len(HtmlPost.objects)
     1
-    >>> LinkPost.objects.count()
+    >>> len(LinkPost.objects)
     1
 
-    # Count tagged posts
+    # Find tagged posts
-    >>> BlogPost.objects(tags='mongoengine').count()
+    >>> len(BlogPost.objects(tags='mongoengine'))
     2
-    >>> BlogPost.objects(tags='mongodb').count()
+    >>> len(BlogPost.objects(tags='mongodb'))
     1
 
 Tests
 =====
 To run the test suite, ensure you are running a local instance of MongoDB on
-the standard port and have ``pytest`` installed. Then, run ``python setup.py test``
+the standard port, and run ``python setup.py test``.
-or simply ``pytest``.
-
-To run the test suite on every supported Python and PyMongo version, you can
-use ``tox``. You'll need to make sure you have each supported Python version
-installed in your environment and then:
-
-.. code-block:: shell
-
-    # Install tox
-    $ python -m pip install tox
-    # Run the test suites
-    $ tox
-
-If you wish to run a subset of tests, use the pytest convention:
-
-.. code-block:: shell
-
-    # Run all the tests in a particular test file
-    $ pytest tests/fields/test_fields.py
-    # Run only particular test class in that file
-    $ pytest tests/fields/test_fields.py::TestField
 
 Community
 =========
@@ -148,7 +93,11 @@ Community
   <http://groups.google.com/group/mongoengine-users>`_
 - `MongoEngine Developers mailing list
   <http://groups.google.com/group/mongoengine-dev>`_
+- `#mongoengine IRC channel <irc://irc.freenode.net/mongoengine>`_
 
 Contributing
 ============
-We welcome contributions! See the `Contribution guidelines <https://github.com/MongoEngine/mongoengine/blob/master/CONTRIBUTING.rst>`_
+The source is available on `GitHub <http://github.com/MongoEngine/mongoengine>`_ - to
+contribute to the project, fork it on GitHub and send a pull request, all
+contributions and suggestions are welcome!
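
One substantive API difference visible in the README diff above: the master-side examples count documents with ``QuerySet.count()``, while the v0.7rc1-era examples call ``len()`` on the queryset. A short sketch of the distinction (the class and query are illustrative):

.. code-block:: python

    from mongoengine import Document, StringField, connect

    connect("tumblelog")  # example database name

    class BlogPost(Document):
        title = StringField(required=True, max_length=200)

    # Master-era style: ask the server to count matching documents.
    n = BlogPost.objects(title__contains="Mongo").count()

    # v0.7-era style: still works, but len() typically evaluates the
    # queryset on the client before counting.
    n = len(BlogPost.objects(title__contains="Mongo"))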

benchmark.py  (new file; 199 additions)
@@ -0,0 +1,199 @@
+#!/usr/bin/env python
+
+import timeit
+
+
+def cprofile_main():
+    from pymongo import Connection
+    connection = Connection()
+    connection.drop_database('timeit_test')
+    connection.disconnect()
+
+    from mongoengine import Document, DictField, connect
+    connect("timeit_test")
+
+    class Noddy(Document):
+        fields = DictField()
+
+    for i in xrange(1):
+        noddy = Noddy()
+        for j in range(20):
+            noddy.fields["key" + str(j)] = "value " + str(j)
+        noddy.save()
+
+
+def main():
+    """
+    0.4 Performance Figures ...
+
+    ----------------------------------------------------------------------------------------------------
+    Creating 10000 dictionaries - Pymongo
+    3.86744189262
+    ----------------------------------------------------------------------------------------------------
+    Creating 10000 dictionaries - MongoEngine
+    6.23374891281
+    ----------------------------------------------------------------------------------------------------
+    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
+    5.33027005196
+    ----------------------------------------------------------------------------------------------------
+    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
+    pass - No Cascade
+
+    0.5.X
+    ----------------------------------------------------------------------------------------------------
+    Creating 10000 dictionaries - Pymongo
+    3.89597702026
+    ----------------------------------------------------------------------------------------------------
+    Creating 10000 dictionaries - MongoEngine
+    21.7735359669
+    ----------------------------------------------------------------------------------------------------
+    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
+    19.8670389652
+    ----------------------------------------------------------------------------------------------------
+    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
+    pass - No Cascade
+
+    0.6.X
+    ----------------------------------------------------------------------------------------------------
+    Creating 10000 dictionaries - Pymongo
+    3.81559205055
+    ----------------------------------------------------------------------------------------------------
+    Creating 10000 dictionaries - MongoEngine
+    10.0446798801
+    ----------------------------------------------------------------------------------------------------
+    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
+    9.51354718208
+    ----------------------------------------------------------------------------------------------------
+    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
+    9.02567505836
+    ----------------------------------------------------------------------------------------------------
+    Creating 10000 dictionaries - MongoEngine, force=True
+    8.44933390617
+
+    0.7.X
+    ----------------------------------------------------------------------------------------------------
+    Creating 10000 dictionaries - Pymongo
+    3.78801012039
+    ----------------------------------------------------------------------------------------------------
+    Creating 10000 dictionaries - MongoEngine
+    9.73050498962
+    ----------------------------------------------------------------------------------------------------
+    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
+    8.33456707001
+    ----------------------------------------------------------------------------------------------------
+    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
+    8.37778115273
+    ----------------------------------------------------------------------------------------------------
+    Creating 10000 dictionaries - MongoEngine, force=True
+    8.36906409264
+    """
+
+    setup = """
+from pymongo import Connection
+connection = Connection()
+connection.drop_database('timeit_test')
+"""
+
+    stmt = """
+from pymongo import Connection
+connection = Connection()
+
+db = connection.timeit_test
+noddy = db.noddy
+
+for i in xrange(10000):
+    example = {'fields': {}}
+    for j in range(20):
+        example['fields']["key"+str(j)] = "value "+str(j)
+
+    noddy.insert(example)
+
+myNoddys = noddy.find()
+[n for n in myNoddys] # iterate
+"""
+
+    print "-" * 100
+    print """Creating 10000 dictionaries - Pymongo"""
+    t = timeit.Timer(stmt=stmt, setup=setup)
+    print t.timeit(1)
+
+    setup = """
+from pymongo import Connection
+connection = Connection()
+connection.drop_database('timeit_test')
+connection.disconnect()
+
+from mongoengine import Document, DictField, connect
+connect("timeit_test")
+
+class Noddy(Document):
+    fields = DictField()
+"""
+
+    stmt = """
+for i in xrange(10000):
+    noddy = Noddy()
+    for j in range(20):
+        noddy.fields["key"+str(j)] = "value "+str(j)
+    noddy.save()
+
+myNoddys = Noddy.objects()
+[n for n in myNoddys] # iterate
+"""
+
+    print "-" * 100
+    print """Creating 10000 dictionaries - MongoEngine"""
+    t = timeit.Timer(stmt=stmt, setup=setup)
+    print t.timeit(1)
+
+    stmt = """
+for i in xrange(10000):
+    noddy = Noddy()
+    for j in range(20):
+        noddy.fields["key"+str(j)] = "value "+str(j)
+    noddy.save(safe=False, validate=False)
+
+myNoddys = Noddy.objects()
+[n for n in myNoddys] # iterate
+"""
+
+    print "-" * 100
+    print """Creating 10000 dictionaries - MongoEngine, safe=False, validate=False"""
+    t = timeit.Timer(stmt=stmt, setup=setup)
+    print t.timeit(1)
+
+    stmt = """
+for i in xrange(10000):
+    noddy = Noddy()
+    for j in range(20):
+        noddy.fields["key"+str(j)] = "value "+str(j)
+    noddy.save(safe=False, validate=False, cascade=False)
+
+myNoddys = Noddy.objects()
+[n for n in myNoddys] # iterate
+"""
+
+    print "-" * 100
+    print """Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False"""
+    t = timeit.Timer(stmt=stmt, setup=setup)
+    print t.timeit(1)
+
+    stmt = """
+for i in xrange(10000):
+    noddy = Noddy()
+    for j in range(20):
+        noddy.fields["key"+str(j)] = "value "+str(j)
+    noddy.save(force_insert=True, safe=False, validate=False, cascade=False)
+
+myNoddys = Noddy.objects()
+[n for n in myNoddys] # iterate
+"""
+
+    print "-" * 100
+    print """Creating 10000 dictionaries - MongoEngine, force=True"""
+    t = timeit.Timer(stmt=stmt, setup=setup)
+    print t.timeit(1)
+
+
+if __name__ == "__main__":
+    main()
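
benchmark.py is Python 2 throughout: ``print`` statements, ``xrange``, and pymongo's old ``Connection`` class, which pymongo 3 replaced with ``MongoClient`` (the deleted master-side benchmark further down is the modernized rewrite of this script). For reference, a minimal sketch of the connection-API difference (illustrative only):

.. code-block:: python

    # pymongo 2.x style, as used in benchmark.py above (removed in pymongo 3):
    #     from pymongo import Connection
    #     connection = Connection()
    #     connection.disconnect()

    # pymongo 3.x equivalent:
    from pymongo import MongoClient

    connection = MongoClient()  # connects lazily to localhost:27017 by default
    connection.drop_database("timeit_test")
    connection.close()  # replaces Connection.disconnect()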

@@ -1,142 +0,0 @@
-from timeit import repeat
-
-import mongoengine
-from mongoengine import (
-    BooleanField,
-    Document,
-    EmailField,
-    EmbeddedDocument,
-    EmbeddedDocumentField,
-    IntField,
-    ListField,
-    StringField,
-)
-
-mongoengine.connect(db="mongoengine_benchmark_test")
-
-
-def timeit(f, n=10000):
-    return min(repeat(f, repeat=3, number=n)) / float(n)
-
-
-def test_basic():
-    class Book(Document):
-        name = StringField()
-        pages = IntField()
-        tags = ListField(StringField())
-        is_published = BooleanField()
-        author_email = EmailField()
-
-    Book.drop_collection()
-
-    def init_book():
-        return Book(
-            name="Always be closing",
-            pages=100,
-            tags=["self-help", "sales"],
-            is_published=True,
-            author_email="alec@example.com",
-        )
-
-    print("Doc initialization: %.3fus" % (timeit(init_book, 1000) * 10 ** 6))
-
-    b = init_book()
-    print("Doc getattr: %.3fus" % (timeit(lambda: b.name, 10000) * 10 ** 6))
-
-    print(
-        "Doc setattr: %.3fus"
-        % (timeit(lambda: setattr(b, "name", "New name"), 10000) * 10 ** 6)  # noqa B010
-    )
-
-    print("Doc to mongo: %.3fus" % (timeit(b.to_mongo, 1000) * 10 ** 6))
-
-    print("Doc validation: %.3fus" % (timeit(b.validate, 1000) * 10 ** 6))
-
-    def save_book():
-        b._mark_as_changed("name")
-        b._mark_as_changed("tags")
-        b.save()
-
-    print("Save to database: %.3fus" % (timeit(save_book, 100) * 10 ** 6))
-
-    son = b.to_mongo()
-    print(
-        "Load from SON: %.3fus" % (timeit(lambda: Book._from_son(son), 1000) * 10 ** 6)
-    )
-
-    print(
-        "Load from database: %.3fus" % (timeit(lambda: Book.objects[0], 100) * 10 ** 6)
-    )
-
-    def create_and_delete_book():
-        b = init_book()
-        b.save()
-        b.delete()
-
-    print(
-        "Init + save to database + delete: %.3fms"
-        % (timeit(create_and_delete_book, 10) * 10 ** 3)
-    )
-
-
-def test_big_doc():
-    class Contact(EmbeddedDocument):
-        name = StringField()
-        title = StringField()
-        address = StringField()
-
-    class Company(Document):
-        name = StringField()
-        contacts = ListField(EmbeddedDocumentField(Contact))
-
-    Company.drop_collection()
-
-    def init_company():
-        return Company(
-            name="MongoDB, Inc.",
-            contacts=[
-                Contact(name="Contact %d" % x, title="CEO", address="Address %d" % x)
-                for x in range(1000)
-            ],
-        )
-
-    company = init_company()
-    print("Big doc to mongo: %.3fms" % (timeit(company.to_mongo, 100) * 10 ** 3))
-
-    print("Big doc validation: %.3fms" % (timeit(company.validate, 1000) * 10 ** 3))
-
-    company.save()
-
-    def save_company():
-        company._mark_as_changed("name")
-        company._mark_as_changed("contacts")
-        company.save()
-
-    print("Save to database: %.3fms" % (timeit(save_company, 100) * 10 ** 3))
-
-    son = company.to_mongo()
-    print(
-        "Load from SON: %.3fms"
-        % (timeit(lambda: Company._from_son(son), 100) * 10 ** 3)
-    )
-
-    print(
-        "Load from database: %.3fms"
-        % (timeit(lambda: Company.objects[0], 100) * 10 ** 3)
-    )
-
-    def create_and_delete_company():
-        c = init_company()
-        c.save()
-        c.delete()
-
-    print(
-        "Init + save to database + delete: %.3fms"
-        % (timeit(create_and_delete_company, 10) * 10 ** 3)
-    )
-
-
-if __name__ == "__main__":
-    test_basic()
-    print("-" * 100)
-    test_big_doc()
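
The ``timeit(f, n)`` helper in the deleted file above takes the best of three repeats and divides by the iteration count, yielding seconds per single call, which the callers then scale to microseconds or milliseconds. A small standalone usage example (the measured lambda is an arbitrary choice):

.. code-block:: python

    from timeit import repeat

    def timeit(f, n=10000):
        # best-of-3 total runtime, normalized to seconds per call
        return min(repeat(f, repeat=3, number=n)) / float(n)

    per_call = timeit(lambda: sum(range(100)), n=1000)
    print("sum(range(100)): %.3fus" % (per_call * 10 ** 6))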

@@ -1,161 +0,0 @@
-import timeit
-
-
-def main():
-    setup = """
-from pymongo import MongoClient
-
-connection = MongoClient()
-connection.drop_database('mongoengine_benchmark_test')
-"""
-
-    stmt = """
-from pymongo import MongoClient
-
-connection = MongoClient()
-
-db = connection.mongoengine_benchmark_test
-noddy = db.noddy
-
-for i in range(10000):
-    example = {'fields': {}}
-    for j in range(20):
-        example['fields']["key"+str(j)] = "value "+str(j)
-
-    noddy.insert_one(example)
-
-myNoddys = noddy.find()
-[n for n in myNoddys] # iterate
-"""
-
-    print("-" * 100)
-    print("PyMongo: Creating 10000 dictionaries.")
-    t = timeit.Timer(stmt=stmt, setup=setup)
-    print(f"{t.timeit(1)}s")
-
-    stmt = """
-from pymongo import MongoClient, WriteConcern
-connection = MongoClient()
-
-db = connection.mongoengine_benchmark_test
-noddy = db.noddy.with_options(write_concern=WriteConcern(w=0))
-
-for i in range(10000):
-    example = {'fields': {}}
-    for j in range(20):
-        example['fields']["key"+str(j)] = "value "+str(j)
-
-    noddy.insert_one(example)
-
-myNoddys = noddy.find()
-[n for n in myNoddys] # iterate
-"""
-
-    print("-" * 100)
-    print('PyMongo: Creating 10000 dictionaries (write_concern={"w": 0}).')
-    t = timeit.Timer(stmt=stmt, setup=setup)
-    print(f"{t.timeit(1)}s")
-
-    setup = """
-from pymongo import MongoClient
-
-connection = MongoClient()
-connection.drop_database('mongoengine_benchmark_test')
-connection.close()
-
-from mongoengine import Document, DictField, connect
-connect("mongoengine_benchmark_test")
-
-class Noddy(Document):
-    fields = DictField()
-"""
-
-    stmt = """
-for i in range(10000):
-    noddy = Noddy()
-    for j in range(20):
-        noddy.fields["key"+str(j)] = "value "+str(j)
-    noddy.save()
-
-myNoddys = Noddy.objects()
-[n for n in myNoddys] # iterate
-"""
-
-    print("-" * 100)
-    print("MongoEngine: Creating 10000 dictionaries.")
-    t = timeit.Timer(stmt=stmt, setup=setup)
-    print(f"{t.timeit(1)}s")
-
-    stmt = """
-for i in range(10000):
-    noddy = Noddy()
-    fields = {}
-    for j in range(20):
-        fields["key"+str(j)] = "value "+str(j)
-    noddy.fields = fields
-    noddy.save()
-
-myNoddys = Noddy.objects()
-[n for n in myNoddys] # iterate
-"""
-
-    print("-" * 100)
-    print("MongoEngine: Creating 10000 dictionaries (using a single field assignment).")
-    t = timeit.Timer(stmt=stmt, setup=setup)
-    print(f"{t.timeit(1)}s")
-
-    stmt = """
-for i in range(10000):
-    noddy = Noddy()
-    for j in range(20):
-        noddy.fields["key"+str(j)] = "value "+str(j)
-    noddy.save(write_concern={"w": 0})
-
-myNoddys = Noddy.objects()
-[n for n in myNoddys] # iterate
-"""
-
-    print("-" * 100)
-    print('MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}).')
-    t = timeit.Timer(stmt=stmt, setup=setup)
-    print(f"{t.timeit(1)}s")
-
-    stmt = """
-for i in range(10000):
-    noddy = Noddy()
-    for j in range(20):
-        noddy.fields["key"+str(j)] = "value "+str(j)
-    noddy.save(write_concern={"w": 0}, validate=False)
-
-myNoddys = Noddy.objects()
-[n for n in myNoddys] # iterate
-"""
-
-    print("-" * 100)
-    print(
-        'MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}, validate=False).'
-    )
-    t = timeit.Timer(stmt=stmt, setup=setup)
-    print(f"{t.timeit(1)}s")
-
-    stmt = """
-for i in range(10000):
-    noddy = Noddy()
-    for j in range(20):
-        noddy.fields["key"+str(j)] = "value "+str(j)
-    noddy.save(force_insert=True, write_concern={"w": 0}, validate=False)
-
-myNoddys = Noddy.objects()
-[n for n in myNoddys] # iterate
-"""
-
-    print("-" * 100)
-    print(
-        'MongoEngine: Creating 10000 dictionaries (force_insert=True, write_concern={"w": 0}, validate=False).'
-    )
-    t = timeit.Timer(stmt=stmt, setup=setup)
-    print(f"{t.timeit(1)}s")
-
-
-if __name__ == "__main__":
-    main()

docs/Makefile
@@ -33,14 +33,8 @@ clean:
 html:
 	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
 	@echo
-	@echo "Build finished. Check $(BUILDDIR)/html/index.html"
-
-html-readthedocs:
-	$(SPHINXBUILD) -T -E -b readthedocs $(ALLSPHINXOPTS) $(BUILDDIR)/html
-	@echo
 	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
 
 dirhtml:
 	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
 	@echo

docs/_themes/nature/static/nature.css_t  (vendored; new file; 229 additions)
@@ -0,0 +1,229 @@
+/**
+ * Sphinx stylesheet -- default theme
+ * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ */
+
+@import url("basic.css");
+
+/* -- page layout ----------------------------------------------------------- */
+
+body {
+    font-family: Arial, sans-serif;
+    font-size: 100%;
+    background-color: #111;
+    color: #555;
+    margin: 0;
+    padding: 0;
+}
+
+div.documentwrapper {
+    float: left;
+    width: 100%;
+}
+
+div.bodywrapper {
+    margin: 0 0 0 230px;
+}
+
+hr{
+    border: 1px solid #B1B4B6;
+}
+
+div.document {
+    background-color: #eee;
+}
+
+div.body {
+    background-color: #ffffff;
+    color: #3E4349;
+    padding: 0 30px 30px 30px;
+    font-size: 0.8em;
+}
+
+div.footer {
+    color: #555;
+    width: 100%;
+    padding: 13px 0;
+    text-align: center;
+    font-size: 75%;
+}
+
+div.footer a {
+    color: #444;
+    text-decoration: underline;
+}
+
+div.related {
+    background-color: #6BA81E;
+    line-height: 32px;
+    color: #fff;
+    text-shadow: 0px 1px 0 #444;
+    font-size: 0.80em;
+}
+
+div.related a {
+    color: #E2F3CC;
+}
+
+div.sphinxsidebar {
+    font-size: 0.75em;
+    line-height: 1.5em;
+}
+
+div.sphinxsidebarwrapper{
+    padding: 20px 0;
+}
+
+div.sphinxsidebar h3,
+div.sphinxsidebar h4 {
+    font-family: Arial, sans-serif;
+    color: #222;
+    font-size: 1.2em;
+    font-weight: normal;
+    margin: 0;
+    padding: 5px 10px;
+    background-color: #ddd;
+    text-shadow: 1px 1px 0 white
+}
+
+div.sphinxsidebar h4{
+    font-size: 1.1em;
+}
+
+div.sphinxsidebar h3 a {
+    color: #444;
+}
+
+
+div.sphinxsidebar p {
+    color: #888;
+    padding: 5px 20px;
+}
+
+div.sphinxsidebar p.topless {
+}
+
+div.sphinxsidebar ul {
+    margin: 10px 20px;
+    padding: 0;
+    color: #000;
+}
+
+div.sphinxsidebar a {
+    color: #444;
+}
+
+div.sphinxsidebar input {
+    border: 1px solid #ccc;
+    font-family: sans-serif;
+    font-size: 1em;
+}
+
+div.sphinxsidebar input[type=text]{
+    margin-left: 20px;
+}
+
+/* -- body styles ----------------------------------------------------------- */
+
+a {
+    color: #005B81;
+    text-decoration: none;
+}
+
+a:hover {
+    color: #E32E00;
+    text-decoration: underline;
+}
+
+div.body h1,
+div.body h2,
+div.body h3,
+div.body h4,
+div.body h5,
+div.body h6 {
+    font-family: Arial, sans-serif;
+    background-color: #BED4EB;
+    font-weight: normal;
+    color: #212224;
+    margin: 30px 0px 10px 0px;
+    padding: 5px 0 5px 10px;
+    text-shadow: 0px 1px 0 white
+}
+
+div.body h1 { border-top: 20px solid white; margin-top: 0; font-size: 200%; }
+div.body h2 { font-size: 150%; background-color: #C8D5E3; }
+div.body h3 { font-size: 120%; background-color: #D8DEE3; }
+div.body h4 { font-size: 110%; background-color: #D8DEE3; }
+div.body h5 { font-size: 100%; background-color: #D8DEE3; }
+div.body h6 { font-size: 100%; background-color: #D8DEE3; }
+
+a.headerlink {
+    color: #c60f0f;
+    font-size: 0.8em;
+    padding: 0 4px 0 4px;
+    text-decoration: none;
+}
+
+a.headerlink:hover {
+    background-color: #c60f0f;
+    color: white;
+}
+
+div.body p, div.body dd, div.body li {
+    line-height: 1.5em;
+}
+
+div.admonition p.admonition-title + p {
+    display: inline;
+}
+
+div.highlight{
+    background-color: white;
+}
+
+div.note {
+    background-color: #eee;
+    border: 1px solid #ccc;
+}
+
+div.seealso {
+    background-color: #ffc;
+    border: 1px solid #ff6;
+}
+
+div.topic {
+    background-color: #eee;
+}
+
+div.warning {
+    background-color: #ffe4e4;
+    border: 1px solid #f66;
+}
+
+p.admonition-title {
+    display: inline;
+}
+
+p.admonition-title:after {
+    content: ":";
+}
+
+pre {
+    padding: 10px;
+    background-color: White;
+    color: #222;
+    line-height: 1.2em;
+    border: 1px solid #C6C9CB;
+    font-size: 1.2em;
+    margin: 1.5em 0 1.5em 0;
+    -webkit-box-shadow: 1px 1px 1px #d8d8d8;
+    -moz-box-shadow: 1px 1px 1px #d8d8d8;
+}
+
+tt {
+    background-color: #ecf0f3;
+    color: #222;
+    padding: 1px 2px;
+    font-size: 1.2em;
+    font-family: monospace;
+}
54
docs/_themes/nature/static/pygments.css
vendored
Normal file
@@ -0,0 +1,54 @@
.c { color: #999988; font-style: italic } /* Comment */
.k { font-weight: bold } /* Keyword */
.o { font-weight: bold } /* Operator */
.cm { color: #999988; font-style: italic } /* Comment.Multiline */
.cp { color: #999999; font-weight: bold } /* Comment.Preproc */
.c1 { color: #999988; font-style: italic } /* Comment.Single */
.gd { color: #000000; background-color: #ffdddd } /* Generic.Deleted */
.ge { font-style: italic } /* Generic.Emph */
.gr { color: #aa0000 } /* Generic.Error */
.gh { color: #999999 } /* Generic.Heading */
.gi { color: #000000; background-color: #ddffdd } /* Generic.Inserted */
.go { color: #111 } /* Generic.Output */
.gp { color: #555555 } /* Generic.Prompt */
.gs { font-weight: bold } /* Generic.Strong */
.gu { color: #aaaaaa } /* Generic.Subheading */
.gt { color: #aa0000 } /* Generic.Traceback */
.kc { font-weight: bold } /* Keyword.Constant */
.kd { font-weight: bold } /* Keyword.Declaration */
.kp { font-weight: bold } /* Keyword.Pseudo */
.kr { font-weight: bold } /* Keyword.Reserved */
.kt { color: #445588; font-weight: bold } /* Keyword.Type */
.m { color: #009999 } /* Literal.Number */
.s { color: #bb8844 } /* Literal.String */
.na { color: #008080 } /* Name.Attribute */
.nb { color: #999999 } /* Name.Builtin */
.nc { color: #445588; font-weight: bold } /* Name.Class */
.no { color: #ff99ff } /* Name.Constant */
.ni { color: #800080 } /* Name.Entity */
.ne { color: #990000; font-weight: bold } /* Name.Exception */
.nf { color: #990000; font-weight: bold } /* Name.Function */
.nn { color: #555555 } /* Name.Namespace */
.nt { color: #000080 } /* Name.Tag */
.nv { color: purple } /* Name.Variable */
.ow { font-weight: bold } /* Operator.Word */
.mf { color: #009999 } /* Literal.Number.Float */
.mh { color: #009999 } /* Literal.Number.Hex */
.mi { color: #009999 } /* Literal.Number.Integer */
.mo { color: #009999 } /* Literal.Number.Oct */
.sb { color: #bb8844 } /* Literal.String.Backtick */
.sc { color: #bb8844 } /* Literal.String.Char */
.sd { color: #bb8844 } /* Literal.String.Doc */
.s2 { color: #bb8844 } /* Literal.String.Double */
.se { color: #bb8844 } /* Literal.String.Escape */
.sh { color: #bb8844 } /* Literal.String.Heredoc */
.si { color: #bb8844 } /* Literal.String.Interpol */
.sx { color: #bb8844 } /* Literal.String.Other */
.sr { color: #808000 } /* Literal.String.Regex */
.s1 { color: #bb8844 } /* Literal.String.Single */
.ss { color: #bb8844 } /* Literal.String.Symbol */
.bp { color: #999999 } /* Name.Builtin.Pseudo */
.vc { color: #ff99ff } /* Name.Variable.Class */
.vg { color: #ff99ff } /* Name.Variable.Global */
.vi { color: #ff99ff } /* Name.Variable.Instance */
.il { color: #009999 } /* Literal.Number.Integer.Long */
4
docs/_themes/nature/theme.conf
vendored
Normal file
@@ -0,0 +1,4 @@
[theme]
inherit = basic
stylesheet = nature.css
pygments_style = tango
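These three directives are the whole theme definition: the bundled nature theme inherits Sphinx's ``basic`` theme, pulls its styles from the ``nature.css`` above, and defaults syntax highlighting to the ``tango`` Pygments style. For illustration, a minimal sketch (a hypothetical ``docs/conf.py``; paths are assumptions) of how a Sphinx project would select a theme bundled this way:

    # docs/conf.py (sketch; assumes the _themes/ directory sits next to this file)
    html_theme = "nature"          # directory name that contains theme.conf
    html_theme_path = ["_themes"]  # where Sphinx searches for bundled themes
    pygments_style = "tango"      # optional: matches the theme.conf default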
@@ -13,7 +13,6 @@ Documents
 
 .. autoclass:: mongoengine.Document
    :members:
-   :inherited-members:
 
 .. attribute:: objects
 
@@ -22,114 +21,54 @@ Documents
 
 .. autoclass:: mongoengine.EmbeddedDocument
    :members:
-   :inherited-members:
 
 .. autoclass:: mongoengine.DynamicDocument
    :members:
-   :inherited-members:
 
 .. autoclass:: mongoengine.DynamicEmbeddedDocument
    :members:
-   :inherited-members:
 
 .. autoclass:: mongoengine.document.MapReduceDocument
    :members:
 
 .. autoclass:: mongoengine.ValidationError
    :members:
 
-.. autoclass:: mongoengine.FieldDoesNotExist
-
-Context Managers
-================
-
-.. autoclass:: mongoengine.context_managers.switch_db
-.. autoclass:: mongoengine.context_managers.switch_collection
-.. autoclass:: mongoengine.context_managers.no_dereference
-.. autoclass:: mongoengine.context_managers.query_counter
-
 Querying
 ========
 
-.. automodule:: mongoengine.queryset
-   :synopsis: Queryset level operations
+.. autoclass:: mongoengine.queryset.QuerySet
+   :members:
 
-.. autoclass:: mongoengine.queryset.QuerySet
-   :members:
-   :inherited-members:
+.. automethod:: mongoengine.queryset.QuerySet.__call__
 
-.. automethod:: QuerySet.__call__
+.. autofunction:: mongoengine.queryset.queryset_manager
 
-.. autoclass:: mongoengine.queryset.QuerySetNoCache
-   :members:
-
-.. automethod:: mongoengine.queryset.QuerySetNoCache.__call__
-
-.. autofunction:: mongoengine.queryset.queryset_manager
-
 Fields
 ======
 
-.. autoclass:: mongoengine.base.fields.BaseField
-.. autoclass:: mongoengine.fields.StringField
-.. autoclass:: mongoengine.fields.URLField
-.. autoclass:: mongoengine.fields.EmailField
-.. autoclass:: mongoengine.fields.EnumField
-.. autoclass:: mongoengine.fields.IntField
-.. autoclass:: mongoengine.fields.LongField
-.. autoclass:: mongoengine.fields.FloatField
-.. autoclass:: mongoengine.fields.DecimalField
-.. autoclass:: mongoengine.fields.BooleanField
-.. autoclass:: mongoengine.fields.DateTimeField
-.. autoclass:: mongoengine.fields.ComplexDateTimeField
-.. autoclass:: mongoengine.fields.EmbeddedDocumentField
-.. autoclass:: mongoengine.fields.GenericEmbeddedDocumentField
-.. autoclass:: mongoengine.fields.DynamicField
-.. autoclass:: mongoengine.fields.ListField
-.. autoclass:: mongoengine.fields.EmbeddedDocumentListField
-.. autoclass:: mongoengine.fields.SortedListField
-.. autoclass:: mongoengine.fields.DictField
-.. autoclass:: mongoengine.fields.MapField
-.. autoclass:: mongoengine.fields.ReferenceField
-.. autoclass:: mongoengine.fields.LazyReferenceField
-.. autoclass:: mongoengine.fields.GenericReferenceField
-.. autoclass:: mongoengine.fields.GenericLazyReferenceField
-.. autoclass:: mongoengine.fields.CachedReferenceField
-.. autoclass:: mongoengine.fields.BinaryField
-.. autoclass:: mongoengine.fields.FileField
-.. autoclass:: mongoengine.fields.ImageField
-.. autoclass:: mongoengine.fields.SequenceField
-.. autoclass:: mongoengine.fields.ObjectIdField
-.. autoclass:: mongoengine.fields.UUIDField
-.. autoclass:: mongoengine.fields.GeoPointField
-.. autoclass:: mongoengine.fields.PointField
-.. autoclass:: mongoengine.fields.LineStringField
-.. autoclass:: mongoengine.fields.PolygonField
-.. autoclass:: mongoengine.fields.MultiPointField
-.. autoclass:: mongoengine.fields.MultiLineStringField
-.. autoclass:: mongoengine.fields.MultiPolygonField
-.. autoclass:: mongoengine.fields.GridFSError
-.. autoclass:: mongoengine.fields.GridFSProxy
-.. autoclass:: mongoengine.fields.ImageGridFsProxy
-.. autoclass:: mongoengine.fields.ImproperlyConfigured
-
-Embedded Document Querying
-==========================
-
-.. versionadded:: 0.9
-
-Additional queries for Embedded Documents are available when using the
-:class:`~mongoengine.EmbeddedDocumentListField` to store a list of embedded
-documents.
-
-A list of embedded documents is returned as a special list with the
-following methods:
-
-.. autoclass:: mongoengine.base.datastructures.EmbeddedDocumentList
-   :members:
-
-Misc
-====
-
-.. autofunction:: mongoengine.common._import_class
+.. autoclass:: mongoengine.BinaryField
+.. autoclass:: mongoengine.BooleanField
+.. autoclass:: mongoengine.ComplexDateTimeField
+.. autoclass:: mongoengine.DateTimeField
+.. autoclass:: mongoengine.DecimalField
+.. autoclass:: mongoengine.DictField
+.. autoclass:: mongoengine.DynamicField
+.. autoclass:: mongoengine.EmailField
+.. autoclass:: mongoengine.EmbeddedDocumentField
+.. autoclass:: mongoengine.FileField
+.. autoclass:: mongoengine.FloatField
+.. autoclass:: mongoengine.GenericEmbeddedDocumentField
+.. autoclass:: mongoengine.GenericReferenceField
+.. autoclass:: mongoengine.GeoPointField
+.. autoclass:: mongoengine.ImageField
+.. autoclass:: mongoengine.IntField
+.. autoclass:: mongoengine.ListField
+.. autoclass:: mongoengine.MapField
+.. autoclass:: mongoengine.ObjectIdField
+.. autoclass:: mongoengine.ReferenceField
+.. autoclass:: mongoengine.SequenceField
+.. autoclass:: mongoengine.SortedListField
+.. autoclass:: mongoengine.StringField
+.. autoclass:: mongoengine.URLField
+.. autoclass:: mongoengine.UUIDField
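The "Embedded Document Querying" section removed above documents the special list returned by an ``EmbeddedDocumentListField``. For illustration, a minimal sketch (hypothetical ``Post``/``Comment`` models; an open connection is assumed) of those helpers:

    from mongoengine import (Document, EmbeddedDocument,
                             EmbeddedDocumentListField, StringField)

    class Comment(EmbeddedDocument):
        author = StringField()
        message = StringField()

    class Post(Document):
        comments = EmbeddedDocumentListField(Comment)

    post = Post(comments=[Comment(author="ross", message="hi"),
                          Comment(author="bob", message="yo")])

    ross_only = post.comments.filter(author="ross")  # EmbeddedDocumentList, not a plain list
    bobs = post.comments.get(author="bob")           # raises DoesNotExist if no match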
@@ -1,722 +1,40 @@
 =========
 Changelog
 =========
 
-Development
-===========
-- (Fill this out as you fix issues and develop your features).
-- EnumField improvements: now `choices` limits the values of an enum to allow
-- Fix deepcopy of EmbeddedDocument #2202
-- Fix error when using precision=0 with DecimalField #2535
-- Add support for regex and whole word text search query #2568
+Changes in 0.7.X

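For illustration, a minimal sketch (hypothetical ``Status`` enum and ``Ticket`` model; an open connection is assumed) of the ``EnumField`` ``choices`` improvement listed in the removed Development section above:

    from enum import Enum
    from mongoengine import Document, EnumField

    class Status(Enum):
        NEW = "new"
        ACTIVE = "active"
        DELETED = "deleted"

    class Ticket(Document):
        # `choices` narrows the allowed values to a subset of the enum's members
        status = EnumField(Status, choices=[Status.NEW, Status.ACTIVE])

    Ticket(status=Status.ACTIVE).validate()      # passes
    # Ticket(status=Status.DELETED).validate()  # would raise ValidationError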
-Changes in 0.23.1
-===========
-- Bug fix: ignore LazyReferenceFields when clearing _changed_fields #2484
-- Improve connection doc #2481

-Changes in 0.23.0
 =================
-- Bugfix: manually setting SequenceField in DynamicDocument doesn't increment the counter #2471
-- Add MongoDB 4.2 and 4.4 to CI
-- Add support for allowDiskUse on querysets #2468
+- Added NotUniqueError for duplicate keys (MongoEngine/mongoengine#62)
+- Added custom collection / sequence naming for SequenceFields (MongoEngine/mongoengine#92)
+- Fixed UnboundLocalError in composite index with pk field (MongoEngine/mongoengine#88)

-Changes in 0.22.1
-=================
-- Declare that Py3.5 is not supported in package metadata #2449
-- Moved CI from Travis to Github-Actions

-Changes in 0.22.0
-=================
-- Fix LazyReferenceField dereferencing in embedded documents #2426
-- Fix regarding the recent use of Cursor.__spec in .count() that was interfering with mongomock #2425
-- Drop support for Python 3.5 by introducing f-strings in the codebase

-Changes in 0.21.0
-=================
-- Bug fix in DynamicDocument which is not parsing known fields in constructor like Document do #2412
-- When using pymongo >= 3.7, make use of Collection.count_documents instead of Collection.count
-  and Cursor.count that got deprecated in pymongo >= 3.7.
-  This should have a negative impact on performance of count see Issue #2219
-- Fix a bug that made the queryset drop the read_preference after clone().
-- Remove Py3.5 from CI as it reached EOL and add Python 3.9
-- Fix some issues related with db_field/field conflict in constructor #2414
-- BREAKING CHANGE: Fix the behavior of Doc.objects.limit(0) which should return all documents (similar to mongodb) #2311
-- Bug fix in ListField when updating the first item, it was saving the whole list, instead of
-  just replacing the first item (as usually done when updating 1 item of the list) #2392
-- Add EnumField: ``mongoengine.fields.EnumField``
-- Refactoring - Remove useless code related to Document.__only_fields and Queryset.only_fields
-- Fix query transformation regarding special operators #2365
-- Bug Fix: Document.save() fails when shard_key is not _id #2154

-Changes in 0.20.0
-=================
-- ATTENTION: Drop support for Python2
-- Add Mongo 4.0 to Travis
-- Fix error when setting a string as a ComplexDateTimeField #2253
-- Bump development Status classifier to Production/Stable #2232
-- Improve Queryset.get to avoid confusing MultipleObjectsReturned message in case multiple match are found #630
-- Fixed a bug causing inaccurate query results, while combining ``__raw__`` and regular filters for the same field #2264
-- Add support for the `elemMatch` projection operator in .fields() (e.g BlogPost.objects.fields(elemMatch__comments="test")) #2267
-- DictField validate failed without default connection (bug introduced in 0.19.0) #2239
-- Remove methods that were deprecated years ago:
-  - name parameter in Field constructor e.g `StringField(name="...")`, was replaced by db_field
-  - Queryset.slave_okay() was deprecated since pymongo3
-  - dropDups was dropped with MongoDB3
-  - ``Queryset._ensure_indexes`` and ``Queryset.ensure_indexes``, the right method to use is ``Document.ensure_indexes``
-- Added pre-commit for development/CI #2212
-- Renamed requirements-lint.txt to requirements-dev.txt #2212
-- Support for setting ReadConcern #2255

-Changes in 0.19.1
-=================
-- Tests require Pillow < 7.0.0 as it dropped Python2 support
-- DEPRECATION: The interface of ``QuerySet.aggregate`` method was changed, it no longer takes an unpacked list of
-  pipeline steps (*pipeline) but simply takes the pipeline list just like ``pymongo.Collection.aggregate`` does. #2079

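For illustration, a minimal sketch (hypothetical ``Order`` model; an open connection is assumed) of the ``QuerySet.aggregate`` interface change deprecated in 0.19.1 above, where one pipeline list is passed just as with pymongo's ``Collection.aggregate``:

    from mongoengine import Document, IntField, StringField

    class Order(Document):
        status = StringField()
        customer = StringField()
        amount = IntField()

    pipeline = [
        {"$match": {"status": "paid"}},
        {"$group": {"_id": "$customer", "total": {"$sum": "$amount"}}},
    ]
    results = list(Order.objects.aggregate(pipeline))   # current style: one list
    # deprecated pre-0.19.1 style: Order.objects.aggregate(*pipeline)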
-Changes in 0.19.0
-=================
-- BREAKING CHANGE: ``class_check`` and ``read_preference`` keyword arguments are no longer available when filtering a ``QuerySet``. #2112
-  - Instead of ``Doc.objects(foo=bar, read_preference=...)`` use ``Doc.objects(foo=bar).read_preference(...)``.
-  - Instead of ``Doc.objects(foo=bar, class_check=False)`` use ``Doc.objects(foo=bar).clear_cls_query(...)``.
-  - This change also renames the private ``QuerySet._initial_query`` attribute to ``_cls_query``.
-- BREAKING CHANGE: Removed the deprecated ``format`` param from ``QuerySet.explain``. #2113
-- BREAKING CHANGE: Renamed ``MongoEngineConnectionError`` to ``ConnectionFailure``. #2111
-  - If you catch/use ``MongoEngineConnectionError`` in your code, you'll have to rename it.
-- BREAKING CHANGE: Positional arguments when instantiating a document are no longer supported. #2103
-  - From now on keyword arguments (e.g. ``Doc(field_name=value)``) are required.
-- BREAKING CHANGE: A ``LazyReferenceField`` is now stored in the ``_data`` field of its parent as a ``DBRef``, ``Document``, or ``EmbeddedDocument`` (``ObjectId`` is no longer allowed). #2182
-- DEPRECATION: ``Q.empty`` & ``QNode.empty`` are marked as deprecated and will be removed in a next version of MongoEngine. #2210
-  - Added ability to check if Q or QNode are empty by parsing them to bool.
-  - Instead of ``Q(name="John").empty`` use ``not Q(name="John")``.
-- Fix updating/modifying/deleting/reloading a document that's sharded by a field with ``db_field`` specified. #2125
-- Only set no_cursor_timeout when requested (fixes an incompatibility with MongoDB 4.2) #2148
-- ``ListField`` now accepts an optional ``max_length`` parameter. #2110
-- Improve error message related to InvalidDocumentError #2180
-- Added BulkWriteError to replace NotUniqueError which was misleading in bulk write insert #2152
-- Added ability to compare Q and Q operations #2204
-- Added ability to use a db alias on query_counter #2194
-- Added ability to specify collations for querysets with ``Doc.objects.collation`` #2024
-- Fix updates of a list field by negative index #2094
-- Switch from nosetest to pytest as test runner #2114
-- The codebase is now formatted using ``black``. #2109
-- Documentation improvements:
-  - Documented how `pymongo.monitoring` can be used to log all queries issued by MongoEngine to the driver.

-Changes in 0.18.2
-=================
-- Replace deprecated PyMongo v2.x methods with their v3.x equivalents in the ``SequenceField``. #2097
-- Various code clarity and documentation improvements.

-Changes in 0.18.1
-=================
-- Fix a bug introduced in 0.18.0 which was causing ``Document.save`` to update all the fields instead of updating only the modified fields. This bug only occurred when using a custom PK. #2082
-- Add Python 3.7 to Travis CI. #2058

-Changes in 0.18.0
-=================
-- Drop support for EOL'd MongoDB v2.6, v3.0, and v3.2.
-- MongoEngine now requires PyMongo >= v3.4. Travis CI now tests against MongoDB v3.4 – v3.6 and PyMongo v3.4 – v3.6. #2017 #2066
-- Improve performance by avoiding a call to ``to_mongo`` in ``Document.save``. #2049
-- Connection/disconnection improvements:
-  - Expose ``mongoengine.connection.disconnect`` and ``mongoengine.connection.disconnect_all``.
-  - Fix disconnecting. #566 #1599 #605 #607 #1213 #565
-  - Improve documentation of ``connect``/``disconnect``.
-  - Fix issue when using multiple connections to the same mongo with different credentials. #2047
-  - ``connect`` fails immediately when db name contains invalid characters. #2031 #1718
-- Fix the default write concern of ``Document.save`` that was overwriting the connection write concern. #568
-- Fix querying on ``List(EmbeddedDocument)`` subclasses fields. #1961 #1492
-- Fix querying on ``(Generic)EmbeddedDocument`` subclasses fields. #475
-- Fix ``QuerySet.aggregate`` so that it takes limit and skip value into account. #2029
-- Generate unique indices for ``SortedListField`` and ``EmbeddedDocumentListFields``. #2020
-- BREAKING CHANGE: Changed the behavior of a custom field validator (i.e ``validation`` parameter of a ``Field``). It is now expected to raise a ``ValidationError`` instead of returning ``True``/``False``. #2050
-- BREAKING CHANGES (associated with connection/disconnection fixes):
-  - Calling ``connect`` 2 times with the same alias and different parameter will raise an error (should call ``disconnect`` first).
-  - ``disconnect`` now clears ``mongoengine.connection._connection_settings``.
-  - ``disconnect`` now clears the cached attribute ``Document._collection``.
-- BREAKING CHANGE: ``EmbeddedDocument.save`` & ``.reload`` no longer exist. #1552

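For illustration, a minimal sketch (hypothetical ``Person`` model and age validator) of the 0.18.0 breaking change above, where a custom ``validation`` callable raises ``ValidationError`` instead of returning ``True``/``False``:

    from mongoengine import Document, IntField, ValidationError

    def validate_age(value):
        if value < 0:
            raise ValidationError("age must be non-negative")  # 0.18.0+ contract
        # the pre-0.18.0 style would simply have been: return value >= 0

    class Person(Document):
        age = IntField(validation=validate_age)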
-Changes in 0.17.0
-=================
-- POTENTIAL BREAKING CHANGE: All result fields are now passed, including internal fields (``_cls``, ``_id``) when using ``QuerySet.as_pymongo``. #1976
-- Document a BREAKING CHANGE introduced in 0.15.3 and not reported at that time. #1995
-- DEPRECATION: ``EmbeddedDocument.save`` & ``.reload`` are marked as deprecated and will be removed in a next version of MongoEngine. #1552
-- Fix ``QuerySet.only`` working improperly after using ``QuerySet.count`` of the same instance of a ``QuerySet``.
-- Fix ``batch_size`` that was not copied when cloning a ``QuerySet`` object. #2011
-- Fix ``InvalidStringData`` error when using ``modify`` on a ``BinaryField``. #1127
-- Fix test suite and CI to support MongoDB v3.4. #1445
-- Fix reference fields querying the database on each access if value contains orphan DBRefs.

-Changes in 0.16.3
-=================
-- Fix ``$push`` with the ``$position`` operator not working with lists in embedded documents. #1965

-Changes in 0.16.2
-=================
-- Fix ``Document.save`` that fails when called with ``write_concern=None`` (regression of 0.16.1). #1958

-Changes in 0.16.1
-=================
-- Fix ``_cls`` that is not set properly in the ``Document`` constructor (regression). #1950
-- Fix a bug in the ``_delta`` method - update of a ``ListField`` depends on an unrelated dynamic field update. #1733
-- Remove PyMongo's deprecated ``Collection.save`` method and use ``Collection.insert_one`` instead. #1899

-Changes in 0.16.0
-=================
-- POTENTIAL BREAKING CHANGES:
-  - ``EmbeddedDocumentField`` will no longer accept references to Document classes in its constructor. #1661
-  - Get rid of the ``basecls`` parameter from the ``DictField`` constructor (dead code). #1876
-  - Default value of the ``ComplexDateTime`` field is now ``None`` (and no longer the current datetime). #1368
-- Fix an unhashable ``TypeError`` when referencing a ``Document`` with a compound key in an ``EmbeddedDocument``. #1685
-- Fix a bug where an ``EmbeddedDocument`` with the same id as its parent would not be tracked for changes. #1768
-- Fix the fact that a bulk ``QuerySet.insert`` was not setting primary keys of inserted document instances. #1919
-- Fix a bug when referencing an abstract class in a ``ReferenceField``. #1920
-- Allow modifications to the document made in ``pre_save_post_validation`` to be taken into account. #1202
-- Replace MongoDB v2.4 tests in Travis CI with MongoDB v3.2. #1903
-- Fix side effects of using ``QuerySet.no_dereference`` on other documents. #1677
-- Fix ``TypeError`` when using lazy Django translation objects as translated choices. #1879
-- Improve Python 2-3 codebase compatibility. #1889
-- Fix support for changing the default value of the ``ComplexDateTime`` field. #1368
-- Improve error message in case an ``EmbeddedDocumentListField`` receives an ``EmbeddedDocument`` instance instead of a list. #1877
-- Fix the ``inc`` and ``dec`` operators for the ``DecimalField``. #1517 #1320
-- Ignore ``killcursors`` queries in ``query_counter`` context manager. #1869
-- Fix the fact that ``query_counter`` was modifying the initial profiling level in case it was != 0. #1870
-- Repair the ``no_sub_classes`` context manager + fix the fact that it was swallowing exceptions. #1865
-- Fix index creation error that was swallowed by ``hasattr`` under Python 2. #1688
-- ``QuerySet.limit`` function behaviour: Passing 0 as parameter will return all the documents in the cursor. #1611
-- Bulk insert updates the IDs of the input documents instances. #1919
-- Fix a harmless bug related to ``GenericReferenceField`` where modifications in the generic-referenced document were tracked in the parent. #1934
-- Improve validation of the ``BinaryField``. #273
-- Implement lazy regex compiling in Field classes to improve ``import mongoengine`` performance. #1806
-- Update ``GridFSProxy.__str__`` so that it would always print both the filename and grid_id. #710
-- Add ``__repr__`` to ``Q`` and ``QCombination`` classes. #1843
-- Fix bug in the ``BaseList.__iter__`` operator (was occuring when modifying a BaseList while iterating over it). #1676
-- Add a ``DateField``. #513
-- Various improvements to the documentation.
-- Various code quality improvements.

-Changes in 0.15.3
-=================
-- ``Queryset.update/update_one`` methods now return an ``UpdateResult`` when ``full_result=True`` is provided and no longer a dict. #1491
-- Improve ``LazyReferenceField`` and ``GenericLazyReferenceField`` with nested fields. #1704
-- Fix the subfield resolve error in ``generic_emdedded_document`` query. #1651 #1652
-- Use each modifier only with ``$position``. #1673 #1675
-- Fix validation errors in the ``GenericEmbeddedDocumentField``. #1067
-- Update cached fields when a ``fields`` argument is given. #1712
-- Add a ``db`` parameter to ``register_connection`` for compatibility with ``connect``.
-- Use PyMongo v3.x's ``insert_one`` and ``insert_many`` in ``Document.insert``. #1491
-- Use PyMongo v3.x's ``update_one`` and ``update_many`` in ``Document.update`` and ``QuerySet.update``. #1491
-- Fix how ``reload(fields)`` affects changed fields. #1371
-- Fix a bug where the read-only access to the database fails when trying to create indexes. #1338

-Changes in 0.15.0
-=================
-- Add ``LazyReferenceField`` and ``GenericLazyReferenceField``. #1230

-Changes in 0.14.1
-=================
-- Remove ``SemiStrictDict`` and start using a regular dict for ``BaseDocument._data``. #1630
-- Add support for the ``$position`` param in the ``$push`` operator. #1566
-- Fix ``DateTimeField`` interpreting an empty string as today. #1533
-- Add a missing ``__ne__`` method to the ``GridFSProxy`` class. #1632
-- Fix ``BaseQuerySet._fields_to_db_fields``. #1553

-Changes in 0.14.0
-=================
-- BREAKING CHANGE: Remove the ``coerce_types`` param from ``QuerySet.as_pymongo``. #1549
-- POTENTIAL BREAKING CHANGE: Make ``EmbeddedDocument`` not hashable by default. #1528
-- Improve code quality. #1531, #1540, #1541, #1547

-Changes in 0.13.0
-=================
-- POTENTIAL BREAKING CHANGE: Added Unicode support to the ``EmailField``, see docs/upgrade.rst for details.

-Changes in 0.12.0
-=================
-- POTENTIAL BREAKING CHANGE: Fix ``limit``/``skip``/``hint``/``batch_size`` chaining. #1476
-- POTENTIAL BREAKING CHANGE: Change a public ``QuerySet.clone_into`` method to a private ``QuerySet._clone_into``. #1476
-- Fix the way ``Document.objects.create`` works with duplicate IDs. #1485
-- Fix connecting to a replica set with PyMongo 2.x. #1436
-- Fix using sets in field choices. #1481
-- Fix deleting items from a ``ListField``. #1318
-- Fix an obscure error message when filtering by ``field__in=non_iterable``. #1237
-- Fix behavior of a ``dec`` update operator. #1450
-- Add a ``rename`` update operator. #1454
-- Add validation for the ``db_field`` parameter. #1448
-- Fix the error message displayed when querying an ``EmbeddedDocumentField`` by an invalid value. #1440
-- Fix the error message displayed when validating Unicode URLs. #1486
-- Raise an error when trying to save an abstract document. #1449

-Changes in 0.11.0
-=================
-- BREAKING CHANGE: Rename ``ConnectionError`` to ``MongoEngineConnectionError`` since the former is a built-in exception name in Python v3.x. #1428
-- BREAKING CHANGE: Drop Python v2.6 support. #1428
-- BREAKING CHANGE: ``from mongoengine.base import ErrorClass`` won't work anymore for any error from ``mongoengine.errors`` (e.g. ``ValidationError``). Use ``from mongoengine.errors import ErrorClass instead``. #1428
-- BREAKING CHANGE: Accessing a broken reference will raise a ``DoesNotExist`` error. In the past it used to return ``None``. #1334
-- Fix absent rounding for the ``DecimalField`` when ``force_string`` is set. #1103

-Changes in 0.10.8
-=================
-- Add support for ``QuerySet.batch_size``. (#1426)
-- Fix a query set iteration within an iteration. #1427
-- Fix an issue where specifying a MongoDB URI host would override more information than it should. #1421
-- Add an ability to filter the ``GenericReferenceField`` by an ``ObjectId`` and a ``DBRef``. #1425
-- Fix cascading deletes for models with a custom primary key field. #1247
-- Add ability to specify an authentication mechanism (e.g. X.509). #1333
-- Add support for falsy primary keys (e.g. ``doc.pk = 0``). #1354
-- Fix ``QuerySet.sum/average`` for fields w/ an explicit ``db_field``. #1417
-- Fix filtering by ``embedded_doc=None``. #1422
-- Add support for ``Cursor.comment``. #1420
-- Fix ``doc.get_<field>_display`` methods. #1419
-- Fix the ``__repr__`` method of the ``StrictDict`` #1424
-- Add a deprecation warning for Python v2.6.

-Changes in 0.10.7
-=================
-- Drop Python 3.2 support #1390
-- Fix a bug where a dynamic doc has an index inside a dict field. #1278
-- Fix: ``ListField`` minus index assignment does not work. #1128
-- Fix cascade delete mixing among collections. #1224
-- Add ``signal_kwargs`` argument to ``Document.save``, ``Document.delete`` and ``BaseQuerySet.insert`` to be passed to signals calls. #1206
-- Raise ``OperationError`` when trying to do a ``drop_collection`` on document with no collection set.
-- Fix a bug where a count on ``ListField`` of ``EmbeddedDocumentField`` fails. #1187
-- Fix ``LongField`` values stored as int32 in Python 3. #1253
-- ``MapField`` now handles unicode keys correctly. #1267
-- ``ListField`` now handles negative indicies correctly. #1270
-- Fix an ``AttributeError`` when initializing an ``EmbeddedDocument`` with positional args. #681
-- Fix a ``no_cursor_timeout`` error with PyMongo v3.x. #1304
-- Replace map-reduce based ``QuerySet.sum/average`` with aggregation-based implementations. #1336
-- Fix support for ``__`` to escape field names that match operators' names in ``update``. #1351
-- Fix ``BaseDocument._mark_as_changed``. #1369
-- Add support for pickling ``QuerySet`` instances. #1397
-- Fix connecting to a list of hosts. #1389
-- Fix a bug where accessing broken references wouldn't raise a ``DoesNotExist`` error. #1334
-- Fix not being able to specify ``use_db_field=False`` on ``ListField(EmbeddedDocumentField)`` instances. #1218
-- Improvements to the dictionary field's docs. #1383

-Changes in 0.10.6
-=================
-- Add support for mocking MongoEngine based on mongomock. #1151
-- Fix not being able to run tests on Windows. #1153
-- Allow creation of sparse compound indexes. #1114

-Changes in 0.10.5
-=================
-- Fix for reloading of strict with special fields. #1156

-Changes in 0.10.4
-=================
-- ``SaveConditionError`` is now importable from the top level package. #1165
-- Add a ``QuerySet.upsert_one`` method. #1157

-Changes in 0.10.3
-=================
-- Fix ``read_preference`` (it had chaining issues with PyMongo v2.x and it didn't work at all with PyMongo v3.x). #1042

-Changes in 0.10.2
-=================
-- Allow shard key to point to a field in an embedded document. #551
-- Allow arbirary metadata in fields. #1129
-- ReferenceFields now support abstract document types. #837

-Changes in 0.10.1
-=================
-- Fix infinite recursion with cascade delete rules under specific conditions. #1046
-- Fix ``CachedReferenceField`` bug when loading cached docs as ``DBRef`` but failing to save them. #1047
-- Fix ignored chained options. #842
-- ``Document.save``'s ``save_condition`` error raises a ``SaveConditionError`` exception. #1070
-- Fix ``Document.reload`` for the ``DynamicDocument``. #1050
-- ``StrictDict`` & ``SemiStrictDict`` are shadowed at init time. #1105
-- Fix ``ListField`` negative index assignment not working. #1119
-- Remove code that marks a field as changed when the field has a default value but does not exist in the database. #1126
-- Remove test dependencies (nose and rednose) from install dependencies. #1079
-- Recursively build a query when using the ``elemMatch`` operator. #1130
-- Fix instance back references for lists of embedded documents. #1131

-Changes in 0.10.0
-=================
-- Django support was removed and will be available as a separate extension. #958
-- Allow to load undeclared field with meta attribute 'strict': False #957
-- Support for PyMongo 3+ #946
-- Removed get_or_create() deprecated since 0.8.0. #300
-- Improve Document._created status when switch collection and db #1020
-- Queryset update doesn't go through field validation #453
-- Added support for specifying authentication source as option ``authSource`` in URI. #967
-- Fixed mark_as_changed to handle higher/lower level fields changed. #927
-- ListField of embedded docs doesn't set the _instance attribute when iterating over it #914
-- Support += and *= for ListField #595
-- Use sets for populating dbrefs to dereference
-- Fixed unpickled documents replacing the global field's list. #888
-- Fixed storage of microseconds in ComplexDateTimeField and unused separator option. #910
-- Don't send a "cls" option to ensureIndex (related to https://jira.mongodb.org/browse/SERVER-769)
-- Fix for updating sorting in SortedListField. #978
-- Added __ support to escape field name in fields lookup keywords that match operators names #949
-- Fix for issue where FileField deletion did not free space in GridFS.
-- No_dereference() not respected on embedded docs containing reference. #517
-- Document save raise an exception if save_condition fails #1005
-- Fixes some internal _id handling issue. #961
-- Updated URL and Email Field regex validators, added schemes argument to URLField validation. #652
-- Capped collection multiple of 256. #1011
-- Added ``BaseQuerySet.aggregate_sum`` and ``BaseQuerySet.aggregate_average`` methods.
-- Fix for delete with write_concern {'w': 0}. #1008
-- Allow dynamic lookup for more than two parts. #882
-- Added support for min_distance on geo queries. #831
-- Allow to add custom metadata to fields #705

-Changes in 0.9.0
-================
-- Update FileField when creating a new file #714
-- Added ``EmbeddedDocumentListField`` for Lists of Embedded Documents. #826
-- ComplexDateTimeField should fall back to None when null=True #864
-- Request Support for $min, $max Field update operators #863
-- ``BaseDict`` does not follow ``setdefault`` #866
-- Add support for $type operator # 766
-- Fix tests for pymongo 2.8+ #877
-- No module named 'django.utils.importlib' (Django dev) #872
-- Field Choices Now Accept Subclasses of Documents
-- Ensure Indexes before Each Save #812
-- Generate Unique Indices for Lists of EmbeddedDocuments #358
-- Sparse fields #515
-- write_concern not in params of Collection#remove #801
-- Better BaseDocument equality check when not saved #798
-- OperationError: Shard Keys are immutable. Tried to update id even though the document is not yet saved #771
-- with_limit_and_skip for count should default like in pymongo #759
-- Fix storing value of precision attribute in DecimalField #787
-- Set attribute to None does not work (at least for fields with default values) #734
-- Querying by a field defined in a subclass raises InvalidQueryError #744
-- Add Support For MongoDB 2.6.X's maxTimeMS #778
-- abstract shouldn't be inherited in EmbeddedDocument # 789
-- Allow specifying the '_cls' as a field for indexes #397
-- Stop ensure_indexes running on a secondaries unless connection is through mongos #746
-- Not overriding default values when loading a subset of fields #399
-- Saving document doesn't create new fields in existing collection #620
-- Added ``Queryset.aggregate`` wrapper to aggregation framework #703
-- Added support to show original model fields on to_json calls instead of db_field #697
-- Added Queryset.search_text to Text indexes searchs #700
-- Fixed tests for Django 1.7 #696
-- Follow ReferenceFields in EmbeddedDocuments with select_related #690
-- Added preliminary support for text indexes #680
-- Added ``elemMatch`` operator as well - ``match`` is too obscure #653
-- Added support for progressive JPEG #486 #548
-- Allow strings to be used in index creation #675
-- Fixed EmbeddedDoc weakref proxy issue #592
-- Fixed nested reference field distinct error #583
-- Fixed change tracking on nested MapFields #539
-- Dynamic fields in embedded documents now visible to queryset.only() / qs.exclude() #425 #507
-- Add authentication_source option to register_connection #178 #464 #573 #580 #590
-- Implemented equality between Documents and DBRefs #597
-- Fixed ReferenceField inside nested ListFields dereferencing problem #368
-- Added the ability to reload specific document fields #100
-- Added db_alias support and fixes for custom map/reduce output #586
-- post_save signal now has access to delta information about field changes #594 #589
-- Don't query with $orderby for qs.get() #600
-- Fix id shard key save issue #636
-- Fixes issue with recursive embedded document errors #557
-- Fix clear_changed_fields() clearing unsaved documents bug #602
-- Removing support for Django 1.4.x, pymongo 2.5.x, pymongo 2.6.x.
-- Removing support for Python < 2.6.6
-- Fixed $maxDistance location for geoJSON $near queries with MongoDB 2.6+ #664
-- QuerySet.modify() and Document.modify() methods to provide find_and_modify() like behaviour #677 #773
-- Added support for the using() method on a queryset #676
-- PYPY support #673
-- Connection pooling #674
-- Avoid to open all documents from cursors in an if stmt #655
-- Ability to clear the ordering #657
-- Raise NotUniqueError in Document.update() on pymongo.errors.DuplicateKeyError #626
-- Slots - memory improvements #625
-- Fixed incorrectly split a query key when it ends with "_" #619
-- Geo docs updates #613
-- Workaround a dateutil bug #608
-- Conditional save for atomic-style operations #511
-- Allow dynamic dictionary-style field access #559
-- Increase email field length to accommodate new TLDs #726
-- index_cls is ignored when deciding to set _cls as index prefix #733
-- Make 'db' argument to connection optional #737
-- Allow atomic update for the entire ``DictField`` #742
-- Added MultiPointField, MultiLineField, MultiPolygonField
-- Fix multiple connections aliases being rewritten #748
-- Fixed a few instances where reverse_delete_rule was written as reverse_delete_rules. #791
-- Make ``in_bulk()`` respect ``no_dereference()`` #775
-- Handle None from model __str__; Fixes #753 #754
-- _get_changed_fields fix for embedded documents with id field. #925

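For illustration, a minimal sketch (hypothetical ``Counter`` model; an open connection is assumed) of the find_and_modify-like ``QuerySet.modify()``/``Document.modify()`` listed for 0.9.0 above:

    from mongoengine import Document, IntField, StringField

    class Counter(Document):
        name = StringField()
        value = IntField(default=0)

    # find-and-modify on a queryset: returns the matched document
    before = Counter.objects(name="hits").modify(inc__value=1)            # as it was
    after = Counter.objects(name="hits").modify(inc__value=1, new=True)   # after update

    # atomic in-place update of one loaded instance
    doc = Counter.objects.first()
    doc.modify(set__value=0)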
-Changes in 0.8.7
-================
-- Calling reload on deleted / nonexistent documents raises DoesNotExist (#538)
-- Stop ensure_indexes running on a secondaries (#555)
-- Fix circular import issue with django auth (#531) (#545)

-Changes in 0.8.6
-================
-- Fix django auth import (#531)

-Changes in 0.8.5
-================
-- Fix multi level nested fields getting marked as changed (#523)
-- Django 1.6 login fix (#522) (#527)
-- Django 1.6 session fix (#509)
-- EmbeddedDocument._instance is now set when setting the attribute (#506)
-- Fixed EmbeddedDocument with ReferenceField equality issue (#502)
-- Fixed GenericReferenceField serialization order (#499)
-- Fixed count and none bug (#498)
-- Fixed bug with .only() and DictField with digit keys (#496)
-- Added user_permissions to Django User object (#491, #492)
-- Fix updating Geo Location fields (#488)
-- Fix handling invalid dict field value (#485)
-- Added app_label to MongoUser (#484)
-- Use defaults when host and port are passed as None (#483)
-- Fixed distinct casting issue with ListField of EmbeddedDocuments (#470)
-- Fixed Django 1.6 sessions (#454, #480)

-Changes in 0.8.4
-================
-- Remove database name necessity in uri connection schema (#452)
-- Fixed "$pull" semantics for nested ListFields (#447)
-- Allow fields to be named the same as query operators (#445)
-- Updated field filter logic - can now exclude subclass fields (#443)
-- Fixed dereference issue with embedded listfield referencefields (#439)
-- Fixed slice when using inheritance causing fields to be excluded (#437)
-- Fixed ._get_db() attribute after a Document.switch_db() (#441)
-- Dynamic Fields store and recompose Embedded Documents / Documents correctly (#449)
-- Handle dynamic fieldnames that look like digits (#434)
-- Added get_user_document and improve mongo_auth module (#423)
-- Added str representation of GridFSProxy (#424)
-- Update transform to handle docs erroneously passed to unset (#416)
-- Fixed indexing - turn off _cls (#414)
-- Fixed dereference threading issue in ComplexField.__get__ (#412)
-- Fixed QuerySetNoCache.count() caching (#410)
-- Don't follow references in _get_changed_fields (#422, #417)
-- Allow args and kwargs to be passed through to_json (#420)

-Changes in 0.8.3
-================
-- Fixed EmbeddedDocuments with ``id`` also storing ``_id`` (#402)
-- Added get_proxy_object helper to filefields (#391)
-- Added QuerySetNoCache and QuerySet.no_cache() for lower memory consumption (#365)
-- Fixed sum and average mapreduce dot notation support (#375, #376, #393)
-- Fixed as_pymongo to return the id (#386)
-- Document.select_related() now respects ``db_alias`` (#377)
-- Reload uses shard_key if applicable (#384)
-- Dynamic fields are ordered based on creation and stored in _fields_ordered (#396)
-- Fixed pickling dynamic documents ``_dynamic_fields`` (#387)
-- Fixed ListField setslice and delslice dirty tracking (#390)
-- Added Django 1.5 PY3 support (#392)
-- Added match ($elemMatch) support for EmbeddedDocuments (#379)
-- Fixed weakref being valid after reload (#374)
-- Fixed queryset.get() respecting no_dereference (#373)
-- Added full_result kwarg to update (#380)

-Changes in 0.8.2
-================
-- Added compare_indexes helper (#361)
-- Fixed cascading saves which weren't turned off as planned (#291)
-- Fixed Datastructures so instances are a Document or EmbeddedDocument (#363)
-- Improved cascading saves write performance (#361)
-- Fixed ambiguity and differing behaviour regarding field defaults (#349)
-- ImageFields now include PIL error messages if invalid error (#353)
-- Added lock when calling doc.Delete() for when signals have no sender (#350)
-- Reload forces read preference to be PRIMARY (#355)
-- Querysets are now lest restrictive when querying duplicate fields (#332, #333)
-- FileField now honouring db_alias (#341)
-- Removed customised __set__ change tracking in ComplexBaseField (#344)
-- Removed unused var in _get_changed_fields (#347)
-- Added pre_save_post_validation signal (#345)
-- DateTimeField now auto converts valid datetime isostrings into dates (#343)
-- DateTimeField now uses dateutil for parsing if available (#343)
-- Fixed Doc.objects(read_preference=X) not setting read preference (#352)
-- Django session ttl index expiry fixed (#329)
-- Fixed pickle.loads (#342)
-- Documentation fixes

-Changes in 0.8.1
-================
-- Fixed Python 2.6 django auth importlib issue (#326)
-- Fixed pickle unsaved document regression (#327)

-Changes in 0.8.0
-================
-- Fixed querying ReferenceField custom_id (#317)
-- Fixed pickle issues with collections (#316)
-- Added ``get_next_value`` preview for SequenceFields (#319)
-- Added no_sub_classes context manager and queryset helper (#312)
-- Querysets now utilises a local cache
-- Changed __len__ behaviour in the queryset (#247, #311)
-- Fixed querying string versions of ObjectIds issue with ReferenceField (#307)
-- Added $setOnInsert support for upserts (#308)
-- Upserts now possible with just query parameters (#309)
-- Upserting is the only way to ensure docs are saved correctly (#306)
-- Fixed register_delete_rule inheritance issue
-- Fix cloning of sliced querysets (#303)
-- Fixed update_one write concern (#302)
-- Updated minimum requirement for pymongo to 2.5
-- Add support for new geojson fields, indexes and queries (#299)
-- If values cant be compared mark as changed (#287)
-- Ensure as_pymongo() and to_json honour only() and exclude() (#293)
-- Document serialization uses field order to ensure a strict order is set (#296)
-- DecimalField now stores as float not string (#289)
-- UUIDField now stores as a binary by default (#292)
-- Added Custom User Model for Django 1.5 (#285)
-- Cascading saves now default to off (#291)
-- ReferenceField now store ObjectId's by default rather than DBRef (#290)
-- Added ImageField support for inline replacements (#86)
-- Added SequenceField.set_next_value(value) helper (#159)
-- Updated .only() behaviour - now like exclude it is chainable (#202)
-- Added with_limit_and_skip support to count() (#235)
-- Objects queryset manager now inherited (#256)
-- Updated connection to use MongoClient (#262, #274)
-- Fixed db_alias and inherited Documents (#143)
-- Documentation update for document errors (#124)
-- Deprecated ``get_or_create`` (#35)
-- Updated inheritable objects created by upsert now contain _cls (#118)
-- Added support for creating documents with embedded documents in a single operation (#6)
-- Added to_json and from_json to Document (#1)
-- Added to_json and from_json to QuerySet (#131)
-- Updated index creation now tied to Document class (#102)
-- Added none() to queryset (#127)
-- Updated SequenceFields to allow post processing of the calculated counter value (#141)
-- Added clean method to documents for pre validation data cleaning (#60)
-- Added support setting for read prefrence at a query level (#157)
-- Added _instance to EmbeddedDocuments pointing to the parent (#139)
-- Inheritance is off by default (#122)
-- Remove _types and just use _cls for inheritance (#148)
-- Only allow QNode instances to be passed as query objects (#199)
-- Dynamic fields are now validated on save (#153) (#154)
-- Added support for multiple slices and made slicing chainable. (#170) (#190) (#191)
-- Fixed GridFSProxy __getattr__ behaviour (#196)
-- Fix Django timezone support (#151)
-- Simplified Q objects, removed QueryTreeTransformerVisitor (#98) (#171)
-- FileFields now copyable (#198)
-- Querysets now return clones and are no longer edit in place (#56)
-- Added support for $maxDistance (#179)
-- Uses getlasterror to test created on updated saves (#163)
-- Fixed inheritance and unique index creation (#140)
-- Fixed reverse delete rule with inheritance (#197)
-- Fixed validation for GenericReferences which haven't been dereferenced
-- Added switch_db context manager (#106)
-- Added switch_db method to document instances (#106)
-- Added no_dereference context manager (#82) (#61)
-- Added switch_collection context manager (#220)
-- Added switch_collection method to document instances (#220)
-- Added support for compound primary keys (#149) (#121)
-- Fixed overriding objects with custom manager (#58)
-- Added no_dereference method for querysets (#82) (#61)
-- Undefined data should not override instance methods (#49)
-- Added Django Group and Permission (#142)
-- Added Doc class and pk to Validation messages (#69)
-- Fixed Documents deleted via a queryset don't call any signals (#105)
-- Added the "get_decoded" method to the MongoSession class (#216)
-- Fixed invalid choices error bubbling (#214)
-- Updated Save so it calls $set and $unset in a single operation (#211)
-- Fixed inner queryset looping (#204)

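For illustration, a minimal sketch (hypothetical ``User`` model and database names) of the ``switch_db`` context manager added in 0.8.0 above:

    from mongoengine import Document, StringField, connect
    from mongoengine.context_managers import switch_db

    class User(Document):
        name = StringField()

    connect("app-db", alias="default")
    connect("archive-db", alias="archive")

    with switch_db(User, "archive") as ArchivedUser:
        ArchivedUser(name="ross").save()   # written to archive-db, not app-db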
Changes in 0.7.10
|
|
||||||
=================
|
|
||||||
- Fix UnicodeEncodeError for dbref (#278)
|
|
||||||
- Allow construction using positional parameters (#268)
|
|
||||||
- Updated EmailField length to support long domains (#243)
|
|
||||||
- Added 64-bit integer support (#251)
|
|
||||||
- Added Django sessions TTL support (#224)
|
|
||||||
- Fixed issue with numerical keys in MapField(EmbeddedDocumentField()) (#240)
|
|
||||||
- Fixed clearing _changed_fields for complex nested embedded documents (#237, #239, #242)
|
|
||||||
- Added "id" back to _data dictionary (#255)
|
|
||||||
- Only mark a field as changed if the value has changed (#258)
|
|
||||||
- Explicitly check for Document instances when dereferencing (#261)
|
|
||||||
- Fixed order_by chaining issue (#265)
|
|
||||||
- Added dereference support for tuples (#250)
|
|
||||||
- Resolve field name to db field name when using distinct(#260, #264, #269)
|
|
||||||
- Added kwargs to doc.save to help interop with django (#223, #270)
|
|
||||||
- Fixed cloning querysets in PY3
|
|
||||||
- Int fields no longer unset in save when changed to 0 (#272)
|
|
||||||
- Fixed ReferenceField query chaining bug fixed (#254)
|
|
||||||
|
|
||||||
Changes in 0.7.9
|
|
||||||
================
|
|
||||||
- Better fix handling for old style _types
|
|
||||||
- Embedded SequenceFields follow collection naming convention
|
|
||||||
|
|
||||||
Changes in 0.7.8
|
|
||||||
================
|
|
||||||
- Fix sequence fields in embedded documents (#166)
|
|
||||||
- Fix query chaining with .order_by() (#176)
|
|
||||||
- Added optional encoding and collection config for Django sessions (#180, #181, #183)
|
|
||||||
- Fixed EmailField so can add extra validation (#173, #174, #187)
|
|
||||||
- Fixed bulk inserts can now handle custom pk's (#192)
|
|
||||||
- Added as_pymongo method to return raw or cast results from pymongo (#193)
|
|
||||||
|
|
||||||
Changes in 0.7.7
|
|
||||||
================
|
|
||||||
- Fix handling for old style _types
|
|
||||||
|
|
||||||
Changes in 0.7.6
================
- Unicode fix for repr (#133)
- Allow updates with match operators (#144)
- Updated URLField - the validation regex can now be overridden (#136)
- Allow Django AuthenticationBackends to work with Django user (hmarr/mongoengine#573)
- Fixed reload issue with ReferenceField where dbref=False (#138)

Changes in 0.7.5
================
- ReferenceFields with dbref=False use ObjectId instead of strings (#134)
  See ticket for upgrade notes (#134)

Changes in 0.7.4
================
- Fixed index inheritance issues - firmed up testcases (#123) (#125)

Changes in 0.7.3
================
- Reverted EmbeddedDocuments meta handling - inheritance can now be turned off (#119)

Changes in 0.7.2
================
- Updated index spec generation so it's not destructive (#113)

Changes in 0.7.1
================
- Fixed index spec inheritance (#111)

Changes in 0.7.0
================
- Updated queryset.delete so you can use it with skip / limit (#107)
- Updated index creation so kwargs can be passed through refs (#104)
- Fixed Q object merge edge case (#109)
- Fixed reloading on sharded documents (hmarr/mongoengine#569)
- Added NotUniqueError for duplicate keys (#62)
- Added custom collection / sequence naming for SequenceFields (#92)
- Fixed UnboundLocalError in composite index with pk field (#88)
- Updated ReferenceFields to optionally store ObjectId strings;
  this will become the default in 0.8 (#89)
- Added FutureWarning - save will default to ``cascade=False`` in 0.8
- Added example of indexing embedded document fields (#75)
- Fixed ImageField resizing when forcing size (#80)
- Added flexibility for fields handling bad data (#78)
- Embedded Documents no longer handle meta definitions
- Use weakref proxies in base lists / dicts (#74)
- Improved queryset filtering (hmarr/mongoengine#554)
- Fixed Dynamic Documents and Embedded Documents (hmarr/mongoengine#561)
- Fixed abstract classes and shard keys (#64)
- Fixed Python 2.5 support
- Added Python 3 support (thanks to Laine Heron)

Changes in 0.6.20
=================
- Added support for distinct and db_alias (#59)
- Improved support for chained querysets when constraining the same fields (hmarr/mongoengine#554)
- Fixed BinaryField lookup re (#48)

Changes in 0.6.19
=================
- Added Binary support to UUID (#47)
- Fixed MapField lookup for fields without declared lookups (#46)
- Fixed BinaryField python value issue (#48)
- Fixed SequenceField non-numeric value lookup (#41)
- Fixed queryset manager issue (#52)
- Fixed FileField comparison (hmarr/mongoengine#547)

Changes in 0.6.18
@@ -762,7 +80,7 @@ Changes in 0.6.12
- Fixed error with _delta handling DBRefs

Changes in 0.6.11
=================
- Fixed inconsistency handling None values field attrs
- Fixed map_field embedded db_field issue
- Fixed .save() _delta issue with DBRefs
@@ -787,7 +105,7 @@ Changes in 0.6.8
================
- Fixed FileField losing reference when no default set
- Removed possible race condition from FileField (grid_file)
- Added assignment to save, can now do: ``b = MyDoc(**kwargs).save()``
- Added support for pull operations on nested EmbeddedDocuments
- Added support for choices with GenericReferenceFields
- Added support for choices with GenericEmbeddedDocumentFields

@@ -802,7 +120,7 @@ Changes in 0.6.7
- Fixed indexing on '_id' or 'pk' or 'id'
- Invalid data from the DB now raises an InvalidDocumentError
- Cleaned up the Validation Error - docs and code
- Added meta ``auto_create_index`` so you can disable index creation
- Added write concern options to inserts
- Fixed typo in meta for index options
- Bug fix: Read preference now passed correctly
@@ -842,7 +160,8 @@ Changes in 0.6.1
- Fix for replicaSet connections

Changes in 0.6
==============
- Added FutureWarning to inherited classes not declaring 'allow_inheritance' as the default will change in 0.7
- Added support for covered indexes when inheritance is off
- No longer always upsert on save for items with a '_id'
@@ -929,8 +248,8 @@ Changes in v0.5
- Updated default collection naming convention
- Added Document Mixin support
- Fixed queryset __repr__ mid iteration
- Added hint() support, so you can tell Mongo the proper index to use for the query
- Fixed issue with inconsistent setting of _cls breaking inherited referencing
- Added help_text and verbose_name to fields to help with some form libs
- Updated item_frequencies to handle embedded document lookups
- Added delta tracking - now only sets / unsets explicitly changed fields
@@ -1067,6 +386,7 @@ Changes in v0.1.3
  querying takes place
- A few minor bugfixes

Changes in v0.1.2
=================
- Query values may be processed before being used in queries

@@ -1075,6 +395,7 @@ Changes in v0.1.2
- Added ``BooleanField``
- Added ``Document.reload()`` method

Changes in v0.1.1
=================
- Documents may now use capped collections
@@ -1,77 +1,66 @@
from mongoengine import *

connect("tumblelog")


class Comment(EmbeddedDocument):
    content = StringField()
    name = StringField(max_length=120)


class User(Document):
    email = StringField(required=True)
    first_name = StringField(max_length=50)
    last_name = StringField(max_length=50)


class Post(Document):
    title = StringField(max_length=120, required=True)
    author = ReferenceField(User)
    tags = ListField(StringField(max_length=30))
    comments = ListField(EmbeddedDocumentField(Comment))

    # Inheritance must be enabled so TextPost/ImagePost/LinkPost can subclass Post.
    meta = {"allow_inheritance": True}


class TextPost(Post):
    content = StringField()


class ImagePost(Post):
    image_path = StringField()


class LinkPost(Post):
    link_url = StringField()


Post.drop_collection()

john = User(email="jdoe@example.com", first_name="John", last_name="Doe")
john.save()

post1 = TextPost(title="Fun with MongoEngine", author=john)
post1.content = "Took a look at MongoEngine today, looks pretty cool."
post1.tags = ["mongodb", "mongoengine"]
post1.save()

post2 = LinkPost(title="MongoEngine Documentation", author=john)
post2.link_url = "http://tractiondigital.com/labs/mongoengine/docs"
post2.tags = ["mongoengine"]
post2.save()

print("ALL POSTS")
print()
for post in Post.objects:
    print(post.title)
    print("=" * 20)

    if isinstance(post, TextPost):
        print(post.content)

    if isinstance(post, LinkPost):
        print("Link:", post.link_url)

    print()
    print()

print("POSTS TAGGED 'MONGODB'")
print()
for post in Post.objects(tags="mongodb"):
    print(post.title)
print()

num_posts = Post.objects(tags="mongodb").count()
print('Found %d posts with tag "mongodb"' % num_posts)

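Since ``Post`` sets ``allow_inheritance``, all three subclasses share one
collection and queries can be scoped per class. A small follow-up sketch,
assuming the script above has just run::

    # Post.objects iterates documents of every subclass.
    assert Post.objects.count() == 2

    # Querying via a subclass only matches documents of that subclass.
    assert TextPost.objects.count() == 1
    assert ImagePost.objects.count() == 0
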
docs/conf.py
@@ -1,3 +1,4 @@
#
# MongoEngine documentation build configuration file, created by
# sphinx-quickstart on Sun Nov 22 18:14:13 2009.
@@ -10,44 +11,40 @@
# All configuration values have a default; values that are commented out
# serve to show the default.

import os
import sys

import sphinx_rtd_theme

import mongoengine

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath(".."))

# -- General configuration -----------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ["sphinx.ext.autodoc", "sphinx.ext.todo", "readthedocs_ext.readthedocs"]

# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]

# The suffix of source filenames.
source_suffix = ".rst"

# The encoding of source files.
# source_encoding = 'utf-8'

# The master toctree document.
master_doc = "index"

# General information about the project.
project = "MongoEngine"
copyright = "2009, MongoEngine Authors"  # noqa: A001

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = mongoengine.get_version()
# The full version, including alpha/beta/rc tags.
@@ -55,149 +52,144 @@ release = mongoengine.get_version()

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'

# List of documents that shouldn't be included in the build.
# unused_docs = []

# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ["_build"]

# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"

# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []


# -- Options for HTML output ---------------------------------------------------

# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = "sphinx_rtd_theme"

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {"canonical_url": "http://docs.mongoengine.org/en/latest/"}

# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = "favicon.ico"

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
html_sidebars = {
    "index": ["globaltoc.html", "searchbox.html"],
    "**": ["localtoc.html", "relations.html", "searchbox.html"],
}

# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}

# If false, no module index is generated.
# html_use_modindex = True

# If false, no index is generated.
# html_use_index = True

# If true, the index is split into individual pages for each letter.
# html_split_index = False

# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''

# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = ''

# Output file base name for HTML help builder.
htmlhelp_basename = "MongoEnginedoc"


# -- Options for LaTeX output --------------------------------------------------

# The paper size ('letter' or 'a4').
latex_paper_size = "a4"

# The font size ('10pt', '11pt' or '12pt').
# latex_font_size = '10pt'

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
    ("index", "MongoEngine.tex", "MongoEngine Documentation", "Ross Lawley", "manual")
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False

# Additional stuff for the LaTeX preamble.
# latex_preamble = ''

# Documents to append as an appendix to all manuals.
# latex_appendices = []

# If false, no module index is generated.
# latex_use_modindex = True

autoclass_content = "both"

@@ -1,19 +1,90 @@

master:

==============
Django Support
==============

.. note:: Django support has been split from the main MongoEngine
    repository. The *legacy* Django extension may be found bundled with the
    0.9 release of MongoEngine.

Help Wanted!
------------

The MongoEngine team is looking for help contributing and maintaining a new
Django extension for MongoEngine! If you have Django experience and would like
to help contribute to the project, please get in touch on the
`mailing list <http://groups.google.com/group/mongoengine-users>`_ or by
simply contributing on
`GitHub <https://github.com/MongoEngine/django-mongoengine>`_.

v0.7rc1:

=============================
Using MongoEngine with Django
=============================

.. note:: Updated to support Django 1.4

Connecting
==========
In your **settings.py** file, ignore the standard database settings (unless you
also plan to use the ORM in your project), and instead call
:func:`~mongoengine.connect` somewhere in the settings module (see the sketch
at the end of this section).

Authentication
==============
MongoEngine includes a Django authentication backend, which uses MongoDB. The
:class:`~mongoengine.django.auth.User` model is a MongoEngine
:class:`~mongoengine.Document`, but implements most of the methods and
attributes that the standard Django :class:`User` model does - so the two are
moderately compatible. Using this backend will allow you to store users in
MongoDB but still use much of the Django authentication infrastructure (such as
the :func:`login_required` decorator and the :func:`authenticate` function). To
enable the MongoEngine auth backend, add the following to your **settings.py**
file::

    AUTHENTICATION_BACKENDS = (
        'mongoengine.django.auth.MongoEngineBackend',
    )

The :mod:`~mongoengine.django.auth` module also contains a
:func:`~mongoengine.django.auth.get_user` helper function, that takes a user's
:attr:`id` and returns a :class:`~mongoengine.django.auth.User` object.

.. versionadded:: 0.1.3

Sessions
========
Django allows the use of different backend stores for its sessions. MongoEngine
provides a MongoDB-based session backend for Django, which allows you to use
sessions in your Django application with just MongoDB. To enable the MongoEngine
session backend, ensure that your settings module has
``'django.contrib.sessions.middleware.SessionMiddleware'`` in the
``MIDDLEWARE_CLASSES`` field and ``'django.contrib.sessions'`` in your
``INSTALLED_APPS``. From there, all you need to do is add the following line
into your settings module::

    SESSION_ENGINE = 'mongoengine.django.sessions'

.. versionadded:: 0.2.1

Storage
=======
With MongoEngine's support for GridFS via the :class:`~mongoengine.FileField`,
it is useful to have a Django file storage backend that wraps this. The new
storage module is called :class:`~mongoengine.django.storage.GridFSStorage`.
Using it is very similar to using the default FileSystemStorage::

    from mongoengine.django.storage import GridFSStorage
    fs = GridFSStorage()

    filename = fs.save('hello.txt', 'Hello, World!')

All of the `Django Storage API methods
<http://docs.djangoproject.com/en/dev/ref/files/storage/>`_ have been
implemented except :func:`path`. If the filename provided already exists, an
underscore and a number (before the file extension, if one exists) will be
appended to the filename until the generated filename doesn't exist. The
:func:`save` method will return the new filename::

    >>> fs.exists('hello.txt')
    True
    >>> fs.open('hello.txt').read()
    'Hello, World!'
    >>> fs.size('hello.txt')
    13
    >>> fs.url('hello.txt')
    'http://your_media_url/hello.txt'
    >>> fs.open('hello.txt').name
    'hello.txt'
    >>> fs.listdir()
    ([], [u'hello.txt'])

All files will be saved and retrieved in GridFS via the :class:`FileDocument`
document, allowing easy access to the files without the GridFSStorage
backend::

    >>> from mongoengine.django.storage import FileDocument
    >>> FileDocument.objects()
    [<FileDocument: FileDocument object>]

.. versionadded:: 0.4

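The Connecting section above stops short of showing the call itself. A minimal
sketch of a settings module wired up this way - the database name ``mydb`` is
just a placeholder::

    # settings.py (sketch): skip Django's DATABASES setting and register
    # a MongoEngine connection instead.
    from mongoengine import connect

    connect('mydb')  # assumes a local mongod on localhost:27017
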
docs/faq.rst
@@ -1,12 +0,0 @@
==========================
Frequently Asked Questions
==========================

Does MongoEngine support asynchronous drivers (Motor, TxMongo)?
---------------------------------------------------------------

No, MongoEngine is exclusively based on PyMongo and isn't designed to support other drivers.
If this is a requirement for your project, check the alternatives: `uMongo`_ and `MotorEngine`_.

.. _uMongo: https://umongo.readthedocs.io/
.. _MotorEngine: https://motorengine.readthedocs.io/

@@ -4,206 +4,62 @@
Connecting to MongoDB
=====================

Connections in MongoEngine are registered globally and are identified with aliases.
If no ``alias`` is provided during the connection, it will use "default" as alias.

To connect to a running instance of :program:`mongod`, use the :func:`~mongoengine.connect`
function. The first argument is the name of the database to connect to::

    from mongoengine import connect
    connect('project1')

By default, MongoEngine assumes that the :program:`mongod` instance is running
on **localhost** on port **27017**.

If MongoDB is running elsewhere, you need to provide details on how to connect. There are two ways of
doing this: using a connection string in URI format (**this is the preferred method**) or individual attributes
provided as keyword arguments.

Connect with URI string
=======================

When using a connection string in URI format you should specify the connection details
as the :attr:`host` to :func:`~mongoengine.connect`. In a web application context for instance, the URI
is typically read from the config file::

    connect(host="mongodb://127.0.0.1:27017/my_db")

If the database requires authentication, you can specify it in the
URI. As each database can have its own users configured, you need to tell MongoDB
where to look for the user you are working with; that's what the ``?authSource=admin`` bit
of the MongoDB connection string is for::

    # Connects to 'my_db' database by authenticating
    # with given credentials against the 'admin' database (by default as authSource isn't provided)
    connect(host="mongodb://my_user:my_password@127.0.0.1:27017/my_db")

    # Equivalent to previous connection but explicitly states that
    # it should use admin as the authentication source database
    connect(host="mongodb://my_user:my_password@hostname:port/my_db?authSource=admin")

    # Connects to 'my_db' database by authenticating
    # with given credentials against that same database
    connect(host="mongodb://my_user:my_password@127.0.0.1:27017/my_db?authSource=my_db")

The URI string can also be used to configure advanced parameters like ssl, replicaSet, etc. For more
information or examples of URI strings, you can refer to the `official doc <https://docs.mongodb.com/manual/reference/connection-string/>`_::

    connect(host="mongodb://my_user:my_password@127.0.0.1:27017/my_db?authSource=admin&ssl=true&replicaSet=globaldb")

.. note:: URIs containing SRV records (e.g. "mongodb+srv://server.example.com/") can be used as well

Connect with keyword attributes
===============================

The second option for specifying the connection details is to provide the information as keyword
attributes to :func:`~mongoengine.connect`::

    connect('my_db', host='127.0.0.1', port=27017)

If the database requires authentication, :attr:`username`, :attr:`password`
and :attr:`authentication_source` arguments should be provided::

    connect('my_db', username='my_user', password='my_password', authentication_source='admin')

The set of attributes that :func:`~mongoengine.connect` recognizes includes but is not limited to:
:attr:`host`, :attr:`port`, :attr:`read_preference`, :attr:`username`, :attr:`password`, :attr:`authentication_source`, :attr:`authentication_mechanism`,
:attr:`replicaset`, :attr:`tls`, etc. Most of the parameters accepted by `pymongo.MongoClient <https://pymongo.readthedocs.io/en/stable/api/pymongo/mongo_client.html#pymongo.mongo_client.MongoClient>`_
can be used with :func:`~mongoengine.connect` and will simply be forwarded when instantiating the `pymongo.MongoClient`.

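As a sketch of that forwarding, a single call mixing MongoEngine's own
arguments with parameters passed straight through to `pymongo.MongoClient`
(the host, credentials and replica set name are placeholders)::

    connect(
        'my_db',
        host='mongo1.example.com',
        port=27017,
        username='my_user',
        password='my_password',
        authentication_source='admin',
        replicaset='rs0',  # forwarded to pymongo.MongoClient
        tls=True,          # forwarded to pymongo.MongoClient
    )
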
.. note:: Database, username and password from the URI string override
    corresponding parameters in :func:`~mongoengine.connect`; this should
    obviously be avoided: ::

        connect(
            db='test',
            username='user',
            password='12345',
            host='mongodb://admin:qwerty@localhost/production'
        )

    will establish a connection to the ``production`` database using the ``admin`` username and ``qwerty`` password.

.. note:: Calling :func:`~mongoengine.connect` without argument will establish
    a connection to the "test" database by default

Read Preferences
================

As stated above, read preferences are supported through the connection but also via individual
queries by passing the read_preference ::

    from pymongo import ReadPreference

    Bar.objects().read_preference(ReadPreference.PRIMARY)
    Bar.objects(read_preference=ReadPreference.PRIMARY)

Multiple Databases
==================

To use multiple databases you can use :func:`~mongoengine.connect` and provide
an `alias` name for the connection - if no `alias` is provided then "default"
is used.

In the background this uses :func:`~mongoengine.register_connection` to
store the data and you can register all aliases up front if required.

Documents defined in different database
---------------------------------------
Individual documents can be attached to different databases by providing a
`db_alias` in their meta data. This allows :class:`~pymongo.dbref.DBRef`
objects to point across databases and collections. Below is an example schema,
using 3 different databases to store data::

    connect(alias='user-db-alias', db='user-db')
    connect(alias='book-db-alias', db='book-db')
    connect(alias='users-books-db-alias', db='users-books-db')

    class User(Document):
        name = StringField()

        meta = {'db_alias': 'user-db-alias'}

    class Book(Document):
        name = StringField()

        meta = {'db_alias': 'book-db-alias'}

    class AuthorBooks(Document):
        author = ReferenceField(User)
        book = ReferenceField(Book)

        meta = {'db_alias': 'users-books-db-alias'}

Disconnecting an existing connection
------------------------------------
The function :func:`~mongoengine.disconnect` can be used to
disconnect a particular connection. This can be used to change a
connection globally::

    from mongoengine import connect, disconnect
    connect('a_db', alias='db1')

    class User(Document):
        name = StringField()
        meta = {'db_alias': 'db1'}

    disconnect(alias='db1')

    connect('another_db', alias='db1')

.. note:: Calling :func:`~mongoengine.disconnect` without argument
    will disconnect the "default" connection

.. note:: Since connections get registered globally, it is important
    to use the `disconnect` function from MongoEngine and not the
    `disconnect()` method of an existing connection (pymongo.MongoClient)

.. note:: :class:`~mongoengine.Document` classes cache the pymongo collection;
    using `disconnect` ensures that it gets cleaned up as well

Context Managers
================
Sometimes you may want to switch the database or collection to query against.
For example, archiving older data into a separate database for performance
reasons or writing functions that dynamically choose collections to write
a document to.

Switch Database
---------------
The :class:`~mongoengine.context_managers.switch_db` context manager allows
you to change the database alias for a given class, allowing quick and easy
access to the same User document across databases::

    from mongoengine.context_managers import switch_db

    class User(Document):
        name = StringField()

        meta = {'db_alias': 'user-db'}

    with switch_db(User, 'archive-user-db') as User:
        User(name='Ross').save()  # Saves to the 'archive-user-db'

Switch Collection
-----------------
The :func:`~mongoengine.context_managers.switch_collection` context manager
allows you to change the collection for a given class, allowing quick and easy
access to the same Group document across collections::

    from mongoengine.context_managers import switch_collection

    class Group(Document):
        name = StringField()

    Group(name='test').save()  # Saves in the default db

    with switch_collection(Group, 'group2000') as Group:
        Group(name='hello Group 2000 collection!').save()  # Saves in group2000 collection

.. note:: Make sure any aliases have been registered with
    :func:`~mongoengine.register_connection` or :func:`~mongoengine.connect`
    before using the context manager.

@@ -4,7 +4,7 @@ Defining documents
In MongoDB, a **document** is roughly equivalent to a **row** in an RDBMS. When
working with relational databases, rows are stored in **tables**, which have a
strict **schema** that the rows follow. MongoDB stores documents in
**collections** rather than tables --- the principal difference is that no schema
is enforced at a database level.

Defining a document's schema
@@ -22,16 +22,11 @@ objects** as class attributes to the document class::

    class Page(Document):
        title = StringField(max_length=200, required=True)
        date_modified = DateTimeField(default=datetime.datetime.utcnow)

As BSON (the binary format for storing data in MongoDB) is order dependent,
documents are serialized based on their field order.

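One way to observe that ordering - ``to_mongo()`` renders a document as the
ordered mapping that would be stored. A small sketch, assuming the ``Page``
class defined above::

    page = Page(title='Hello')
    # Keys come out in field-declaration order (title before date_modified).
    print(page.to_mongo())
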
.. _dynamic-document-schemas:

Dynamic document schemas
========================
One of the benefits of MongoDB is dynamic schemas for a collection; whilst data
should be planned and organised (after all, explicit is better than implicit!),
there are scenarios where having dynamic / expando style documents is desirable.

@@ -52,11 +47,10 @@ be saved ::

    >>> Page.objects(tags='mongoengine').count()
    >>> 1

.. note::

    There is one caveat on Dynamic Documents: fields cannot start with `_`

    Dynamic fields are stored in creation order *after* any declared fields.

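A minimal sketch of a dynamic schema in use - here ``Page`` is re-declared as a
:class:`~mongoengine.DynamicDocument`, and the extra field name is illustrative::

    from mongoengine import DynamicDocument, StringField

    class Page(DynamicDocument):
        title = StringField(max_length=200, required=True)

    page = Page(title='Using MongoEngine')
    page.tags = ['mongoengine']  # undeclared field, stored after declared ones
    page.save()
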
Fields
======
@@ -68,42 +62,31 @@ not provided. Default values may optionally be a callable, which will be called
to retrieve the value (such as in the above example). The field types available
are as follows:

* :class:`~mongoengine.fields.BinaryField`
* :class:`~mongoengine.fields.BooleanField`
* :class:`~mongoengine.fields.ComplexDateTimeField`
* :class:`~mongoengine.fields.DateTimeField`
* :class:`~mongoengine.fields.DecimalField`
* :class:`~mongoengine.fields.DictField`
* :class:`~mongoengine.fields.DynamicField`
* :class:`~mongoengine.fields.EmailField`
* :class:`~mongoengine.fields.EmbeddedDocumentField`
* :class:`~mongoengine.fields.EmbeddedDocumentListField`
* :class:`~mongoengine.fields.EnumField`
* :class:`~mongoengine.fields.FileField`
* :class:`~mongoengine.fields.FloatField`
* :class:`~mongoengine.fields.GenericEmbeddedDocumentField`
* :class:`~mongoengine.fields.GenericReferenceField`
* :class:`~mongoengine.fields.GenericLazyReferenceField`
* :class:`~mongoengine.fields.GeoPointField`
* :class:`~mongoengine.fields.ImageField`
* :class:`~mongoengine.fields.IntField`
* :class:`~mongoengine.fields.ListField`
* :class:`~mongoengine.fields.LongField`
* :class:`~mongoengine.fields.MapField`
* :class:`~mongoengine.fields.ObjectIdField`
* :class:`~mongoengine.fields.ReferenceField`
* :class:`~mongoengine.fields.LazyReferenceField`
* :class:`~mongoengine.fields.SequenceField`
* :class:`~mongoengine.fields.SortedListField`
* :class:`~mongoengine.fields.StringField`
* :class:`~mongoengine.fields.URLField`
* :class:`~mongoengine.fields.UUIDField`
* :class:`~mongoengine.fields.PointField`
* :class:`~mongoengine.fields.LineStringField`
* :class:`~mongoengine.fields.PolygonField`
* :class:`~mongoengine.fields.MultiPointField`
* :class:`~mongoengine.fields.MultiLineStringField`
* :class:`~mongoengine.fields.MultiPolygonField`

Field arguments
---------------
@@ -113,6 +96,9 @@ arguments can be set on all fields:

:attr:`db_field` (Default: None)
    The MongoDB field name.

:attr:`required` (Default: False)
    If set to True and the field is not set on the document instance, a
    :class:`~mongoengine.ValidationError` will be raised when the document is
@@ -121,10 +107,10 @@ arguments can be set on all fields:

:attr:`default` (Default: None)
    A value to use when no value is set for this field.

    The definition of default parameters follows `the general rules on Python
    <http://docs.python.org/reference/compound_stmts.html#function-definitions>`__,
    which means that some care should be taken when dealing with default mutable objects
    (like in :class:`~mongoengine.fields.ListField` or :class:`~mongoengine.fields.DictField`)::

        class ExampleFirst(Document):
            # Default an empty list
@@ -139,7 +125,6 @@ arguments can be set on all fields:
            # instead to just an object
            values = ListField(IntField(), default=[1,2,3])

    .. note:: Unsetting a field with a default value will revert back to the default.

:attr:`unique` (Default: False)
    When True, no documents in the collection will have the same value for this
@@ -150,16 +135,13 @@ arguments can be set on all fields:
    field, will not have two documents in the collection with the same value.

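A small sketch of the combination-uniqueness described here, using the
``unique_with`` argument (the field names are illustrative)::

    class User(Document):
        first_name = StringField()
        # The (first_name, last_name) pair must be unique across the collection.
        last_name = StringField(unique_with='first_name')
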
:attr:`primary_key` (Default: False)
|
:attr:`primary_key` (Default: False)
|
||||||
When True, use this field as a primary key for the collection. `DictField`
|
When True, use this field as a primary key for the collection.
|
||||||
and `EmbeddedDocuments` both support being the primary key for a document.
|
|
||||||
|
|
||||||
.. note:: If set, this field is also accessible through the `pk` field.
|
|
||||||
|
|
||||||
:attr:`choices` (Default: None)
|
:attr:`choices` (Default: None)
|
||||||
An iterable (e.g. list, tuple or set) of choices to which the value of this
|
An iterable (e.g. a list or tuple) of choices to which the value of this
|
||||||
field should be limited.
|
field should be limited.
|
||||||
|
|
||||||
Can either be nested tuples of value (stored in mongo) and a
|
Can be either be a nested tuples of value (stored in mongo) and a
|
||||||
human readable key ::
|
human readable key ::
|
||||||
|
|
||||||
SIZE = (('S', 'Small'),
|
SIZE = (('S', 'Small'),
|
||||||
@ -179,33 +161,18 @@ arguments can be set on all fields:
|
|||||||
class Shirt(Document):
|
class Shirt(Document):
|
||||||
size = StringField(max_length=3, choices=SIZE)
|
size = StringField(max_length=3, choices=SIZE)
|
||||||
|
|
||||||
:attr:`validation` (Optional)
|
:attr:`help_text` (Default: None)
|
||||||
A callable to validate the value of the field.
|
Optional help text to output with the field - used by form libraries
|
||||||
The callable takes the value as parameter and should raise a ValidationError
|
|
||||||
if validation fails
|
|
||||||
|
|
||||||
e.g ::
|
:attr:`verbose_name` (Default: None)
|
||||||
|
Optional human-readable name for the field - used by form libraries
|
||||||
def _not_empty(val):
|
|
||||||
if not val:
|
|
||||||
raise ValidationError('value can not be empty')
|
|
||||||
|
|
||||||
class Person(Document):
|
|
||||||
name = StringField(validation=_not_empty)
|
|
||||||
|
|
||||||
|
|
||||||
:attr:`**kwargs` (Optional)
    You can supply additional metadata as arbitrary additional keyword
    arguments. You cannot override existing attributes, however. Common
    choices include `help_text` and `verbose_name`, commonly used by form and
    widget libraries.

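    For illustration, a minimal sketch (assuming class-level attribute access
    returns the field object, which carries the extra metadata)::

        class User(Document):
            name = StringField(help_text="The user's full name",
                               verbose_name="Full name")

        # The extra keyword arguments are stored on the field instance,
        # where form/widget libraries can read them back
        print(User.name.help_text)     # The user's full name
        print(User.name.verbose_name)  # Full name
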
List fields
-----------
MongoDB allows storing lists of items. To add a list of items to a
:class:`~mongoengine.Document`, use the :class:`~mongoengine.fields.ListField` field
type. :class:`~mongoengine.fields.ListField` takes another field object as its first
argument, which specifies the type of elements that may be stored within the list::

    class Page(Document):
        tags = ListField(StringField(max_length=50))

Embedded documents
------------------
MongoDB has the ability to embed documents within other documents. Schemata
may be defined for these embedded documents, just as they may be for regular
documents. To create an embedded document, just define a document as usual, but
inherit from :class:`~mongoengine.EmbeddedDocument` rather than
:class:`~mongoengine.Document`::

    class Comment(EmbeddedDocument):
        content = StringField()

To embed the document within another document, use the
:class:`~mongoengine.fields.EmbeddedDocumentField` field type, providing the embedded
document class as the first argument::

    class Page(Document):
        comments = ListField(EmbeddedDocumentField(Comment))

    comment1 = Comment(content='Good work!')
    comment2 = Comment(content='Nice article!')
    page = Page(comments=[comment1, comment2])

Embedded documents can also leverage the flexibility of
:ref:`dynamic-document-schemas` by inheriting
:class:`~mongoengine.DynamicEmbeddedDocument`.

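
For illustration, a short sketch of reading the embedded documents back,
reusing the ``Page`` and ``Comment`` classes above::

    page.save()

    first_page = Page.objects.first()
    for comment in first_page.comments:
        print(comment.content)   # 'Good work!', then 'Nice article!'
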
Dictionary Fields
-----------------
Often, an embedded document may be used instead of a dictionary -- generally
embedded documents are recommended as dictionaries don't support validation
or custom field types. However, sometimes you will not know the structure of what you want to
store; in this situation a :class:`~mongoengine.fields.DictField` is appropriate::

    class SurveyResponse(Document):
        date = DateTimeField()
        user = ReferenceField(User)
        answers = DictField()

    survey_response = SurveyResponse(date=datetime.utcnow(), user=request.user)
    response_form = ResponseForm(request.POST)
    survey_response.answers = response_form.cleaned_data()
    survey_response.save()

Dictionaries can store complex data, other dictionaries, lists, references to
other objects, so are the most flexible field type available.

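
For illustration, keys inside a :class:`~mongoengine.fields.DictField` can be
queried with the usual double-underscore syntax (the ``q1`` and ``extra`` keys
below are hypothetical)::

    # Find responses whose 'answers' dict has q1 == 'yes'
    SurveyResponse.objects(answers__q1='yes')

    # Nested dictionary keys chain the same way
    SurveyResponse.objects(answers__extra__source='web')
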
Reference fields
----------------
References may be stored to other documents in the database using the
:class:`~mongoengine.fields.ReferenceField`. Pass in another document class as the
first argument to the constructor, then simply assign document objects to the
field::

    class User(Document):
        name = StringField()

    class Page(Document):
        content = StringField()
        author = ReferenceField(User)

    john = User(name="John Smith")
    john.save()

    post = Page(content="Test Page")
    post.author = john
    post.save()

The :class:`User` object is automatically turned into a reference behind the
scenes, and dereferenced when the :class:`Page` object is retrieved.

To add a :class:`~mongoengine.fields.ReferenceField` that references the document
being defined, use the string ``'self'`` in place of the document class as the
argument to :class:`~mongoengine.fields.ReferenceField`'s constructor. To reference a
document that has not yet been defined, use the name of the undefined document
as the constructor's argument::

    class Employee(Document):
        name = StringField()
        boss = ReferenceField('self')
        profile_page = ReferenceField('ProfilePage')

    class ProfilePage(Document):
        content = StringField()

.. _many-to-many-with-listfields:

Many to Many with ListFields
''''''''''''''''''''''''''''

If you are implementing a many to many relationship via a list of references,
then the references are stored as DBRefs and to query you need to pass an
instance of the object to the query::

    class User(Document):
        name = StringField()

    class Page(Document):
        content = StringField()
        authors = ListField(ReferenceField(User))

    bob = User(name="Bob Jones").save()
    john = User(name="John Smith").save()

    Page(content="Test Page", authors=[bob, john]).save()
    Page(content="Another Page", authors=[john]).save()

    # Find all pages Bob authored
    Page.objects(authors__in=[bob])

    # Find all pages that both Bob and John have authored
    Page.objects(authors__all=[bob, john])

    # Remove Bob from the authors for a page.
    Page.objects(id='...').update_one(pull__authors=bob)

    # Add John to the authors for a page.
    Page.objects(id='...').update_one(push__authors=john)

Dealing with deletion of referred documents
'''''''''''''''''''''''''''''''''''''''''''
By default, MongoDB doesn't check the integrity of your data, so deleting
documents that other documents still hold references to will lead to
consistency issues. MongoEngine's :class:`ReferenceField` adds some
functionality to safeguard against these kinds of database-integrity problems,
providing each reference with a delete rule specification. A delete rule is
specified by supplying the :attr:`reverse_delete_rule` attribute on the
:class:`ReferenceField` definition, like this::

    class ProfilePage(Document):
        employee = ReferenceField('Employee', reverse_delete_rule=mongoengine.CASCADE)

The declaration in this example means that when an :class:`Employee` object is
removed, the :class:`ProfilePage` that references that employee is removed as
well. If a whole batch of employees is removed, all profile pages that are
linked are removed as well.

Its value can take any of the following constants:

:const:`mongoengine.DO_NOTHING`
    This is the default and won't do anything. Deletes are fast, but may cause
    database inconsistency or dangling references.
:const:`mongoengine.DENY`
    Deletion is denied if there still exist references to the object being
    deleted.
:const:`mongoengine.NULLIFY`
    Any object's fields still referring to the object being deleted are set to None
    (using MongoDB's "unset" operation), effectively nullifying the relationship.
:const:`mongoengine.CASCADE`
    Any object containing fields that are referring to the object being deleted
    are deleted first.
:const:`mongoengine.PULL`
    Removes the reference to the object (using MongoDB's "pull" operation)
    from any object's fields of
    :class:`~mongoengine.fields.ListField` (:class:`~mongoengine.fields.ReferenceField`).


.. warning::
    A safety note on setting up these delete rules! Since the delete rules are
    not recorded on the database level by MongoDB itself, but instead at
    runtime, in-memory, by the MongoEngine module, it is of the utmost
    importance that the module that declares the relationship is loaded
    **BEFORE** the delete is invoked.

    In Django, be sure to put all apps that have such delete rule declarations in
    their :file:`models.py` in the :const:`INSTALLED_APPS` tuple.

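
For illustration, a minimal sketch of the :const:`mongoengine.PULL` rule applied
to the list-of-references pattern shown earlier (the classes are hypothetical)::

    class User(Document):
        name = StringField()

    class Page(Document):
        content = StringField()
        authors = ListField(ReferenceField(User, reverse_delete_rule=mongoengine.PULL))

    bob = User(name='Bob').save()
    page = Page(content='...', authors=[bob]).save()

    bob.delete()     # 'bob' is pulled out of the authors list
    page.reload()
    assert page.authors == []
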
Generic reference fields
''''''''''''''''''''''''
A second kind of reference field also exists,
:class:`~mongoengine.fields.GenericReferenceField`. This allows you to reference any
kind of :class:`~mongoengine.Document`, and hence doesn't take a
:class:`~mongoengine.Document` subclass as a constructor argument::

    class Link(Document):
        url = StringField()

    class Post(Document):
        title = StringField()

    class Bookmark(Document):
        bookmark_object = GenericReferenceField()

    link = Link(url='http://hmarr.com/mongoengine/')
    link.save()

    post = Post(title='Using MongoEngine')
    post.save()

    Bookmark(bookmark_object=link).save()
    Bookmark(bookmark_object=post).save()

.. note::

    Using :class:`~mongoengine.fields.GenericReferenceField`\ s is slightly less
    efficient than the standard :class:`~mongoengine.fields.ReferenceField`\ s, so if
    you will only be referencing one document type, prefer the standard
    :class:`~mongoengine.fields.ReferenceField`.

Uniqueness constraints
----------------------
MongoEngine allows you to specify that a field should be unique across a
collection by providing ``unique=True`` to a :class:`~mongoengine.fields.Field`\ 's
constructor. If you try to save a document that has the same value for a unique
field as a document that is already in the database, a
:class:`~mongoengine.NotUniqueError` will be raised. You may also specify
multi-field uniqueness constraints by using :attr:`unique_with`, which may be
either a single field name, or a list or tuple of field names::

    class User(Document):
        username = StringField(unique=True)
        first_name = StringField()
        last_name = StringField(unique_with='first_name')
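
For illustration, a minimal sketch of the failure mode, reusing the ``User``
class above::

    User(username='bob', first_name='Bob', last_name='Jones').save()

    try:
        User(username='bob').save()   # same username again
    except NotUniqueError:
        print('duplicate username rejected')
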
Document collections
====================
Document classes that inherit **directly** from :class:`~mongoengine.Document`
will have their own **collection** in the database. The name of the collection
is by default the name of the class converted to snake_case (e.g. if your Document class
is named `CompanyUser`, the corresponding collection would be `company_user`). If you need
to change the name of the collection (e.g. to use MongoEngine with an existing database),
then create a class dictionary attribute called :attr:`meta` on your document, and
set :attr:`collection` to the name of the collection that you want your
document class to use::

    class Page(Document):
        title = StringField(max_length=200, required=True)
        meta = {'collection': 'cmsPage'}

Capped collections
------------------
A :class:`~mongoengine.Document` may use a **Capped Collection** by specifying
:attr:`max_documents` and :attr:`max_size` in the :attr:`meta` dictionary.
:attr:`max_documents` is the maximum number of documents that is allowed to be
stored in the collection, and :attr:`max_size` is the maximum size of the
collection in bytes. :attr:`max_size` is rounded up to the next multiple of 256
by MongoDB internally (and by MongoEngine beforehand), so use a multiple of 256
to avoid confusion. If :attr:`max_size` is not specified and
:attr:`max_documents` is, :attr:`max_size` defaults to 10485760 bytes (10MB).
The following example shows a :class:`Log` document that will be limited to
1000 entries and 2MB of disk space::

    class Log(Document):
        ip_address = StringField()
        meta = {'max_documents': 1000, 'max_size': 2000000}

.. _defining-indexes:

Indexes
=======

You can specify indexes on collections to make querying faster. This is done
by creating a list of index specifications called :attr:`indexes` in the
:attr:`~mongoengine.Document.meta` dictionary, where an index specification may
either be a single field name, a tuple containing multiple field names, or a
dictionary containing a full index definition.

A direction may be specified on fields by prefixing the field name with a
**+** (for ascending) or a **-** sign (for descending). Note that direction
only matters on compound indexes. Text indexes may be specified by prefixing
the field name with a **$**. Hashed indexes may be specified by prefixing
the field name with a **#**::

    class Page(Document):
        category = IntField()
        title = StringField()
        rating = StringField()
        created = DateTimeField()
        meta = {
            'indexes': [
                'title',   # single-field index
                '$title',  # text index
                '#title',  # hashed index
                ('title', '-rating'),  # compound index
                ('category', '_cls'),  # compound index
                {
                    'fields': ['created'],
                    'expireAfterSeconds': 3600  # ttl index
                }
            ]
        }

If a dictionary is passed then additional options become available. Valid options include,
but are not limited to:


:attr:`fields` (Default: None)
    The fields to index. Specified in the same format as described above.

:attr:`cls` (Default: True)
    If you have polymorphic models that inherit and have
    :attr:`allow_inheritance` turned on, you can configure whether the index
    should have the :attr:`_cls` field added automatically to the start of the
    index.

:attr:`sparse` (Default: False)
    Whether the index should be sparse.

:attr:`unique` (Default: False)
    Whether the index should be unique.

:attr:`expireAfterSeconds` (Optional)
    Allows you to automatically expire data from a collection by setting the
    time in seconds after which the field expires.

:attr:`name` (Optional)
    Allows you to specify a name for the index.

:attr:`collation` (Optional)
    Allows you to create case-insensitive indexes (MongoDB v3.4+ only).

.. note::

    Additional options are forwarded as ``**kwargs`` to pymongo's create_index method.
    Inheritance adds extra indexes; see :ref:`document-inheritance`.

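
For illustration, a dictionary specification can combine several of these
options; unrecognised keys (such as ``partialFilterExpression`` below, assuming
your MongoDB version supports partial indexes) are forwarded to PyMongo::

    class Article(Document):
        slug = StringField()
        published = BooleanField()
        meta = {
            'indexes': [
                {
                    'fields': ['slug'],
                    'unique': True,
                    'sparse': True,
                    'name': 'slug_unique_idx',
                    # forwarded verbatim to pymongo's create_index
                    'partialFilterExpression': {'published': True},
                },
            ]
        }
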
Global index default options
----------------------------

There are a few top level defaults for all indexes that can be set::

    class Page(Document):
        title = StringField()
        rating = StringField()
        meta = {
            'index_opts': {},
            'index_background': True,
            'index_cls': False,
            'auto_create_index': True,
        }


:attr:`index_opts` (Optional)
    Set any default index options - see the `full options list <https://docs.mongodb.com/manual/reference/method/db.collection.createIndex/#db.collection.createIndex>`_

:attr:`index_background` (Optional)
    Set the default for whether an index should be built in the background

:attr:`index_cls` (Optional)
    A way to turn off a specific index for :attr:`_cls`.

:attr:`auto_create_index` (Optional)
    When this is True (default), MongoEngine will ensure that the correct
    indexes exist in MongoDB each time a command is run. This can be disabled
    in systems where indexes are managed separately. Disabling this will improve
    performance.

Compound Indexes and Indexing sub documents
-------------------------------------------

Compound indexes can be created by adding the Embedded field or dictionary
field name to the index definition.

Sometimes it's more efficient to index parts of Embedded / dictionary fields;
in this case use 'dot' notation to identify the value to index, e.g. `rank.title`.

.. _geospatial-indexes:

Geospatial indexes
------------------

The best geo index for mongodb is the new "2dsphere", which has an improved
spherical model and provides better performance and more options when querying.
The following fields will explicitly add a "2dsphere" index:

    - :class:`~mongoengine.fields.PointField`
    - :class:`~mongoengine.fields.LineStringField`
    - :class:`~mongoengine.fields.PolygonField`
    - :class:`~mongoengine.fields.MultiPointField`
    - :class:`~mongoengine.fields.MultiLineStringField`
    - :class:`~mongoengine.fields.MultiPolygonField`

As "2dsphere" indexes can be part of a compound index, you may not want the
automatic index but would prefer a compound index. In this example we turn off
auto indexing and explicitly declare a compound index on ``location`` and ``datetime``::

    class Log(Document):
        location = PointField(auto_index=False)
        datetime = DateTimeField()

        meta = {
            'indexes': [[("location", "2dsphere"), ("datetime", 1)]]
        }

Pre MongoDB 2.4 Geo
'''''''''''''''''''

.. note:: For MongoDB < 2.4 this is still current, however the new 2dsphere
    index is a big improvement over the previous 2D model - so upgrading is
    advised.

Geospatial indexes will be automatically created for all
:class:`~mongoengine.fields.GeoPointField`\ s

It is also possible to explicitly define geospatial indexes. This is
useful if you need to define a geospatial index on a subfield of a
:class:`~mongoengine.fields.DictField` or a custom field that contains a
point. To create a geospatial index you must prefix the field with the
***** sign. ::

    class Place(Document):
        location = DictField()
        meta = {
            'indexes': [
                '*location.point',
            ],
        }

Time To Live (TTL) indexes
--------------------------

A special index type that allows you to automatically expire data from a
collection after a given period. See the official
`ttl <http://docs.mongodb.org/manual/tutorial/expire-data/#expire-data-from-collections-by-setting-ttl>`_
documentation for more information. A common use case might be session data::

    class Session(Document):
        created = DateTimeField(default=datetime.utcnow)
        meta = {
            'indexes': [
                {'fields': ['created'], 'expireAfterSeconds': 3600}
            ]
        }

.. warning:: TTL indexes happen on the MongoDB server and not in the application
    code, therefore no signals will be fired on document deletion.
    If you need signals to be fired on deletion, then you must handle the
    deletion of Documents in your application code.

Comparing Indexes
-----------------

Use :func:`mongoengine.Document.compare_indexes` to compare actual indexes in
the database to those that your document definitions define. This is useful
for maintenance purposes and ensuring you have the correct indexes for your
schema.
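
For illustration, a minimal sketch (the exact shape of the returned value is
best verified against your MongoEngine version)::

    diff = Page.compare_indexes()
    print(diff['missing'])  # defined on the Document but absent from MongoDB
    print(diff['extra'])    # present in MongoDB but not defined on the Document
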
Ordering
========
A default ordering can be specified for your
:class:`~mongoengine.queryset.QuerySet` using the :attr:`ordering` attribute of
:attr:`~mongoengine.Document.meta`. Ordering will be applied when the
:class:`~mongoengine.queryset.QuerySet` is created, and can be overridden by
subsequent calls to :meth:`~mongoengine.queryset.QuerySet.order_by`.
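
For example, a minimal sketch (the ``BlogPost`` class and dates are
hypothetical)::

    from datetime import datetime

    class BlogPost(Document):
        title = StringField()
        published_date = DateTimeField()

        meta = {
            'ordering': ['-published_date']   # newest first by default
        }

    BlogPost(title='older', published_date=datetime(2010, 1, 5)).save()
    BlogPost(title='newer', published_date=datetime(2010, 1, 6)).save()

    assert BlogPost.objects.first().title == 'newer'

    # An explicit order_by() overrides the default ordering
    assert BlogPost.objects.order_by('+published_date').first().title == 'older'
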
Shard keys
==========

If your collection is sharded by multiple keys, then you can improve shard
routing (and thus the performance of your application) by specifying the shard
key, using the :attr:`shard_key` attribute of
:attr:`~mongoengine.Document.meta`. The shard key should be defined as a tuple.

This ensures that the full shard key is sent with the query when calling
methods such as :meth:`~mongoengine.document.Document.save`,
:meth:`~mongoengine.document.Document.update`,
:meth:`~mongoengine.document.Document.modify`, or
:meth:`~mongoengine.document.Document.delete` on an existing
:class:`~mongoengine.Document` instance::

    class LogEntry(Document):
        machine = StringField()
        sensor = StringField()
        timestamp = DateTimeField()
        data = StringField()

        meta = {
            'shard_key': ('machine', 'timestamp'),
            'indexes': ('machine', 'timestamp'),
        }

.. _document-inheritance:

Document inheritance
====================

To create a specialised type of a :class:`~mongoengine.Document` you have
defined, you may subclass it and add any extra fields or methods you may need.
As this new class is not a direct subclass of
:class:`~mongoengine.Document`, it will not be stored in its own collection; it
will use the same collection as its superclass uses. This allows for more
convenient and efficient retrieval of related documents -- all you need do is
set :attr:`allow_inheritance` to True in the :attr:`meta` data for a
document::

    # Stored in a collection named 'page'
    class Page(Document):
        title = StringField(max_length=200, required=True)

        meta = {'allow_inheritance': True}

    # Also stored in the collection named 'page'
    class DatedPage(Page):
        date = DateTimeField()

.. note:: From 0.8 onwards :attr:`allow_inheritance` defaults
    to False, meaning you must set it to True to use inheritance.

Setting :attr:`allow_inheritance` to True is also required on an
:class:`~mongoengine.EmbeddedDocument` class if you need to subclass it.

When it comes to querying using :attr:`.objects()`, querying `Page.objects()` will query
both `Page` and `DatedPage` whereas querying `DatedPage` will only query the `DatedPage` documents.
Behind the scenes, MongoEngine deals with inheritance by adding a :attr:`_cls` attribute that contains
the class name in every document. When a document is loaded, MongoEngine checks
its :attr:`_cls` attribute and uses that class to construct the instance::

    Page(title='a funky title').save()
    DatedPage(title='another title', date=datetime.utcnow()).save()

    print(Page.objects().count())       # 2
    print(DatedPage.objects().count())  # 1

    # print documents in their native form
    # we remove 'id' to avoid polluting the output with unnecessary detail
    qs = Page.objects.exclude('id').as_pymongo()
    print(list(qs))
    # [
    #   {'_cls': u'Page', 'title': 'a funky title'},
    #   {'_cls': u'Page.DatedPage', 'title': u'another title', 'date': datetime.datetime(2019, 12, 13, 20, 16, 59, 993000)}
    # ]

Working with existing data
--------------------------
As MongoEngine no longer defaults to needing :attr:`_cls`, you can quickly and
easily get working with existing data. Just define the document to match
the expected schema in your database::

    # Will work with data in an existing collection named 'cmsPage'
    class Page(Document):
        title = StringField(max_length=200, required=True)
        meta = {
            'collection': 'cmsPage'
        }

If you have wildly varying schemas then using a
:class:`~mongoengine.DynamicDocument` might be more appropriate, instead of
defining all possible field types.

If you use :class:`~mongoengine.Document` and the database contains data that
isn't defined then that data will be stored in the `document._data` dictionary.

Abstract classes
================

If you want to add some extra functionality to a group of Document classes but
you don't need or want the overhead of inheritance you can use the
:attr:`abstract` attribute of :attr:`~mongoengine.Document.meta`.
This won't turn on :ref:`document-inheritance` but will allow you to keep your
code DRY::

    class BaseDocument(Document):
        meta = {
            'abstract': True,
        }

        def check_permissions(self):
            ...

    class User(BaseDocument):
        ...

Now the User class will have access to the inherited `check_permissions` method
and won't store any of the extra `_cls` information.

Document instances
==================
To create a new document object, create an instance of the relevant document
class, providing values for its fields as constructor keyword arguments.
You may provide values for any of the fields on the document::

    >>> page = Page(title="Test Page")
    >>> page.title
    'Test Page'

You may also assign values to the document's fields using standard object
attribute syntax::

    >>> page.title = "Example Page"
    >>> page.title
    'Example Page'

Saving and deleting documents
=============================
MongoEngine tracks changes to documents to provide efficient saving. To save
the document to the database, call the :meth:`~mongoengine.Document.save`
method. If the document does not exist in the database, it will be created. If
it does already exist, then any changes will be updated atomically. For
example::

    >>> page = Page(title="Test Page")
    >>> page.save()  # Performs an insert
    >>> page.title = "My Page"
    >>> page.save()  # Performs an atomic set on the changed field.

.. note::

    Changes to documents are tracked and on the whole perform ``set`` operations.

    * ``list_field.push(0)`` --- *sets* the resulting list
    * ``del(list_field)`` --- *unsets* whole list

    With lists it's preferable to use ``Doc.update(push__list_field=0)`` as
    this stops the whole list being updated --- stopping any race conditions.

.. seealso::
    :ref:`guide-atomic-updates`
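
For illustration, a minimal sketch of the atomic-update alternative mentioned
in the note above (the ``Post`` class is hypothetical)::

    class Post(Document):
        tags = ListField(StringField())

    post = Post(tags=['db']).save()

    # Atomic server-side push: only the 'tags' field is touched, so
    # concurrent writers cannot clobber each other's lists
    post.update(push__tags='mongodb')
    post.reload()
    assert post.tags == ['db', 'mongodb']
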
Cascading Saves
---------------
If your document contains :class:`~mongoengine.fields.ReferenceField` or
:class:`~mongoengine.fields.GenericReferenceField` objects, then by default the
:meth:`~mongoengine.Document.save` method will not save any changes to
those objects. If you want all references to be saved also (noting that each
save is a separate query), then pass :attr:`cascade` as True to the save
method to cascade any saves.
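
For illustration, a minimal sketch of opting in to cascading saves (the
``Author``/``Book`` classes are hypothetical)::

    class Author(Document):
        name = StringField()

    class Book(Document):
        title = StringField()
        author = ReferenceField(Author)

    author = Author(name='A. Writer').save()
    book = Book(title='A Title', author=author).save()

    author.name = 'A. N. Other'
    book.save(cascade=True)   # also issues a save for the modified author
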
Deleting documents
------------------
To delete a document, call the :meth:`~mongoengine.Document.delete` method.
Note that this will only work if the document exists in the database and has a
valid :attr:`id`.

Document IDs
============
Each document in the database has a unique id. This may be accessed through
the :attr:`id` attribute on :class:`~mongoengine.Document` objects. Usually,
the id will be generated automatically by the database server when the object
is saved, meaning that you may only access the :attr:`id` field once a document
has been saved.

Alternatively, you may define one of your own fields to be the document's
"primary key" by providing ``primary_key=True`` as a keyword argument to a
field's constructor. Under the hood, MongoEngine deals with object ids, so
you may still use :attr:`id` to access the primary key if you want::

    >>> bob = User(email='bob@example.com', first_name='Bob', last_name='Jones')
    >>> bob.save()
    >>> bob.id == bob.email == 'bob@example.com'
    True

You can also access the document's "primary key" using the :attr:`pk` field,
it's an alias to :attr:`id`::

    >>> page = Page(title="Another Test Page")
    >>> page.save()
    >>> page.id == page.pk
    True

.. note::

    If you define your own primary key field, the field implicitly becomes
    required, so a :class:`ValidationError` will be raised if you don't
    provide it.

GridFS
======

Writing
-------

GridFS support comes in the form of the :class:`~mongoengine.fields.FileField` field
object. This field acts as a file-like object and provides a couple of
different ways of inserting and retrieving data. Arbitrary metadata such as
content type can also be stored alongside the files. The object returned when accessing a
FileField is a proxy to `PyMongo's GridFS
<https://api.mongodb.com/python/current/examples/gridfs.html#gridfs-example>`_.
In the following example, a document is created to store details about animals, including a photo::

    class Animal(Document):
        genus = StringField()
        family = StringField()
        photo = FileField()

    marmot = Animal(genus='Marmota', family='Sciuridae')

    with open('marmot.jpg', 'rb') as fd:
        marmot.photo.put(fd, content_type='image/jpeg')
    marmot.save()

Retrieval
---------

So using the :class:`~mongoengine.fields.FileField` is just like using any other
field. The file can also be retrieved just as easily::

    marmot = Animal.objects(genus='Marmota').first()
    photo = marmot.photo.read()
    content_type = marmot.photo.content_type

.. note:: If you need to read() the content of a file multiple times, you'll need to "rewind"
    the file-like object using `seek`::

        marmot = Animal.objects(genus='Marmota').first()
        content1 = marmot.photo.read()
        assert content1 != ""

        content2 = marmot.photo.read()  # will be empty
        assert content2 == ""

        marmot.photo.seek(0)  # rewind the file by setting the current position of the cursor in the file to 0
        content3 = marmot.photo.read()
        assert content3 == content1

Streaming
---------

Streaming data into a :class:`~mongoengine.fields.FileField` is achieved in a
slightly different manner. First, a new file must be created by calling the
:func:`new_file` method. Data can then be written using :func:`write`::

    marmot.photo.new_file()
    marmot.photo.write('some_image_data')
    marmot.photo.write('some_more_image_data')
    marmot.photo.close()

    marmot.save()

Deletion
--------

Deleting stored files is achieved with the :func:`delete` method::

    marmot.photo.delete()  # Deletes the GridFS document
    marmot.save()          # Saves the GridFS reference (being None) contained in the marmot instance

.. warning::

    The FileField in a Document actually only stores the location of a file in
    a separate collection. This means that deleting a document will not delete
    the file stored in GridFS; call :func:`delete` on the FileField first if
    you want to remove the underlying file.

Replacing files
---------------

Files can be replaced with the :func:`replace` method. This works just like
the :func:`put` method so even metadata can (and should) be replaced::

    another_marmot = open('another_marmot.png', 'rb')
    marmot.photo.replace(another_marmot, content_type='image/png')  # Replaces the GridFS document
    marmot.save()  # Replaces the GridFS reference contained in marmot instance

   defining-documents
   document-instances
   querying
   validation
   gridfs
   signals
   text-indexes
   migration
   logging-monitoring
   mongomock

Installing MongoEngine
======================

To use MongoEngine, you will need to download `MongoDB <http://mongodb.com/>`_
and ensure it is running in an accessible location. You will also need
`PyMongo <http://api.mongodb.org/python>`_ to use MongoEngine, but if you
install MongoEngine using setuptools, then the dependencies will be handled for
you.

MongoEngine is available on PyPI, so you can use :program:`pip`:

.. code-block:: console

    $ python -m pip install mongoengine

Alternatively, if you don't have setuptools installed, `download it from PyPI
<http://pypi.python.org/pypi/mongoengine/>`_ and run

.. code-block:: console

    $ python setup.py install

To use the bleeding-edge version of MongoEngine, you can get the source from
`GitHub <http://github.com/mongoengine/mongoengine/>`_ and install it as above:

.. code-block:: console

    $ git clone git://github.com/mongoengine/mongoengine
    $ cd mongoengine
    $ python setup.py install

==================
Logging/Monitoring
==================

It is possible to use `pymongo.monitoring <https://api.mongodb.com/python/current/api/pymongo/monitoring.html>`_ to monitor
the driver events (e.g. queries, connections, etc). This can be handy if you want to monitor the queries issued by
MongoEngine to the driver.

To use `pymongo.monitoring` with MongoEngine, you need to make sure that you are registering the listeners
**before** establishing the database connection (i.e. calling `connect`).

The following snippet provides a basic logging of all command events:

.. code-block:: python

    import logging
    from pymongo import monitoring
    from mongoengine import *

    log = logging.getLogger()
    log.setLevel(logging.DEBUG)
    logging.basicConfig(level=logging.DEBUG)


    class CommandLogger(monitoring.CommandListener):

        def started(self, event):
            log.debug("Command {0.command_name} with request id "
                      "{0.request_id} started on server "
                      "{0.connection_id}".format(event))

        def succeeded(self, event):
            log.debug("Command {0.command_name} with request id "
                      "{0.request_id} on server {0.connection_id} "
                      "succeeded in {0.duration_micros} "
                      "microseconds".format(event))

        def failed(self, event):
            log.debug("Command {0.command_name} with request id "
                      "{0.request_id} on server {0.connection_id} "
                      "failed in {0.duration_micros} "
                      "microseconds".format(event))

    monitoring.register(CommandLogger())


    class Jedi(Document):
        name = StringField()


    connect()


    log.info('GO!')

    log.info('Saving an item through MongoEngine...')
    Jedi(name='Obi-Wan Kenobii').save()

    log.info('Querying through MongoEngine...')
    obiwan = Jedi.objects.first()

    log.info('Updating through MongoEngine...')
    obiwan.name = 'Obi-Wan Kenobi'
    obiwan.save()


Executing this prints the following output::

    INFO:root:GO!
    INFO:root:Saving an item through MongoEngine...
    DEBUG:root:Command insert with request id 1681692777 started on server ('localhost', 27017)
    DEBUG:root:Command insert with request id 1681692777 on server ('localhost', 27017) succeeded in 562 microseconds
    INFO:root:Querying through MongoEngine...
    DEBUG:root:Command find with request id 1714636915 started on server ('localhost', 27017)
    DEBUG:root:Command find with request id 1714636915 on server ('localhost', 27017) succeeded in 341 microseconds
    INFO:root:Updating through MongoEngine...
    DEBUG:root:Command update with request id 1957747793 started on server ('localhost', 27017)
    DEBUG:root:Command update with request id 1957747793 on server ('localhost', 27017) succeeded in 455 microseconds

More details can of course be obtained by checking the `event` argument from the `CommandListener`.

===================
|
|
||||||
Documents migration
|
|
||||||
===================
|
|
||||||
|
|
||||||
The structure of your documents and their associated mongoengine schemas are likely
|
|
||||||
to change over the lifetime of an application. This section provides guidance and
|
|
||||||
recommendations on how to deal with migrations.
|
|
||||||
|
|
||||||
Due to the very flexible nature of mongodb, migrations of models aren't trivial and
|
|
||||||
for people that know about `alembic` for `sqlalchemy`, there is unfortunately no equivalent
|
|
||||||
library that will manage the migration in an automatic fashion for mongoengine.
|
|
||||||
|
|
||||||
Example 1: Addition of a field
|
|
||||||
==============================
|
|
||||||
|
|
||||||
Let's start by taking a simple example of a model change and review the different option you
|
|
||||||
have to deal with the migration.
|
|
||||||
|
|
||||||
Let's assume we start with the following schema and save an instance:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
class User(Document):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
User(name="John Doe").save()
|
|
||||||
|
|
||||||
# print the objects as they exist in mongodb
|
|
||||||
print(User.objects().as_pymongo()) # [{u'_id': ObjectId('5d06b9c3d7c1f18db3e7c874'), u'name': u'John Doe'}]
|
|
||||||
|
|
||||||
On the next version of your application, let's now assume that a new field `enabled` gets added to the
|
|
||||||
existing ``User`` model with a `default=True`. Thus you simply update the ``User`` class to the following:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
class User(Document):
|
|
||||||
name = StringField(required=True)
|
|
||||||
enabled = BooleanField(default=True)
|
|
||||||
|
|
||||||
Without applying any migration, we now reload an object from the database into the ``User`` class
|
|
||||||
and checks its `enabled` attribute:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
assert User.objects.count() == 1
|
|
||||||
user = User.objects().first()
|
|
||||||
assert user.enabled is True
|
|
||||||
assert User.objects(enabled=True).count() == 0 # uh?
|
|
||||||
assert User.objects(enabled=False).count() == 0 # uh?
|
|
||||||
|
|
||||||
# this is consistent with what we have in the database
|
|
||||||
# in fact, 'enabled' does not exist
|
|
||||||
print(User.objects().as_pymongo().first()) # {u'_id': ObjectId('5d06b9c3d7c1f18db3e7c874'), u'name': u'John'}
|
|
||||||
assert User.objects(enabled=None).count() == 1
|
|
||||||
|
|
||||||
As you can see, even if the document wasn't updated, mongoengine applies the default value seamlessly when it
|
|
||||||
loads the pymongo dict into a ``User`` instance. At first sight it looks like you don't need to migrate the
|
|
||||||
existing documents when adding new fields but this actually leads to inconsistencies when it comes to querying.
|
|
||||||
|
|
||||||
In fact, when querying, mongoengine isn't trying to account for the default value of the new field and so
|
|
||||||
if you don't actually migrate the existing documents, you are taking a risk that querying/updating
|
|
||||||
will be missing relevant record.
|
|
||||||
|
|
||||||
When adding fields/modifying default values, you can use any of the following to do the migration
|
|
||||||
as a standalone script:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
# Use mongoengine to set a default value for a given field
|
|
||||||
User.objects().update(enabled=True)
|
|
||||||
# or use pymongo
|
|
||||||
user_coll = User._get_collection()
|
|
||||||
user_coll.update_many({}, {'$set': {'enabled': True}})
|
|
||||||
|
|
||||||
|
|
||||||
Example 2: Inheritance change
|
|
||||||
=============================
|
|
||||||
|
|
||||||
Let's consider the following example:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
class Human(Document):
|
|
||||||
name = StringField()
|
|
||||||
meta = {"allow_inheritance": True}
|
|
||||||
|
|
||||||
class Jedi(Human):
|
|
||||||
dark_side = BooleanField()
|
|
||||||
light_saber_color = StringField()

    Jedi(name="Darth Vader", dark_side=True, light_saber_color="red").save()
    Jedi(name="Obi Wan Kenobi", dark_side=False, light_saber_color="blue").save()

    assert Human.objects.count() == 2
    assert Jedi.objects.count() == 2

    # Let's check how these documents got stored in mongodb
    print(Jedi.objects.as_pymongo())
    # [
    #   {'_id': ObjectId('5fac4aaaf61d7fb06046e0f9'), '_cls': 'Human.Jedi', 'name': 'Darth Vader', 'dark_side': True, 'light_saber_color': 'red'},
    #   {'_id': ObjectId('5fac4ac4f61d7fb06046e0fa'), '_cls': 'Human.Jedi', 'name': 'Obi Wan Kenobi', 'dark_side': False, 'light_saber_color': 'blue'}
    # ]

As you can observe, when you use inheritance, MongoEngine stores a field named '_cls' behind the scenes to keep
track of the Document class.

Let's now consider a scenario in which you want to refactor the inheritance schema and:

- have the Jedis with dark_side=False/True become GoodJedis/BadSiths
- get rid of the 'dark_side' field

i.e. move to the following schemas:

.. code-block:: python

    # unchanged
    class Human(Document):
        name = StringField()
        meta = {"allow_inheritance": True}

    # attribute 'dark_side' removed
    class GoodJedi(Human):
        light_saber_color = StringField()

    # new class
    class BadSith(Human):
        light_saber_color = StringField()

MongoEngine doesn't know about these changes or how to map them onto the existing data,
so if you don't apply any migration, you will observe strange behavior, as if the collection were suddenly
empty.

.. code-block:: python

    # As a reminder, the documents that we inserted
    # have the _cls field = 'Human.Jedi'

    # The following has no match
    # because the query that is used behind the scenes is
    # filtering on {'_cls': 'Human.GoodJedi'}
    assert GoodJedi.objects().count() == 0

    # The following also has no match
    # because it is filtering on {'_cls': {'$in': ('Human', 'Human.GoodJedi', 'Human.BadSith')}}
    # which has no match
    assert Human.objects.count() == 0
    assert Human.objects.first() is None

    # If we bypass MongoEngine and make use of the underlying driver (PyMongo)
    # we can see that the documents are there
    humans_coll = Human._get_collection()
    assert humans_coll.count_documents({}) == 2
    # print the first document
    print(humans_coll.find_one())
    # {'_id': ObjectId('5fac4aaaf61d7fb06046e0f9'), '_cls': 'Human.Jedi', 'name': 'Darth Vader', 'dark_side': True, 'light_saber_color': 'red'}

As you can see, the first obvious problem is that we need to modify the '_cls' values based on the existing
'dark_side' value of each document.

.. code-block:: python

    humans_coll = Human._get_collection()
    old_class = 'Human.Jedi'
    good_jedi_class = 'Human.GoodJedi'
    bad_sith_class = 'Human.BadSith'
    humans_coll.update_many({'_cls': old_class, 'dark_side': False}, {'$set': {'_cls': good_jedi_class}})
    humans_coll.update_many({'_cls': old_class, 'dark_side': True}, {'$set': {'_cls': bad_sith_class}})

Let's now check if querying improved in MongoEngine:

.. code-block:: python

    assert GoodJedi.objects().count() == 1  # Hoorah!
    assert BadSith.objects().count() == 1   # Hoorah!
    assert Human.objects.count() == 2       # Hoorah!

    # let's now check that documents load correctly
    jedi = GoodJedi.objects().first()
    # raises FieldDoesNotExist: The fields "{'dark_side'}" do not exist on the document "Human.GoodJedi"

In fact we only took care of renaming the _cls values but we haven't removed the 'dark_side' fields,
which no longer exist on the GoodJedi and BadSith models.
Let's remove the field from the collection:

.. code-block:: python

    humans_coll = Human._get_collection()
    humans_coll.update_many({}, {'$unset': {'dark_side': 1}})
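    # Sanity check (a suggested verification step, not part of the original walkthrough):
    # no document should carry the removed field any more
    assert humans_coll.count_documents({'dark_side': {'$exists': True}}) == 0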

.. note:: We did this migration in 2 different steps for the sake of example but it could have been combined
    with the migration of the _cls fields::

        humans_coll.update_many(
            {'_cls': old_class, 'dark_side': False},
            {
                '$set': {'_cls': good_jedi_class},
                '$unset': {'dark_side': 1}
            }
        )

And verify that the documents now load correctly:

.. code-block:: python

    jedi = GoodJedi.objects().first()
    assert jedi.name == "Obi Wan Kenobi"

    sith = BadSith.objects().first()
    assert sith.name == "Darth Vader"

Another way of dealing with this migration is to iterate over
the documents and update/replace them one by one. This is much slower but
it is often useful for complex migrations of Document models.

.. code-block:: python

    for doc in humans_coll.find():
        if doc['_cls'] == 'Human.Jedi':
            doc['_cls'] = 'Human.BadSith' if doc['dark_side'] else 'Human.GoodJedi'
            doc.pop('dark_side')
            humans_coll.replace_one({'_id': doc['_id']}, doc)

.. warning:: Be aware of this `flaw <https://groups.google.com/g/mongodb-user/c/AFC1ia7MHzk>`_ if you modify documents while iterating.
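
One way to sidestep that pitfall (a sketch, not part of the original guide) is to snapshot the ``_id`` values
up front and iterate over that list instead of over a live cursor:

.. code-block:: python

    # Collect the ids first so that in-place replacements cannot affect the cursor
    ids = [doc['_id'] for doc in humans_coll.find({}, {'_id': 1})]
    for oid in ids:
        doc = humans_coll.find_one({'_id': oid})
        if doc['_cls'] == 'Human.Jedi':
            doc['_cls'] = 'Human.BadSith' if doc['dark_side'] else 'Human.GoodJedi'
            doc.pop('dark_side')
            humans_coll.replace_one({'_id': oid}, doc)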

Example 4: Index removal
========================

If you remove an index from your Document class, or remove an indexed Field from your Document class,
you'll need to manually drop the corresponding index. MongoEngine will not do that for you.

The way to deal with this case is to identify the name of the index to drop with `index_information()`, and then drop
it with `drop_index()`.

Let's for instance assume that you start with the following Document class:

.. code-block:: python

    class User(Document):
        name = StringField(index=True)

        meta = {"indexes": ["name"]}

    User(name="John Doe").save()

As soon as you start interacting with the Document collection (when `.save()` is called in this case),
it will create the following indexes:

.. code-block:: python

    print(User._get_collection().index_information())
    # {
    #   '_id_': {'key': [('_id', 1)], 'v': 2},
    #   'name_1': {'background': False, 'key': [('name', 1)], 'v': 2},
    # }

That is: '_id', which is the default index, and 'name_1', which is our custom index.
If you then remove the 'name' field or its index, you have to call:

.. code-block:: python

    User._get_collection().drop_index('name_1')

.. note:: When adding new fields or new indexes, MongoEngine will take care of creating them
    (unless `auto_create_index` is disabled).

Recommendations
===============

- Write migration scripts whenever you change your model schemas.
- Using :class:`~mongoengine.DynamicDocument` or ``meta = {"strict": False}`` may help to avoid some migrations or to let two versions of your application co-exist.
- Write post-processing checks to verify that the migration script worked. See below.

Post-processing checks
======================

The following recipe can be used to sanity check a Document collection after you have applied a migration.
It does not make any assumptions about what was migrated; it fetches 1000 objects randomly and
runs some quick checks on the documents to make sure they look OK. As written, it fails
on the first occurrence of an error, but this can be adapted to your needs.

.. code-block:: python

    import logging

    LOG = logging.getLogger(__name__)

    def get_random_oids(collection, sample_size):
        pipeline = [{"$project": {'_id': 1}}, {"$sample": {"size": sample_size}}]
        return [s['_id'] for s in collection.aggregate(pipeline)]

    def get_random_documents(DocCls, sample_size):
        doc_collection = DocCls._get_collection()
        random_oids = get_random_oids(doc_collection, sample_size)
        return DocCls.objects(id__in=random_oids)

    def check_documents(DocCls, sample_size):
        for doc in get_random_documents(DocCls, sample_size):
            # general validation (types and values)
            doc.validate()

            # load all subfields,
            # this may trigger additional queries if you have ReferenceFields
            # so it may be slow
            for field in doc._fields:
                try:
                    getattr(doc, field)
                except Exception:
                    LOG.warning(f"Could not load field {field} in Document {doc.id}")
                    raise

    check_documents(Human, sample_size=1000)
@ -1,48 +0,0 @@

==============================
Use mongomock for testing
==============================

`mongomock <https://github.com/vmalloc/mongomock/>`_ is a package to do just
what the name implies: mocking a MongoDB database.

To use it with MongoEngine, simply specify ``mongomock`` as the host scheme when connecting:

.. code-block:: python

    connect('mongoenginetest', host='mongomock://localhost')
    conn = get_connection()

or with an alias:

.. code-block:: python

    connect('mongoenginetest', host='mongomock://localhost', alias='testdb')
    conn = get_connection('testdb')

Example of test file:
---------------------

.. code-block:: python

    import unittest
    from mongoengine import Document, StringField, connect, disconnect

    class Person(Document):
        name = StringField()

    class TestPerson(unittest.TestCase):

        @classmethod
        def setUpClass(cls):
            connect('mongoenginetest', host='mongomock://localhost')

        @classmethod
        def tearDownClass(cls):
            disconnect()

        def test_thing(self):
            pers = Person(name='John')
            pers.save()

            fresh_pers = Person.objects().first()
            assert fresh_pers.name == 'John'
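
If your test suite uses pytest rather than unittest, the same setup can be expressed as a
fixture (a sketch under that assumption, reusing the ``Person`` document defined above):

.. code-block:: python

    import pytest
    from mongoengine import connect, disconnect

    @pytest.fixture(autouse=True)
    def mongo_connection():
        # connect before each test and tear the connection down afterwards
        connect('mongoenginetest', host='mongomock://localhost')
        yield
        disconnect()

    def test_thing():
        Person(name='John').save()
        assert Person.objects().first().name == 'John'
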
@ -15,10 +15,11 @@ fetch documents from the database::

.. note::

    As of MongoEngine 0.8 the querysets utilise a local cache, so iterating
    over it multiple times will only cause a single query. If this is not the
    desired behaviour you can call :class:`~mongoengine.QuerySet.no_cache`
    (version **0.8.3+**) to return a non-caching queryset.

Filtering queries
=================
@ -39,18 +40,10 @@ syntax::

    # been written by a user whose 'country' field is set to 'uk'
    uk_pages = Page.objects(author__country='uk')

.. note::

    (version **0.9.1+**) If your field name resembles a MongoDB operator name (for example
    ``type``, ``lte``, ``lt``...) and you place it at the end of the lookup keyword,
    MongoEngine automatically prepends ``$`` to it. To avoid this, append ``__`` to
    your lookup keyword. For example, if your field name is ``type`` and you want to
    query by this field, you must use ``.objects(user__type__="admin")`` instead of
    ``.objects(user__type="admin")``.

Query operators
===============
Operators other than equality may also be used in queries --- just attach the
operator name to a key with a double-underscore::

    # Only find users whose age is 18 or less
@ -64,7 +57,7 @@ Available operators are as follows:
* ``gt`` -- greater than
* ``gte`` -- greater than or equal to
* ``not`` -- negate a standard check, may be used before other operators (e.g.
  ``Q(age__not__mod=(5, 0))``)
* ``in`` -- value is in list (a list of values should be provided)
* ``nin`` -- value is not in list (a list of values should be provided)
* ``mod`` -- ``value % x == y``, where ``x`` and ``y`` are two provided values
@ -72,9 +65,6 @@ Available operators are as follows:
* ``size`` -- the size of the array is
* ``exists`` -- value for field exists

String queries
--------------

The following operators are available as shortcuts to querying with regular
expressions:
@ -86,81 +76,14 @@ expressions:
* ``istartswith`` -- string field starts with value (case insensitive)
* ``endswith`` -- string field ends with value
* ``iendswith`` -- string field ends with value (case insensitive)
* ``wholeword`` -- string field contains whole word
* ``iwholeword`` -- string field contains whole word (case insensitive)
* ``regex`` -- string field matches by regex
* ``iregex`` -- string field matches by regex (case insensitive)
* ``match`` -- performs an $elemMatch so you can match an entire document within an array
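
For instance (an illustrative sketch, assuming a ``User`` document with a ``name`` string field)::

    # case-insensitive substring match
    User.objects(name__icontains='doe')

    # case-insensitive regex match anchored at the start of the string
    User.objects(name__iregex=r'^john')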

Geo queries
-----------

There are a few special operators for performing geographical queries.
The following were added in MongoEngine 0.8 for
:class:`~mongoengine.fields.PointField`,
:class:`~mongoengine.fields.LineStringField` and
:class:`~mongoengine.fields.PolygonField`:

* ``geo_within`` -- check if a geometry is within a polygon. For ease of use
  it accepts either a geojson geometry or just the polygon coordinates eg::

      loc.objects(point__geo_within=[[[40, 5], [40, 6], [41, 6], [40, 5]]])
      loc.objects(point__geo_within={"type": "Polygon",
                                     "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]})

* ``geo_within_box`` -- simplified geo_within searching with a box eg::

      loc.objects(point__geo_within_box=[(-125.0, 35.0), (-100.0, 40.0)])
      loc.objects(point__geo_within_box=[<bottom left coordinates>, <upper right coordinates>])

* ``geo_within_polygon`` -- simplified geo_within searching within a simple polygon eg::

      loc.objects(point__geo_within_polygon=[[40, 5], [40, 6], [41, 6], [40, 5]])
      loc.objects(point__geo_within_polygon=[ [ <x1> , <y1> ] ,
                                              [ <x2> , <y2> ] ,
                                              [ <x3> , <y3> ] ])

* ``geo_within_center`` -- simplified geo_within the flat circle radius of a point eg::

      loc.objects(point__geo_within_center=[(-125.0, 35.0), 1])
      loc.objects(point__geo_within_center=[ [ <x>, <y> ] , <radius> ])

* ``geo_within_sphere`` -- simplified geo_within the spherical circle radius of a point eg::

      loc.objects(point__geo_within_sphere=[(-125.0, 35.0), 1])
      loc.objects(point__geo_within_sphere=[ [ <x>, <y> ] , <radius> ])

* ``geo_intersects`` -- selects all locations that intersect with a geometry eg::

      # Inferred from provided points lists:
      loc.objects(poly__geo_intersects=[40, 6])
      loc.objects(poly__geo_intersects=[[40, 5], [40, 6]])
      loc.objects(poly__geo_intersects=[[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]])

      # With geoJson style objects
      loc.objects(poly__geo_intersects={"type": "Point", "coordinates": [40, 6]})
      loc.objects(poly__geo_intersects={"type": "LineString",
                                        "coordinates": [[40, 5], [40, 6]]})
      loc.objects(poly__geo_intersects={"type": "Polygon",
                                        "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]})

* ``near`` -- find all the locations near a given point::

      loc.objects(point__near=[40, 5])
      loc.objects(point__near={"type": "Point", "coordinates": [40, 5]})

  You can also set the maximum and/or the minimum distance in meters as well::

      loc.objects(point__near=[40, 5], point__max_distance=1000)
      loc.objects(point__near=[40, 5], point__min_distance=100)

The older 2D indexes are still supported with the
:class:`~mongoengine.fields.GeoPointField`:

* ``within_distance`` -- provide a list containing a point and a maximum
  distance (e.g. [(41.342, -87.653), 5])
* ``within_spherical_distance`` -- same as above but using the spherical geo model
  (e.g. [(41.342, -87.653), 5/earth_radius])
* ``near`` -- order the documents by how close they are to a given point
* ``near_sphere`` -- same as above but using the spherical geo model
@ -168,19 +91,14 @@ The older 2D indexes are still supported with the
  [(35.0, -125.0), (40.0, -100.0)])
* ``within_polygon`` -- filter documents to those within a given polygon (e.g.
  [(41.91,-87.69), (41.92,-87.68), (41.91,-87.65), (41.89,-87.65)]).

  .. note:: Requires Mongo Server 2.0

* ``max_distance`` -- can be added to your location queries to set a maximum
  distance.
* ``min_distance`` -- can be added to your location queries to set a minimum
  distance.

Querying lists
--------------
On most fields, this syntax will look up documents where the field specified
matches the given value exactly, but when the field refers to a
:class:`~mongoengine.fields.ListField`, a single item may be provided, in which case
lists that contain that item will be matched::

    class Page(Document):
@ -211,14 +129,12 @@ However, this doesn't map well to the syntax so you can also use a capital S ins

    Post.objects(comments__by="joe").update(inc__comments__S__votes=1)

.. note::

    Due to :program:`Mongo`, currently the $ operator only applies to the
    first matched item in the query.


Raw queries
-----------
It is possible to provide a raw :mod:`PyMongo` query as a query parameter, which will
be integrated directly into the query. This is done using the ``__raw__``
keyword argument::
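
    # an illustrative example (assuming a Page document with a 'tags' list field)
    Page.objects(__raw__={'tags': 'coding'})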
@ -226,26 +142,14 @@ keyword argument::

.. versionadded:: 0.4

Sorting/Ordering results
========================
It is possible to order the results by 1 or more keys using :meth:`~mongoengine.queryset.QuerySet.order_by`.
The order may be specified by prefixing each key with "+" or "-". Ascending order is assumed if there's no prefix::

    # Order by ascending date
    blogs = BlogPost.objects().order_by('date')    # equivalent to .order_by('+date')

    # Order by ascending date first, then descending title
    blogs = BlogPost.objects().order_by('+date', '-title')

Limiting and skipping results
=============================
Just as with traditional ORMs, you may limit the number of results returned or
skip a number of results in your query.
:meth:`~mongoengine.queryset.QuerySet.limit` and
:meth:`~mongoengine.queryset.QuerySet.skip` methods are available on
:class:`~mongoengine.queryset.QuerySet` objects, but the `array-slicing` syntax
is preferred for achieving this::

    # Only the first 5 people
    users = User.objects[:5]
@ -253,7 +157,7 @@ is preferred for achieving this::
    # All except for the first 5 people
    users = User.objects[5:]

    # 5 users, starting from the 11th user found
    users = User.objects[10:15]

You may also index the query to retrieve a single result. If an item at that
@ -275,21 +179,25 @@ Retrieving unique results
-------------------------
To retrieve a result that should be unique in the collection, use
:meth:`~mongoengine.queryset.QuerySet.get`. This will raise
:class:`~mongoengine.queryset.DoesNotExist` if
no document matches the query, and
:class:`~mongoengine.queryset.MultipleObjectsReturned`
if more than one document matched the query. These exceptions are merged into
your document definitions, e.g. ``MyDoc.DoesNotExist``.

A variation of this method, get_or_create(), existed, but it was unsafe. It
could not be made safe, because there are no transactions in MongoDB. Other
approaches should be investigated, to ensure you don't accidentally duplicate
data when using something similar to this method. Therefore it was deprecated
in 0.8 and removed in 0.10.
Default Document queries
========================
By default, the :attr:`~Document.objects` attribute on a
document returns a :class:`~mongoengine.queryset.QuerySet` that doesn't filter
the collection -- it returns all objects. This may be changed by defining a
method on a document that modifies a queryset. The method should accept two
@ -332,7 +240,7 @@ Should you want to add custom methods for interacting with or filtering
documents, extending the :class:`~mongoengine.queryset.QuerySet` class may be
the way to go. To use a custom :class:`~mongoengine.queryset.QuerySet` class on
a document, set ``queryset_class`` to the custom class in a
:class:`~mongoengine.Document`'s ``meta`` dictionary::

    class AwesomerQuerySet(QuerySet):

@ -356,19 +264,12 @@ Javascript code that is executed on the database server.

Counting results
----------------
Just as with limiting and skipping results, there is a method on a
:class:`~mongoengine.queryset.QuerySet` object --
:meth:`~mongoengine.queryset.QuerySet.count`::

    num_users = User.objects.count()

You could technically use ``len(User.objects)`` to get the same result, but it
would be significantly slower than :meth:`~mongoengine.queryset.QuerySet.count`.
When you execute a server-side count query, you let MongoDB do the heavy
lifting and you receive a single integer over the wire. Meanwhile, ``len()``
retrieves all the results, places them in a local cache, and finally counts
them. If we compare the performance of the two operations, ``len()`` is much slower
than :meth:`~mongoengine.queryset.QuerySet.count`.

Further aggregation
-------------------
@ -402,25 +303,6 @@ would be generating "tag-clouds"::

    top_tags = sorted(tag_freqs.items(), key=itemgetter(1), reverse=True)[:10]


MongoDB aggregation API
-----------------------
If you need to run aggregation pipelines, MongoEngine provides an entry point to `Pymongo's aggregation framework <https://api.mongodb.com/python/current/examples/aggregation.html#aggregation-framework>`_
through :meth:`~mongoengine.queryset.QuerySet.aggregate`. Check out Pymongo's documentation for the syntax and pipeline.
An example of its use would be::

    class Person(Document):
        name = StringField()

    Person(name='John').save()
    Person(name='Bob').save()

    pipeline = [
        {"$sort" : {"name" : -1}},
        {"$project": {"_id": 0, "name": {"$toUpper": "$name"}}}
    ]
    data = list(Person.objects().aggregate(pipeline))
    assert data == [{'name': 'BOB'}, {'name': 'JOHN'}]

Query efficiency and performance
================================

@ -433,7 +315,7 @@ Retrieving a subset of fields
Sometimes a subset of fields on a :class:`~mongoengine.Document` is required,
and for efficiency only these should be retrieved from the database. This issue
is especially important for MongoDB, as fields may often be extremely large
(e.g. a :class:`~mongoengine.fields.ListField` of
:class:`~mongoengine.EmbeddedDocument`\ s, which represent the comments on a
blog post). To select only a subset of fields, use
:meth:`~mongoengine.queryset.QuerySet.only`, specifying the fields you want to
@ -465,14 +347,14 @@ If you later need the missing fields, just call
Getting related data
--------------------

When iterating the results of :class:`~mongoengine.fields.ListField` or
:class:`~mongoengine.fields.DictField` we automatically dereference any
:class:`~pymongo.dbref.DBRef` objects as efficiently as possible, reducing the
number of queries to Mongo.

There are times when that efficiency is not enough; documents that have
:class:`~mongoengine.fields.ReferenceField` objects or
:class:`~mongoengine.fields.GenericReferenceField` objects at the top level are
expensive as the number of queries to MongoDB can quickly rise.

To limit the number of queries use
@ -483,30 +365,8 @@ references to the depth of 1 level. If you have more complicated documents and
want to dereference more of the object at once then increasing the :attr:`max_depth`
will dereference more levels of the document.
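
For example (a sketch; :meth:`~mongoengine.queryset.QuerySet.select_related` accepts the
:attr:`max_depth` argument described above)::

    # dereference references up to two levels deep in a single pass
    posts = Post.objects.select_related(max_depth=2)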

Turning off dereferencing
-------------------------

Sometimes for performance reasons you don't want to automatically dereference
data. To turn off dereferencing of the results of a query use
:func:`~mongoengine.queryset.QuerySet.no_dereference` on the queryset like so::

    post = Post.objects.no_dereference().first()
    assert(isinstance(post.author, DBRef))

You can also turn off all dereferencing for a fixed period by using the
:class:`~mongoengine.context_managers.no_dereference` context manager::

    with no_dereference(Post) as Post:
        post = Post.objects.first()
        assert(isinstance(post.author, DBRef))

    # Outside the context manager dereferencing occurs.
    assert(isinstance(post.author, User))


Advanced queries
================

Sometimes calling a :class:`~mongoengine.queryset.QuerySet` object with keyword
arguments can't fully express the query you want to use -- for example if you
need to combine a number of constraints using *and* and *or*. This is made
@ -519,49 +379,33 @@ operators. To use a :class:`~mongoengine.queryset.Q` object, pass it in as the
first positional argument to :attr:`Document.objects` when you filter it by
calling it with keyword arguments::

    from mongoengine.queryset.visitor import Q

    # Get published posts
    Post.objects(Q(published=True) | Q(publish_date__lte=datetime.now()))

    # Get top posts
    Post.objects((Q(featured=True) & Q(hits__gte=1000)) | Q(hits__gte=5000))

.. warning:: You have to use bitwise operators. You cannot use ``or``, ``and``
    to combine queries as ``Q(a=a) or Q(b=b)`` is not the same as
    ``Q(a=a) | Q(b=b)``. As ``Q(a=a)`` equates to true, ``Q(a=a) or Q(b=b)`` is
    the same as ``Q(a=a)``.

.. _guide-atomic-updates:

Atomic updates
==============
Documents may be updated atomically by using the
:meth:`~mongoengine.queryset.QuerySet.update_one`,
:meth:`~mongoengine.queryset.QuerySet.update` and
:meth:`~mongoengine.queryset.QuerySet.modify` methods on a
:class:`~mongoengine.queryset.QuerySet` or
:meth:`~mongoengine.Document.modify` and
:meth:`~mongoengine.Document.save` (with the :attr:`save_condition` argument) on a
:class:`~mongoengine.Document`.
There are several different "modifiers" that you may use with these methods:

* ``set`` -- set a particular value
* ``set_on_insert`` -- set only if this is a new document (requires ``upsert=True``)
* ``unset`` -- delete a particular value (since MongoDB v1.3)
* ``max`` -- update only if value is bigger
* ``min`` -- update only if value is smaller
* ``inc`` -- increment a value by a given amount
* ``dec`` -- decrement a value by a given amount
* ``push`` -- append a value to a list
* ``push_all`` -- append several values to a list
* ``pop`` -- remove the first or last element of a list `depending on the value`_
* ``pull`` -- remove a value from a list
* ``pull_all`` -- remove several values from a list
* ``add_to_set`` -- add a value to a list only if it's not in the list already
* ``rename`` -- rename the key name

.. _depending on the value: http://docs.mongodb.org/manual/reference/operator/update/pop/

The syntax for atomic updates is similar to the querying syntax, but the
modifier comes before the field, not after it::
@ -581,14 +425,7 @@ modifier comes before the field, not after it::
    >>> post.tags
    ['database', 'nosql']

.. note::

    If no modifier operator is specified, the default will be ``$set``. So the following statements are identical::

        >>> BlogPost.objects(id=post.id).update(title='Example Post')
        >>> BlogPost.objects(id=post.id).update(set__title='Example Post')

.. note::

    In version 0.5 the :meth:`~mongoengine.Document.save` runs atomic updates
    on changed documents by tracking changes to that document.
@ -604,20 +441,10 @@ cannot use the `$` syntax in keyword arguments it has been mapped to `S`::
    >>> post.tags
    ['database', 'mongodb']

From MongoDB version 2.6, the push operator supports a $position value, which allows
pushing values at a given index::

    >>> post = BlogPost(title="Test", tags=["mongo"])
    >>> post.save()
    >>> post.update(push__tags__0=["database", "code"])
    >>> post.reload()
    >>> post.tags
    ['database', 'code', 'mongo']

.. note::

    Currently only top level lists are handled, future versions of mongodb /
    pymongo plan to support nested positional operators. See `The $ positional
    operator <https://docs.mongodb.com/manual/tutorial/update-documents/#Updating-The%24positionaloperator>`_.

Server-side javascript execution
================================
@ -656,7 +483,7 @@ Some variables are made available in the scope of the Javascript function:

The following example demonstrates the intended usage of
:meth:`~mongoengine.queryset.QuerySet.exec_js` by defining a function that sums
over a field on a document (this functionality is already available through
:meth:`~mongoengine.queryset.QuerySet.sum` but is shown here for sake of
example)::
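
    # a sketch of the idea (the exact example is elided in this diff);
    # 'collection' and 'query' are made available in the function's scope,
    # and field names passed to exec_js are translated to their DB names
    sum_hits = """
    function(hitsField) {
        var total = 0.0;
        db[collection].find(query).forEach(function(doc) {
            total += doc[hitsField];
        });
        return total;
    }
    """
    total = BlogPost.objects.exec_js(sum_hits, 'hits')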
@ -683,7 +510,7 @@ Javascript code. When accessing a field on a collection object, use
square-bracket notation, and prefix the MongoEngine field name with a tilde.
The field name that follows the tilde will be translated to the name used in
the database. Note that when referring to fields on embedded documents,
the name of the :class:`~mongoengine.fields.EmbeddedDocumentField`, followed by a dot,
should be used before the name of the field on the embedded document. The
following example shows how the substitutions are made::

@ -1,6 +1,5 @@

.. _signals:

=======
Signals
=======
@ -8,95 +7,32 @@ Signals

.. note::

    Signal support is provided by the excellent `blinker`_ library. If you wish
    to enable signal support this library must be installed, though it is not
    required for MongoEngine to function.
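
    Installing it is a standard pip invocation:

    .. code-block:: console

        $ python -m pip install blinker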

Overview
--------

Signals are found within the `mongoengine.signals` module. Unless
specified, signals receive no additional arguments beyond the `sender` class and
`document` instance. Post-signals are only called if there were no exceptions
raised during the processing of their related function.

Available signals include:

`pre_init`
    Called during the creation of a new :class:`~mongoengine.Document` or
    :class:`~mongoengine.EmbeddedDocument` instance, after the constructor
    arguments have been collected but before any additional processing has been
    done to them. (I.e. assignment of default values.) Handlers for this signal
    are passed the dictionary of arguments using the `values` keyword argument
    and may modify this dictionary prior to returning.

`post_init`
    Called after all processing of a new :class:`~mongoengine.Document` or
    :class:`~mongoengine.EmbeddedDocument` instance has been completed.

`pre_save`
    Called within :meth:`~mongoengine.Document.save` prior to performing
    any actions.

`pre_save_post_validation`
    Called within :meth:`~mongoengine.Document.save` after validation
    has taken place but before saving.

`post_save`
    Called within :meth:`~mongoengine.Document.save` after most actions
    (validation, insert/update, and cascades, but not clearing dirty flags) have
    completed successfully. Passed the additional boolean keyword argument
    `created` to indicate if the save was an insert or an update.

`pre_delete`
    Called within :meth:`~mongoengine.Document.delete` prior to
    attempting the delete operation.

`post_delete`
    Called within :meth:`~mongoengine.Document.delete` upon successful
    deletion of the record.

`pre_bulk_insert`
    Called after validation of the documents to insert, but prior to any data
    being written. In this case, the `document` argument is replaced by a
    `documents` argument representing the list of documents being inserted.

`post_bulk_insert`
    Called after a successful bulk insert operation. As per `pre_bulk_insert`,
    the `document` argument is omitted and replaced with a `documents` argument.
    An additional boolean argument, `loaded`, identifies the contents of
    `documents` as either :class:`~mongoengine.Document` instances when `True` or
    simply a list of primary key values for the inserted records if `False`.
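
A handler can inspect these extra arguments. For instance (an illustrative sketch using the
`created` flag described above)::

    from mongoengine import signals

    def log_creation(sender, document, **kwargs):
        # 'created' is True for inserts and False for updates
        if kwargs.get('created'):
            print('New %s saved: %s' % (sender.__name__, document.id))

    signals.post_save.connect(log_creation)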

Attaching Events
----------------

After writing a handler function like the following::

    import logging
    from datetime import datetime

    from mongoengine import *
    from mongoengine import signals

    def update_modified(sender, document):
        document.modified = datetime.utcnow()

You attach the event handler to your :class:`~mongoengine.Document` or
:class:`~mongoengine.EmbeddedDocument` subclass::

    class Record(Document):
        modified = DateTimeField()

    signals.pre_save.connect(update_modified)

While this is not the most elaborate document model, it does demonstrate the
concepts involved. As a more complete demonstration you can also define your
handlers within your subclass::

    class Author(Document):
        name = StringField()

        @classmethod
        def pre_save(cls, sender, document, **kwargs):
            logging.debug("Pre Save: %s" % document.name)
@ -113,37 +49,5 @@ handlers within your subclass::
    signals.pre_save.connect(Author.pre_save, sender=Author)
    signals.post_save.connect(Author.post_save, sender=Author)

.. warning::

    Note that EmbeddedDocument only supports pre/post_init signals. pre/post_save, etc. should be attached to the Document class only. Attaching pre_save to an EmbeddedDocument is silently ignored.

Finally, you can also use this small decorator to quickly create a number of
signals and attach them to your :class:`~mongoengine.Document` or
:class:`~mongoengine.EmbeddedDocument` subclasses as class decorators::

    def handler(event):
        """Signal decorator to allow use of callback functions as class decorators."""

        def decorator(fn):
            def apply(cls):
                event.connect(fn, sender=cls)
                return cls

            fn.apply = apply
            return fn

        return decorator

Using the first example of updating a modification time, the code is now much
cleaner looking while still allowing manual execution of the callback::

    @handler(signals.pre_save)
    def update_modified(sender, document):
        document.modified = datetime.utcnow()

    @update_modified.apply
    class Record(Document):
        modified = DateTimeField()


.. _blinker: http://pypi.python.org/pypi/blinker
@ -1,51 +0,0 @@

===========
Text Search
===========

MongoDB 2.4 and above supports searching documents with text indexes.


Defining a Document with text index
===================================
Use the *$* prefix to set a text index. Consider the following declaration::

    class News(Document):
        title = StringField()
        content = StringField()
        is_active = BooleanField()

        meta = {'indexes': [
            {'fields': ['$title', "$content"],
             'default_language': 'english',
             'weights': {'title': 10, 'content': 2}
            }
        ]}


Querying
========

Saving a document::

    News(title="Using mongodb text search",
         content="Testing text search").save()

    News(title="MongoEngine 0.9 released",
         content="Various improvements").save()

Next, start a text search using the :meth:`QuerySet.search_text` method::

    document = News.objects.search_text('testing').first()
    document.title  # may be: "Using mongodb text search"

    document = News.objects.search_text('released').first()
    document.title  # may be: "MongoEngine 0.9 released"


Ordering by text score
======================

::

    objects = News.objects.search_text('mongo').order_by('$text_score')
@ -1,122 +0,0 @@

====================
Document Validation
====================

By design, MongoEngine strictly validates documents right before they are inserted in MongoDB
and makes sure they are consistent with the fields defined in your models.

MongoEngine makes the assumption that the documents that exist in the DB are compliant with the schema.
This means that MongoEngine will not validate a document when an object is loaded from the DB into an instance
of your model, but this operation may fail under some circumstances (e.g. if there is a field in
the document fetched from the database that is not defined in your model).


Built-in validation
===================

MongoEngine provides different fields that encapsulate the corresponding validation
out of the box. Validation runs when calling `.validate()` or `.save()`.

.. code-block:: python

    from mongoengine import Document, EmailField, IntField

    class User(Document):
        email = EmailField()
        age = IntField(min_value=0, max_value=99)

    user = User(email='invalid@', age=24)
    user.validate()     # raises ValidationError (Invalid email address: ['email'])
    user.save()         # raises ValidationError (Invalid email address: ['email'])

    user2 = User(email='john.doe@garbage.com', age=1000)
    user2.save()        # raises ValidationError (Integer value is too large: ['age'])

Custom validation
=================

The following features can be used to customize validation:

* Field `validation` parameter

.. code-block:: python

    def not_john_doe(name):
        if name == 'John Doe':
            raise ValidationError("John Doe is not a valid name")

    class Person(Document):
        full_name = StringField(validation=not_john_doe)

    Person(full_name='Billy Doe').save()
    Person(full_name='John Doe').save()  # raises ValidationError (John Doe is not a valid name)


* Document `clean` method

This method is called as part of :meth:`~mongoengine.document.Document.save` and should be used to provide
custom model validation and/or to modify some of the field values prior to validation.
For instance, you could use it to automatically provide a value for a field, or to do validation
that requires access to more than a single field.

.. code-block:: python

    class Essay(Document):
        status = StringField(choices=('Published', 'Draft'), required=True)
        pub_date = DateTimeField()

        def clean(self):
            # Validate that only published essays have a `pub_date`
            if self.status == 'Draft' and self.pub_date is not None:
                raise ValidationError('Draft entries should not have a publication date.')
            # Set the pub_date for published items if not set.
            if self.status == 'Published' and self.pub_date is None:
                self.pub_date = datetime.now()

.. note::
    Cleaning is only called if validation is turned on and when calling
    :meth:`~mongoengine.Document.save`.

* Adding custom Field classes

We recommend using the fields provided by MongoEngine as much as possible. However, it is also possible
to subclass a Field and encapsulate some validation by overriding the `validate` method.

.. code-block:: python

    class AgeField(IntField):

        def validate(self, value):
            super(AgeField, self).validate(value)  # let IntField.validate run first
            if value == 60:
                self.error('60 is not allowed')

    class Person(Document):
        age = AgeField(min_value=0, max_value=99)

    Person(age=20).save()    # passes
    Person(age=1000).save()  # raises ValidationError (Integer value is too large: ['age'])
    Person(age=60).save()    # raises ValidationError (Person:None) (60 is not allowed: ['age'])


.. note::

    When overriding `validate`, use `self.error("your-custom-error")` instead of raising ValidationError explicitly;
    it provides better context for the error message.

Skipping validation
====================

Although discouraged, as it allows field constraints to be violated, if for some reason you need to disable
the validation and cleaning of a document when you call :meth:`~mongoengine.document.Document.save`, you can use `.save(validate=False)`.

.. code-block:: python

    class Person(Document):
        age = IntField(max_value=100)

    Person(age=1000).save()  # raises ValidationError (Integer value is too large)

    Person(age=1000).save(validate=False)
    person = Person.objects.first()
    assert person.age == 1000
@ -7,83 +7,56 @@ MongoDB. To install it, simply run

.. code-block:: console

    $ python -m pip install -U mongoengine

:doc:`tutorial`
    A quick tutorial building a tumblelog to get you up and running with
    MongoEngine.

:doc:`guide/index`
    The full guide to MongoEngine --- from modeling documents to storing files,
    from querying for data to firing signals and *everything* between.

:doc:`apireference`
    The complete API documentation --- the innards of documents, querysets and fields.

:doc:`upgrade`
    How to upgrade MongoEngine.

:doc:`faq`
    Frequently Asked Questions

:doc:`django`
    Using MongoEngine and Django

MongoDB and driver support
--------------------------

MongoEngine is based on the PyMongo driver and tested against multiple versions of MongoDB.
For further details, please refer to the `readme <https://github.com/MongoEngine/mongoengine#mongoengine>`_.

Community
---------

To get help with using MongoEngine, use the `MongoEngine Users mailing list
<http://groups.google.com/group/mongoengine-users>`_ or the ever popular
`stackoverflow <http://www.stackoverflow.com>`_.

Contributing
------------

**Yes please!** We are always looking for contributions, additions and improvements.

The source is available on `GitHub <http://github.com/MongoEngine/mongoengine>`_
and contributions are always encouraged. Contributions can be as simple as
minor tweaks to this documentation, the website or the core.

To contribute, fork the project on
`GitHub <http://github.com/MongoEngine/mongoengine>`_ and send a
pull request.

Changes
-------

See the :doc:`changelog` for a full list of changes to MongoEngine and
:doc:`upgrade` for upgrade information.

.. note:: Always read and test the `upgrade <upgrade>`_ documentation before
    putting updates live in production **;)**

Offline Reading
---------------

Download the docs in `pdf <https://media.readthedocs.org/pdf/mongoengine-odm/latest/mongoengine-odm.pdf>`_
or `epub <https://media.readthedocs.org/epub/mongoengine-odm/latest/mongoengine-odm.epub>`_
formats for offline reading.


.. toctree::
    :maxdepth: 1
    :numbered:
    :hidden:

    tutorial
    guide/index
    apireference
    changelog
    upgrade
    faq
    django

Indices and tables
------------------
@ -91,3 +64,4 @@ Indices and tables
|
|||||||
* :ref:`genindex`
|
* :ref:`genindex`
|
||||||
* :ref:`modindex`
|
* :ref:`modindex`
|
||||||
* :ref:`search`
|
* :ref:`search`
|
||||||
|
|
||||||
|
@ -1,3 +0,0 @@
Sphinx==3.3.0
sphinx-rtd-theme==0.5.0
readthedocs-sphinx-ext==2.1.1
@ -1,83 +1,73 @@
========
Tutorial
========

This tutorial introduces **MongoEngine** by means of example --- we will walk
through how to create a simple **Tumblelog** application. A tumblelog is a
blog that supports mixed media content, including text, images, links, video,
audio, etc. For simplicity's sake, we'll stick to text, image, and link
entries. As the purpose of this tutorial is to introduce MongoEngine, we'll
focus on the data-modelling side of the application, leaving out a user
interface.

Getting started
===============

Before we start, make sure that a copy of MongoDB is running in an accessible
location --- running it locally will be easier, but if that is not an option
then it may be run on a remote server. If you haven't installed MongoEngine,
simply use pip to install it like so::

    $ python -m pip install mongoengine

Before we can start using MongoEngine, we need to tell it how to connect to our
instance of :program:`mongod`. For this we use the :func:`~mongoengine.connect`
function. If running locally, the only argument we need to provide is the name
of the MongoDB database to use::

    from mongoengine import *

    connect('tumblelog')

There are lots of options for connecting to MongoDB; for more information about
them, see the :ref:`guide-connecting` guide.

Defining our documents
======================

MongoDB is *schemaless*, which means that no schema is enforced by the database
--- we may add and remove fields however we want and MongoDB won't complain.
This makes life a lot easier in many regards, especially when there is a change
to the data model. However, defining schemas for our documents can help to iron
out bugs involving incorrect types or missing fields, and also allow us to
define utility methods on our documents in the same way that traditional
:abbr:`ORMs (Object-Relational Mappers)` do.

In our Tumblelog application we need to store several different types of
information. We will need to have a collection of **users**, so that we may
link posts to an individual. We also need to store our different types of
**posts** (e.g. text, image and link) in the database. To aid navigation of our
Tumblelog, posts may have **tags** associated with them, so that the list of
posts shown to the user may be limited to posts that have been assigned a
specific tag. Finally, it would be nice if **comments** could be added to
posts. We'll start with **users**, as the other document models are slightly
more involved.

Users
-----

Just as if we were using a relational database with an ORM, we need to define
which fields a :class:`User` may have, and what types of data they might store::

    class User(Document):
        email = StringField(required=True)
        first_name = StringField(max_length=50)
        last_name = StringField(max_length=50)

This looks similar to how the structure of a table would be defined in a
regular ORM. The key difference is that this schema will never be passed on to
MongoDB --- this will only be enforced at the application level, making future
changes easy to manage. Also, the User documents will be stored in a
MongoDB *collection* rather than a table.

Posts, Comments and Tags
------------------------

Now we'll think about how to store the rest of the information. If we were
using a relational database, we would most likely have a table of **posts**, a
table of **comments** and a table of **tags**. To associate the comments with
individual posts, we would put a column in the comments table that contained a
foreign key to the posts table. We'd also need a link table to provide the
many-to-many relationship between posts and tags. Then we'd need to address the
problem of storing the specialised post-types (text, image and link). There are
several ways we can achieve this, but each of them has its problems --- none
@ -85,25 +75,21 @@ of them stand out as particularly intuitive solutions.
Posts
^^^^^

Happily MongoDB *isn't* a relational database, so we're not going to do it that
way. As it turns out, we can use MongoDB's schemaless nature to provide us with
a much nicer solution. We will store all of the posts in *one collection* and
each post type will only store the fields it needs. If we later want to add
video posts, we don't have to modify the collection at all, we just *start
using* the new fields we need to support video posts. This fits with the
Object-Oriented principle of *inheritance* nicely. We can think of
:class:`Post` as a base class, and :class:`TextPost`, :class:`ImagePost` and
:class:`LinkPost` as subclasses of :class:`Post`. In fact, MongoEngine supports
this kind of modeling out of the box --- all you need to do is turn on inheritance
by setting :attr:`allow_inheritance` to True in the :attr:`meta`::

    class Post(Document):
        title = StringField(max_length=120, required=True)
        author = ReferenceField(User)

        meta = {'allow_inheritance': True}

    class TextPost(Post):
        content = StringField()
@ -114,21 +100,20 @@ by setting :attr:`allow_inheritance` to True in the :attr:`meta`::
        link_url = StringField()

We are storing a reference to the author of the posts using a
:class:`~mongoengine.fields.ReferenceField` object. These are similar to foreign key
fields in traditional ORMs, and are automatically translated into references
when they are saved, and dereferenced when they are loaded.
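As a quick sketch of that round trip (the user and post values here are
illustrative)::

    author = User(email='ross@example.com').save()
    TextPost(title='Refs', author=author, content='...').save()

    post = TextPost.objects.first()
    # the stored reference has been dereferenced back into a User document
    print(post.author.email)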
Tags
^^^^

Now that we have our Post models figured out, how will we attach tags to them?
MongoDB allows us to store lists of items natively, so rather than having a
link table, we can just store a list of tags in each post. So, for both
efficiency and simplicity's sake, we'll store the tags as strings directly
within the post, rather than storing references to tags in a separate
collection. Especially as tags are generally very short (often even shorter
than a document's id), this denormalization won't impact the size of the
database very strongly. Let's take a look at the code of our modified
:class:`Post` class::

    class Post(Document):
@ -136,24 +121,21 @@ database very strongly. Let's take a look at the code of our modified
        author = ReferenceField(User)
        tags = ListField(StringField(max_length=30))

The :class:`~mongoengine.fields.ListField` object that is used to define a Post's tags
takes a field object as its first argument --- this means that you can have
lists of any type of field (including lists).

.. note:: We don't need to modify the specialized post types as they all
   inherit from :class:`Post`.

Comments
^^^^^^^^

A comment is typically associated with *one* post. In a relational database, to
display a post with its comments, we would have to retrieve the post from the
database and then query the database again for the comments associated with the
post. This works, but there is no real reason to be storing the comments
separately from their associated posts, other than to work around the
relational model. Using MongoDB we can store the comments as a list of
*embedded documents* directly on a post document. An embedded document should
be treated no differently than a regular document; it just doesn't have its own
collection in the database. Using MongoEngine, we can define the structure of
embedded documents, along with utility methods, in exactly the same way we do
with regular documents::
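    # A minimal sketch of such an embedded document (the field name is illustrative):
    class Comment(EmbeddedDocument):
        content = StringField()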
@ -173,7 +155,7 @@ We can then store a list of comment documents in our post document::
Handling deletions of references
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

The :class:`~mongoengine.fields.ReferenceField` object takes a keyword
`reverse_delete_rule` for handling deletion rules if the reference is deleted.
To delete all the posts if a user is deleted, set the rule::
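    # A sketch: the CASCADE rule removes a user's posts along with the user.
    class Post(Document):
        title = StringField(max_length=120, required=True)
        author = ReferenceField(User, reverse_delete_rule=CASCADE)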
@ -183,9 +165,9 @@ To delete all the posts if a user is deleted set the rule::
        tags = ListField(StringField(max_length=30))
        comments = ListField(EmbeddedDocumentField(Comment))

See :class:`~mongoengine.fields.ReferenceField` for more information.

.. note::
    MapFields and DictFields currently don't support automatic handling of
    deleted references.
@ -196,37 +178,33 @@ Now that we've defined how our documents will be structured, let's start adding
some documents to the database. Firstly, we'll need to create a :class:`User`
object::

    ross = User(email='ross@example.com', first_name='Ross', last_name='Lawley').save()

.. note::
    We could have also defined our user using attribute syntax::

        ross = User(email='ross@example.com')
        ross.first_name = 'Ross'
        ross.last_name = 'Lawley'
        ross.save()

Assign another user to a variable called ``john``, just like we did above with
``ross``.

Now that we've got our users in the database, let's add a couple of posts::

    post1 = TextPost(title='Fun with MongoEngine', author=john)
    post1.content = 'Took a look at MongoEngine today, looks pretty cool.'
    post1.tags = ['mongodb', 'mongoengine']
    post1.save()

    post2 = LinkPost(title='MongoEngine Documentation', author=ross)
    post2.link_url = 'http://docs.mongoengine.com/'
    post2.tags = ['mongoengine']
    post2.save()

.. note:: If you change a field on an object that has already been saved and
   then call :meth:`save` again, the document will be updated.

Accessing our data
==================

So now we've got a couple of posts in our database, how do we display them?
Each document class (i.e. any class that inherits either directly or indirectly
from :class:`~mongoengine.Document`) has an :attr:`objects` attribute, which is
@ -234,17 +212,16 @@ used to access the documents in the database collection associated with that
class. So let's see how we can get our posts' titles::

    for post in Post.objects:
        print(post.title)

Retrieving type-specific information
------------------------------------

This will print the titles of our posts, one on each line. But what if we want
to access the type-specific data (link_url, content, etc.)? One way is simply
to use the :attr:`objects` attribute of a subclass of :class:`Post`::

    for post in TextPost.objects:
        print(post.content)

Using TextPost's :attr:`objects` attribute only returns documents that were
created using :class:`TextPost`. Actually, there is a more general rule here:
@ -261,21 +238,22 @@ instances of :class:`Post` --- they were instances of the subclass of
practice::

    for post in Post.objects:
        print(post.title)
        print('=' * len(post.title))

        if isinstance(post, TextPost):
            print(post.content)

        if isinstance(post, LinkPost):
            print('Link: {}'.format(post.link_url))

This would print the title of each post, followed by the content if it was a
text post, and "Link: <url>" if it was a link post.

Searching our posts by tag
--------------------------

The :attr:`objects` attribute of a :class:`~mongoengine.Document` is actually a
:class:`~mongoengine.queryset.QuerySet` object. This lazily queries the
database only when you need the data. It may also be filtered to narrow down
@ -283,7 +261,7 @@ your query. Let's adjust our query so that only posts with the tag "mongodb"
are returned::

    for post in Post.objects(tags='mongodb'):
        print(post.title)

There are also methods available on :class:`~mongoengine.queryset.QuerySet`
objects that allow different results to be returned, for example, calling
@ -292,11 +270,5 @@ the first matched by the query you provide. Aggregation functions may also be
used on :class:`~mongoengine.queryset.QuerySet` objects::

    num_posts = Post.objects(tags='mongodb').count()
    print('Found {} posts with tag "mongodb"'.format(num_posts))
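For instance (a small sketch building on the models above), :meth:`first` hands
back a single document, or `None` when nothing matches::

    latest = Post.objects(tags='mongoengine').first()
    if latest is not None:
        print(latest.title)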
Learning more about MongoEngine
-------------------------------

If you got this far you've made a great start, so well done! The next step on
your MongoEngine journey is the `full user guide <guide/index.html>`_, where
you can learn in-depth about how to use MongoEngine and MongoDB.
480
docs/upgrade.rst
@ -1,449 +1,12 @@
#########
Upgrading
#########

Development
***********
(Fill this out whenever you introduce breaking changes to MongoEngine)

URLField's constructor no longer takes `verify_exists`.

0.15.0
******

0.14.0
******
This release includes a few bug fixes and a significant code cleanup. The most
important change is that `QuerySet.as_pymongo` no longer supports a
`coerce_types` mode. If you used it in the past, a) please let us know of your
use case, b) you'll need to override `as_pymongo` to get the desired outcome.

This release also makes the EmbeddedDocument not hashable by default. If you
use embedded documents in sets or dictionaries, you might have to override
`__hash__` and implement a hashing logic specific to your use case. See #1528
for the reason behind this change.
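A minimal sketch of such an override (hash on whichever fields identify your
embedded document; the `name` field here is illustrative)::

    class Item(EmbeddedDocument):
        name = StringField()

        def __hash__(self):
            # identity for set/dict membership, specific to this use case
            return hash(self.name)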
0.13.0
******
This release adds Unicode support to the `EmailField` and changes its
structure significantly. Previously, email addresses containing Unicode
characters didn't work at all. Starting with v0.13.0, domains with Unicode
characters are supported out of the box, meaning some emails that previously
didn't pass validation now do. Make sure the rest of your application can
accept such email addresses. Additionally, if you subclassed the `EmailField`
in your application and overrode `EmailField.EMAIL_REGEX`, you will have to
adjust your code to override `EmailField.USER_REGEX`, `EmailField.DOMAIN_REGEX`,
and potentially `EmailField.UTF8_USER_REGEX`.
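A sketch of the new shape for such a subclass (the pattern itself is purely
illustrative, not the library's own)::

    import re

    class StrictEmailField(EmailField):
        # override the user part of the address instead of EMAIL_REGEX
        USER_REGEX = re.compile(r"^[a-z0-9._]+$", re.IGNORECASE)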
0.12.0
******
This release includes various fixes for the `BaseQuerySet` methods and how they
are chained together. Since version 0.10.1, applying limit/skip/hint/batch_size
to an already-existing queryset wouldn't modify the underlying PyMongo cursor.
This has been fixed now, so you'll need to make sure that your code didn't rely
on the broken implementation.

Additionally, the public `BaseQuerySet.clone_into` has been renamed to a private
`_clone_into`. If you directly used that method in your code, you'll need to
rename its occurrences.

0.11.0
******
This release includes a major rehaul of MongoEngine's code quality and
introduces a few breaking changes. It also touches many different parts of
the package and although all the changes have been tested and scrutinized,
you're encouraged to thoroughly test the upgrade.

The first breaking change involves renaming `ConnectionError` to `MongoEngineConnectionError`.
If you import or catch this exception, you'll need to rename it in your code.

The second breaking change drops Python v2.6 support. If you run MongoEngine on
that Python version, you'll need to upgrade it first.

The third breaking change drops an old backward compatibility measure where
`from mongoengine.base import ErrorClass` would work on top of
`from mongoengine.errors import ErrorClass` (where `ErrorClass` is e.g.
`ValidationError`). If you import any exceptions from `mongoengine.base`,
change it to `mongoengine.errors`.

0.10.8
******
This version fixed an issue where specifying a MongoDB URI host would override
more information than it should. These changes are minor, but they still
subtly modify the connection logic and thus you're encouraged to test your
MongoDB connection before shipping v0.10.8 in production.

0.10.7
******

`QuerySet.aggregate_sum` and `QuerySet.aggregate_average` are dropped. Use
`QuerySet.sum` and `QuerySet.average` instead, which use the aggregation framework
by default from now on.

0.9.0
*****

The 0.8.7 package on pypi was corrupted. If upgrading from 0.8.7 to 0.9.0, please follow: ::

    python -m pip uninstall pymongo
    python -m pip uninstall mongoengine
    python -m pip install pymongo==2.8
    python -m pip install mongoengine

0.8.7
*****

Calling reload on deleted / nonexistent documents now raises a DoesNotExist
exception.


0.8.2 to 0.8.3
**************

Minor change that may impact users:

DynamicDocument fields are now stored in creation order after any declared
fields. Previously they were stored alphabetically.


0.7 to 0.8
**********

There have been numerous backwards breaking changes in 0.8. The reasons for
these are to ensure that MongoEngine has sane defaults going forward and that it
performs the best it can out of the box. Where possible there have been
FutureWarnings to help get you ready for the change, but that hasn't been
possible for the whole of the release.

.. warning:: Breaking changes - test upgrading on a test system before putting
   live. There may be multiple manual steps in migrating and these are best honed
   on a staging / test system.

Python and PyMongo
==================

MongoEngine requires python 2.6 (or above) and pymongo 2.5 (or above).

Data Model
==========

Inheritance
-----------

The inheritance model has changed: we no longer need to store an array of
:attr:`types` with the model; we can just use the classname in :attr:`_cls`.
This means that you will have to update your indexes for each of your
inherited classes like so: ::

    # 1. Declaration of the class
    class Animal(Document):
        name = StringField()
        meta = {
            'allow_inheritance': True,
            'indexes': ['name']
        }

    # 2. Remove _types
    collection = Animal._get_collection()
    collection.update({}, {"$unset": {"_types": 1}}, multi=True)

    # 3. Confirm extra data is removed
    count = collection.find({'_types': {"$exists": True}}).count()
    assert count == 0

    # 4. Remove indexes
    info = collection.index_information()
    indexes_to_drop = [key for key, value in info.items()
                       if '_types' in dict(value['key'])]
    for index in indexes_to_drop:
        collection.drop_index(index)

    # 5. Recreate indexes
    Animal.ensure_indexes()


Document Definition
-------------------

The default for inheritance has changed - it is now off by default and
:attr:`_cls` will not be stored automatically with the class. So if you extend
your :class:`~mongoengine.Document` or :class:`~mongoengine.EmbeddedDocument`
you will need to declare :attr:`allow_inheritance` in the meta data like so: ::

    class Animal(Document):
        name = StringField()

        meta = {'allow_inheritance': True}

Previously, if you had data in the database that wasn't defined in the Document
definition, it would be set as an attribute on the document. This is no longer
the case and the data is set only in the ``document._data`` dictionary: ::

    >>> from mongoengine import *
    >>> class Animal(Document):
    ...     name = StringField()
    ...
    >>> cat = Animal(name="kit", size="small")

    # 0.7
    >>> cat.size
    u'small'

    # 0.8
    >>> cat.size
    Traceback (most recent call last):
      File "<stdin>", line 1, in <module>
    AttributeError: 'Animal' object has no attribute 'size'

The Document class has introduced a reserved function `clean()`, which will be
called before saving the document. If your document class happens to have a method
with the same name, please rename it. ::

    def clean(self):
        # Reserved hook: runs automatically before the document is saved.
        pass

ReferenceField
--------------

ReferenceFields now store ObjectIds by default - this is more efficient than
DBRefs as we already know what Document types they reference::

    # Old code
    class Animal(Document):
        name = ReferenceField('self')

    # New code to keep dbrefs
    class Animal(Document):
        name = ReferenceField('self', dbref=True)

To migrate all the references you need to touch each object and mark it as dirty
eg::

    # Doc definition
    class Person(Document):
        name = StringField()
        parent = ReferenceField('self')
        friends = ListField(ReferenceField('self'))

    # Mark all ReferenceFields as dirty and save
    for p in Person.objects:
        p._mark_as_changed('parent')
        p._mark_as_changed('friends')
        p.save()

`An example test migration for ReferenceFields is available on github
<https://github.com/MongoEngine/mongoengine/blob/master/tests/migration/refrencefield_dbref_to_object_id.py>`_.

.. note:: Internally mongoengine handles ReferenceFields the same, so they are
   converted to DBRef on loading and ObjectIds or DBRefs depending on settings
   on storage.

UUIDField
---------

UUIDFields now default to storing binary values::

    # Old code
    class Animal(Document):
        uuid = UUIDField()

    # New code
    class Animal(Document):
        uuid = UUIDField(binary=False)

To migrate all the uuids you need to touch each object and mark it as dirty
eg::

    # Doc definition
    class Animal(Document):
        uuid = UUIDField()

    # Mark all UUIDFields as dirty and save
    for a in Animal.objects:
        a._mark_as_changed('uuid')
        a.save()

`An example test migration for UUIDFields is available on github
<https://github.com/MongoEngine/mongoengine/blob/master/tests/migration/uuidfield_to_binary.py>`_.

DecimalField
------------

DecimalFields now store floats - previously they stored strings, which made it
impossible to do comparisons correctly when querying::

    # Old code
    class Person(Document):
        balance = DecimalField()

    # New code
    class Person(Document):
        balance = DecimalField(force_string=True)

To migrate all the DecimalFields you need to touch each object and mark it as dirty
eg::

    # Doc definition
    class Person(Document):
        balance = DecimalField()

    # Mark all DecimalFields as dirty and save
    for p in Person.objects:
        p._mark_as_changed('balance')
        p.save()

.. note:: DecimalFields have also been improved with the addition of precision
   and rounding. See :class:`~mongoengine.fields.DecimalField` for more information.

`An example test migration for DecimalFields is available on github
<https://github.com/MongoEngine/mongoengine/blob/master/tests/migration/decimalfield_as_float.py>`_.

Cascading Saves
---------------
To improve performance document saves will no longer automatically cascade.
Any changes to a Document's references will either have to be saved manually or
you will have to explicitly tell it to cascade on save::

    # At the class level:
    class Person(Document):
        meta = {'cascade': True}

    # Or on save:
    my_document.save(cascade=True)

Storage
-------

Documents and Embedded Documents are now serialized based on declared field order.
Previously, the data was passed to mongodb as a dictionary, which meant that
order wasn't guaranteed - so things like ``$addToSet`` operations on
:class:`~mongoengine.EmbeddedDocument` could potentially fail in unexpected
ways.

If this impacts you, you may want to rewrite the objects using the
``doc.mark_as_dirty('field')`` pattern described above. If you are using a
compound primary key then you will need to ensure the order is fixed and matches
your EmbeddedDocument to that order.

Querysets
=========

Attack of the clones
--------------------

Querysets now return clones and should no longer be considered editable in
place. This brings us in line with how Django's querysets work and removes a
long-running gotcha. If you edit your querysets in place you will have to
update your code like so: ::

    # Old code:
    mammals = Animal.objects(type="mammal")
    mammals.filter(order="Carnivora")  # Returns a cloned queryset that isn't assigned to anything - so this will break in 0.8
    [m for m in mammals]  # This will return all mammals in 0.8 as the 2nd filter returned a new queryset

    # Update example a) assign the queryset after a change:
    mammals = Animal.objects(type="mammal")
    carnivores = mammals.filter(order="Carnivora")  # Reassign the new queryset so filter can be applied
    [m for m in carnivores]  # This will return all carnivores

    # Update example b) chain the queryset:
    mammals = Animal.objects(type="mammal").filter(order="Carnivora")  # The final queryset is assigned to mammals
    [m for m in mammals]  # This will return all carnivores

Len iterates the queryset
-------------------------

If you ever did `len(queryset)`, it previously did a `count()` under the covers,
which caused some unusual issues. As `len(queryset)` is most often used by
`list(queryset)`, we now cache the queryset results and use that for the length.

This isn't as performant as a `count()` and if you aren't iterating the
queryset you should upgrade to use count::

    # Old code
    len(Animal.objects(type="mammal"))

    # New code
    Animal.objects(type="mammal").count()


.only() now inline with .exclude()
----------------------------------

The behaviour of `.only()` was highly ambiguous; now it works as a mirror of
`.exclude()`. Chaining `.only()` calls will increase the fields required::

    # Old code
    Animal.objects().only(['type', 'name']).only('name', 'order')  # Would have returned just `name`

    # New code
    Animal.objects().only('name')

    # Note:
    Animal.objects().only(['name']).only('order')  # Now returns `name` *and* `order`


Client
======
PyMongo 2.4 came with a new connection client, MongoClient_, and started the
deprecation of the old :class:`~pymongo.connection.Connection`. MongoEngine
now uses the latest `MongoClient` for connections. By default operations were
`safe` but if you turned them off or used the connection directly this will
impact your queries.

Querysets
---------

Safe
^^^^

`safe` has been deprecated in the new MongoClient connection. Please use
`write_concern` instead. As `safe` always defaulted to `True`, normally no code
change is required. To disable confirmation of the write just pass `{"w": 0}`
eg: ::

    # Old code
    Animal(name="Dinosaur").save(safe=False)

    # New code
    Animal(name="Dinosaur").save(write_concern={"w": 0})

Write Concern
^^^^^^^^^^^^^

`write_options` has been replaced with `write_concern` to bring it inline with
pymongo. To upgrade, simply rename any instances where you used the `write_options`
keyword to `write_concern` like so::

    # Old code
    Animal(name="Dinosaur").save(write_options={"w": 2})

    # New code
    Animal(name="Dinosaur").save(write_concern={"w": 2})


Indexes
=======

Index methods are no longer tied to querysets but rather to the document class.
Although `QuerySet._ensure_indexes` and `QuerySet.ensure_index` still exist,
they should be replaced with :func:`~mongoengine.Document.ensure_indexes` /
:func:`~mongoengine.Document.ensure_index`.
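For example, a sketch of the rename (`'name'` is an illustrative index key)::

    # Old code
    Animal.objects.ensure_index('name')

    # New code
    Animal.ensure_index('name')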
SequenceFields
==============

:class:`~mongoengine.fields.SequenceField` now inherits from `BaseField` to
allow flexible storage of the calculated value. As such, MIN and MAX settings
are no longer handled.

.. _MongoClient: http://blog.mongodb.org/post/36666163412/introducing-mongoclient

0.6 to 0.7
**********

Cascade saves
=============

Saves will raise a `FutureWarning` if they cascade and cascade hasn't been set
to True. This is because in 0.8 it will default to False. If you require
@ -457,11 +20,11 @@ via `save` eg ::
    # Or in code:
    my_document.save(cascade=True)

.. note::
    Remember: cascading saves **do not** cascade through lists.

ReferenceFields
===============

ReferenceFields can now store references as ObjectId strings instead of DBRefs.
This will become the default in 0.8 and if `dbref` is not set a `FutureWarning`
@ -490,7 +53,7 @@ migrate ::
item_frequencies
================

In the 0.6 series we added support for null / zero / false values in
item_frequencies. A side effect was to return keys in the value they are
@ -498,15 +61,8 @@ stored in rather than as string representations. Your code may need to be
updated to handle native types rather than string keys for the results of
item frequency queries.

BinaryFields
============

Binary fields have been updated so that they are native binary types. If you
previously were doing `str` comparisons with binary field values you will have
to update and wrap the value in a `str`.

0.5 to 0.6
**********

Embedded Documents - if you had a `pk` field you will have to rename it from
`_id` to `pk`, as pk is no longer a property of Embedded Documents.
@ -521,26 +77,26 @@ Document.objects.with_id - now raises an InvalidQueryError if used with a
filter.

FutureWarning - A future warning has been added to all inherited classes that
don't define :attr:`allow_inheritance` in their meta.

You may need to update pyMongo to 2.0 for use with Sharding.

0.4 to 0.5
**********

There have been the following backwards incompatibilities from 0.4 to 0.5. The
main areas of change are: choices in fields, map_reduce and collection names.

Choice options
==============

These are now expected to be an iterable of tuples, with the first element in each
tuple being the actual value to be stored. The second element is the
human-readable name for the option.
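For example (a sketch; the field and values are illustrative)::

    SIZE = (('S', 'Small'), ('M', 'Medium'), ('L', 'Large'))

    class Shirt(Document):
        size = StringField(max_length=1, choices=SIZE)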
PyMongo / MongoDB
=================

map reduce now requires pymongo 1.11+. The pymongo `merge_output` and
`reduce_output` parameters have been deprecated.
@ -554,10 +110,10 @@ such the following have been changed:
Default collection naming
=========================

Previously it was just lowercase; it's now much more pythonic and readable, as
it's lowercase with underscores. Previously::

    class MyAceDocument(Document):
        pass
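    # A sketch of the effect on the derived collection name:
    MyAceDocument._get_collection_name()  # => "my_ace_document" (previously "myacedocument")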
@ -620,9 +176,3 @@ Alternatively, you can rename your collections eg ::
    else:
        print("Upgraded collection names")


mongodb 1.8 > 2.0 +
===================

It's been reported that indexes may need to be recreated for the newer index
format. To do this, drop the indexes and call ``ensure_indexes`` on each model.
@ -1,43 +1,23 @@
# Import submodules so that we can expose their __all__
from mongoengine import (
    connection,
    document,
    errors,
    fields,
    queryset,
    signals,
)

# Import everything from each submodule so that it can be accessed via
# mongoengine, e.g. instead of `from mongoengine.connection import connect`,
# users can simply use `from mongoengine import connect`, or even
# `from mongoengine import *` and then `connect('testdb')`.
from mongoengine.connection import *  # noqa: F401
from mongoengine.document import *  # noqa: F401
from mongoengine.errors import *  # noqa: F401
from mongoengine.fields import *  # noqa: F401
from mongoengine.queryset import *  # noqa: F401
from mongoengine.signals import *  # noqa: F401

__all__ = (
    list(document.__all__)
    + list(fields.__all__)
    + list(connection.__all__)
    + list(queryset.__all__)
    + list(signals.__all__)
    + list(errors.__all__)
)


VERSION = (0, 23, 1)


def get_version():
    """Return the VERSION as a string.

    For example, if `VERSION == (0, 10, 7)`, return '0.10.7'.
    """
    return ".".join(map(str, VERSION))


__version__ = get_version()
1490
mongoengine/base.py
Normal file
File diff suppressed because it is too large
@ -1,33 +0,0 @@
# Base module is split into several files for convenience. Files inside of
# this module should import from a specific submodule (e.g.
# `from mongoengine.base.document import BaseDocument`), but all of the
# other modules should import directly from the top-level module (e.g.
# `from mongoengine.base import BaseDocument`). This approach is cleaner and
# also helps with cyclical import errors.
from mongoengine.base.common import *
from mongoengine.base.datastructures import *
from mongoengine.base.document import *
from mongoengine.base.fields import *
from mongoengine.base.metaclasses import *

__all__ = (
    # common
    "UPDATE_OPERATORS",
    "_document_registry",
    "get_document",
    # datastructures
    "BaseDict",
    "BaseList",
    "EmbeddedDocumentList",
    "LazyReference",
    # document
    "BaseDocument",
    # fields
    "BaseField",
    "ComplexBaseField",
    "ObjectIdField",
    "GeoJsonBaseField",
    # metaclasses
    "DocumentMetaclass",
    "TopLevelDocumentMetaclass",
)
@ -1,62 +0,0 @@
from mongoengine.errors import NotRegistered

__all__ = ("UPDATE_OPERATORS", "get_document", "_document_registry")


UPDATE_OPERATORS = {
    "set",
    "unset",
    "inc",
    "dec",
    "mul",
    "pop",
    "push",
    "push_all",
    "pull",
    "pull_all",
    "add_to_set",
    "set_on_insert",
    "min",
    "max",
    "rename",
}


_document_registry = {}


def get_document(name):
    """Get a registered Document class by name."""
    doc = _document_registry.get(name, None)
    if not doc:
        # Possible old style name
        single_end = name.split(".")[-1]
        compound_end = ".%s" % single_end
        possible_match = [
            k for k in _document_registry if k.endswith(compound_end) or k == single_end
        ]
        if len(possible_match) == 1:
            doc = _document_registry.get(possible_match.pop(), None)
    if not doc:
        raise NotRegistered(
            """
            `%s` has not been registered in the document registry.
            Importing the document class automatically registers it, has it
            been imported?
            """.strip()
            % name
        )
    return doc


def _get_documents_by_db(connection_alias, default_connection_alias):
    """Get all registered Documents class attached to a given database"""

    def get_doc_alias(doc_cls):
        return doc_cls._meta.get("db_alias", default_connection_alias)

    return [
        doc_cls
        for doc_cls in _document_registry.values()
        if get_doc_alias(doc_cls) == connection_alias
    ]
mongoengine/base/datastructures.py
@ -1,473 +0,0 @@
import weakref

from bson import DBRef

from mongoengine.common import _import_class
from mongoengine.errors import DoesNotExist, MultipleObjectsReturned

__all__ = (
    "BaseDict",
    "StrictDict",
    "BaseList",
    "EmbeddedDocumentList",
    "LazyReference",
)


def mark_as_changed_wrapper(parent_method):
    """Decorator that ensures _mark_as_changed method gets called."""

    def wrapper(self, *args, **kwargs):
        # Can't use super() in the decorator.
        result = parent_method(self, *args, **kwargs)
        self._mark_as_changed()
        return result

    return wrapper


def mark_key_as_changed_wrapper(parent_method):
    """Decorator that ensures _mark_as_changed method gets called with the key argument"""

    def wrapper(self, key, *args, **kwargs):
        # Can't use super() in the decorator.
        result = parent_method(self, key, *args, **kwargs)
        self._mark_as_changed(key)
        return result

    return wrapper
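A toy illustration of the two wrappers above; the TrackedDict class is hypothetical and only mimics how BaseDict applies them:

from mongoengine.base.datastructures import (
    mark_as_changed_wrapper,
    mark_key_as_changed_wrapper,
)

class TrackedDict(dict):
    """Hypothetical dict that logs mutations, mimicking BaseDict's approach."""

    def _mark_as_changed(self, key=None):
        print("changed:", key)

    __setitem__ = mark_key_as_changed_wrapper(dict.__setitem__)
    pop = mark_as_changed_wrapper(dict.pop)

d = TrackedDict(a=1)
d["b"] = 2   # prints "changed: b" (key-aware wrapper)
d.pop("a")   # prints "changed: None" (whole-container wrapper)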
class BaseDict(dict):
    """A special dict so we can watch any changes."""

    _dereferenced = False
    _instance = None
    _name = None

    def __init__(self, dict_items, instance, name):
        BaseDocument = _import_class("BaseDocument")

        if isinstance(instance, BaseDocument):
            self._instance = weakref.proxy(instance)
        self._name = name
        super().__init__(dict_items)

    def get(self, key, default=None):
        # get does not use __getitem__ by default so we must override it as well
        try:
            return self.__getitem__(key)
        except KeyError:
            return default

    def __getitem__(self, key):
        value = super().__getitem__(key)

        EmbeddedDocument = _import_class("EmbeddedDocument")
        if isinstance(value, EmbeddedDocument) and value._instance is None:
            value._instance = self._instance
        elif isinstance(value, dict) and not isinstance(value, BaseDict):
            value = BaseDict(value, None, f"{self._name}.{key}")
            super().__setitem__(key, value)
            value._instance = self._instance
        elif isinstance(value, list) and not isinstance(value, BaseList):
            value = BaseList(value, None, f"{self._name}.{key}")
            super().__setitem__(key, value)
            value._instance = self._instance
        return value

    def __getstate__(self):
        self.instance = None
        self._dereferenced = False
        return self

    def __setstate__(self, state):
        self = state
        return self

    __setitem__ = mark_key_as_changed_wrapper(dict.__setitem__)
    __delattr__ = mark_key_as_changed_wrapper(dict.__delattr__)
    __delitem__ = mark_key_as_changed_wrapper(dict.__delitem__)
    pop = mark_as_changed_wrapper(dict.pop)
    clear = mark_as_changed_wrapper(dict.clear)
    update = mark_as_changed_wrapper(dict.update)
    popitem = mark_as_changed_wrapper(dict.popitem)
    setdefault = mark_as_changed_wrapper(dict.setdefault)

    def _mark_as_changed(self, key=None):
        if hasattr(self._instance, "_mark_as_changed"):
            if key:
                self._instance._mark_as_changed(f"{self._name}.{key}")
            else:
                self._instance._mark_as_changed(self._name)
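In practice a DictField hands back a BaseDict bound to its parent document so that key-level writes are tracked. A rough sketch, assuming a local MongoDB reachable through connect() (the printed value of the internal _changed_fields list is approximate):

from mongoengine import Document, DictField, connect

connect("example_db")  # assumes a local mongod

class Settings(Document):
    prefs = DictField()

s = Settings(prefs={"theme": "light"}).save()
s.prefs["theme"] = "dark"   # BaseDict.__setitem__ marks "prefs.theme" as changed
print(s._changed_fields)    # roughly ['prefs.theme'], letting save() issue a narrow $set
s.save()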
class BaseList(list):
    """A special list so we can watch any changes."""

    _dereferenced = False
    _instance = None
    _name = None

    def __init__(self, list_items, instance, name):
        BaseDocument = _import_class("BaseDocument")

        if isinstance(instance, BaseDocument):
            self._instance = weakref.proxy(instance)
        self._name = name
        super().__init__(list_items)

    def __getitem__(self, key):
        # change index to positive value because MongoDB does not support negative one
        if isinstance(key, int) and key < 0:
            key = len(self) + key
        value = super().__getitem__(key)

        if isinstance(key, slice):
            # When receiving a slice operator, we don't convert the structure and bind
            # to parent's instance. This is buggy for now but would require more work to be handled properly
            return value

        EmbeddedDocument = _import_class("EmbeddedDocument")
        if isinstance(value, EmbeddedDocument) and value._instance is None:
            value._instance = self._instance
        elif isinstance(value, dict) and not isinstance(value, BaseDict):
            # Replace dict by BaseDict
            value = BaseDict(value, None, f"{self._name}.{key}")
            super().__setitem__(key, value)
            value._instance = self._instance
        elif isinstance(value, list) and not isinstance(value, BaseList):
            # Replace list by BaseList
            value = BaseList(value, None, f"{self._name}.{key}")
            super().__setitem__(key, value)
            value._instance = self._instance
        return value

    def __iter__(self):
        yield from super().__iter__()

    def __getstate__(self):
        self.instance = None
        self._dereferenced = False
        return self

    def __setstate__(self, state):
        self = state
        return self

    def __setitem__(self, key, value):
        changed_key = key
        if isinstance(key, slice):
            # In case of slice, we don't bother to identify the exact elements being updated
            # instead, we simply marks the whole list as changed
            changed_key = None

        result = super().__setitem__(key, value)
        self._mark_as_changed(changed_key)
        return result

    append = mark_as_changed_wrapper(list.append)
    extend = mark_as_changed_wrapper(list.extend)
    insert = mark_as_changed_wrapper(list.insert)
    pop = mark_as_changed_wrapper(list.pop)
    remove = mark_as_changed_wrapper(list.remove)
    reverse = mark_as_changed_wrapper(list.reverse)
    sort = mark_as_changed_wrapper(list.sort)
    __delitem__ = mark_as_changed_wrapper(list.__delitem__)
    __iadd__ = mark_as_changed_wrapper(list.__iadd__)
    __imul__ = mark_as_changed_wrapper(list.__imul__)

    def _mark_as_changed(self, key=None):
        if hasattr(self._instance, "_mark_as_changed"):
            if key is not None:
                self._instance._mark_as_changed(f"{self._name}.{key % len(self)}")
            else:
                self._instance._mark_as_changed(self._name)
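The same tracking applies to lists; a short sketch, assuming the connection from the previous example:

from mongoengine import Document, IntField, ListField

class Counter(Document):
    values = ListField(IntField())

c = Counter(values=[1, 2, 3]).save()
c.values[-1] = 99    # the key is normalized via key % len(self), so "values.2" is marked
c.values.append(4)   # append is wrapped without a key, so the whole list is marked
c.save()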
class EmbeddedDocumentList(BaseList):
    def __init__(self, list_items, instance, name):
        super().__init__(list_items, instance, name)
        self._instance = instance

    @classmethod
    def __match_all(cls, embedded_doc, kwargs):
        """Return True if a given embedded doc matches all the filter
        kwargs. If it doesn't return False.
        """
        for key, expected_value in kwargs.items():
            doc_val = getattr(embedded_doc, key)
            if doc_val != expected_value and str(doc_val) != expected_value:
                return False
        return True

    @classmethod
    def __only_matches(cls, embedded_docs, kwargs):
        """Return embedded docs that match the filter kwargs."""
        if not kwargs:
            return embedded_docs
        return [doc for doc in embedded_docs if cls.__match_all(doc, kwargs)]

    def filter(self, **kwargs):
        """
        Filters the list by only including embedded documents with the
        given keyword arguments.

        This method only supports simple comparison (e.g. .filter(name='John Doe'))
        and does not support operators like __gte, __lte, __icontains like queryset.filter does

        :param kwargs: The keyword arguments corresponding to the fields to
            filter on. *Multiple arguments are treated as if they are ANDed
            together.*
        :return: A new ``EmbeddedDocumentList`` containing the matching
            embedded documents.

        Raises ``AttributeError`` if a given keyword is not a valid field for
        the embedded document class.
        """
        values = self.__only_matches(self, kwargs)
        return EmbeddedDocumentList(values, self._instance, self._name)

    def exclude(self, **kwargs):
        """
        Filters the list by excluding embedded documents with the given
        keyword arguments.

        :param kwargs: The keyword arguments corresponding to the fields to
            exclude on. *Multiple arguments are treated as if they are ANDed
            together.*
        :return: A new ``EmbeddedDocumentList`` containing the non-matching
            embedded documents.

        Raises ``AttributeError`` if a given keyword is not a valid field for
        the embedded document class.
        """
        exclude = self.__only_matches(self, kwargs)
        values = [item for item in self if item not in exclude]
        return EmbeddedDocumentList(values, self._instance, self._name)

    def count(self):
        """
        The number of embedded documents in the list.

        :return: The length of the list, equivalent to the result of ``len()``.
        """
        return len(self)

    def get(self, **kwargs):
        """
        Retrieves an embedded document determined by the given keyword
        arguments.

        :param kwargs: The keyword arguments corresponding to the fields to
            search on. *Multiple arguments are treated as if they are ANDed
            together.*
        :return: The embedded document matched by the given keyword arguments.

        Raises ``DoesNotExist`` if the arguments used to query an embedded
        document returns no results. ``MultipleObjectsReturned`` if more
        than one result is returned.
        """
        values = self.__only_matches(self, kwargs)
        if len(values) == 0:
            raise DoesNotExist("%s matching query does not exist." % self._name)
        elif len(values) > 1:
            raise MultipleObjectsReturned(
                "%d items returned, instead of 1" % len(values)
            )

        return values[0]

    def first(self):
        """Return the first embedded document in the list, or ``None``
        if empty.
        """
        if len(self) > 0:
            return self[0]

    def create(self, **values):
        """
        Creates a new instance of the EmbeddedDocument and appends it to this EmbeddedDocumentList.

        .. note::
            the instance of the EmbeddedDocument is not automatically saved to the database.
            You still need to call .save() on the parent Document.

        :param values: A dictionary of values for the embedded document.
        :return: The new embedded document instance.
        """
        name = self._name
        EmbeddedClass = self._instance._fields[name].field.document_type_obj
        self._instance[self._name].append(EmbeddedClass(**values))

        return self._instance[self._name][-1]

    def save(self, *args, **kwargs):
        """
        Saves the ancestor document.

        :param args: Arguments passed up to the ancestor Document's save
            method.
        :param kwargs: Keyword arguments passed up to the ancestor Document's
            save method.
        """
        self._instance.save(*args, **kwargs)

    def delete(self):
        """
        Deletes the embedded documents from the database.

        .. note::
            The embedded document changes are not automatically saved
            to the database after calling this method.

        :return: The number of entries deleted.
        """
        values = list(self)
        for item in values:
            self._instance[self._name].remove(item)

        return len(values)

    def update(self, **update):
        """
        Updates the embedded documents with the given replacement values. This
        function does not support mongoDB update operators such as ``inc__``.

        .. note::
            The embedded document changes are not automatically saved
            to the database after calling this method.

        :param update: A dictionary of update values to apply to each
            embedded document.
        :return: The number of entries updated.
        """
        if len(update) == 0:
            return 0
        values = list(self)
        for item in values:
            for k, v in update.items():
                setattr(item, k, v)

        return len(values)
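The filter/get/create trio is the public face of this class on EmbeddedDocumentListField attributes. A sketch with hypothetical Comment and Post classes:

from mongoengine import (
    Document,
    EmbeddedDocument,
    EmbeddedDocumentListField,
    StringField,
)

class Comment(EmbeddedDocument):
    author = StringField()
    text = StringField()

class Post(Document):
    comments = EmbeddedDocumentListField(Comment)

post = Post(comments=[Comment(author="ada", text="hi"), Comment(author="bob", text="yo")])
post.comments.filter(author="ada")            # new EmbeddedDocumentList with ada's comments
post.comments.get(author="bob")               # exactly one match, else DoesNotExist/MultipleObjectsReturned
post.comments.create(author="eve", text="!")  # appended in memory; post.save() persists it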
class StrictDict:
    __slots__ = ()
    _special_fields = {"get", "pop", "iteritems", "items", "keys", "create"}
    _classes = {}

    def __init__(self, **kwargs):
        for k, v in kwargs.items():
            setattr(self, k, v)

    def __getitem__(self, key):
        key = "_reserved_" + key if key in self._special_fields else key
        try:
            return getattr(self, key)
        except AttributeError:
            raise KeyError(key)

    def __setitem__(self, key, value):
        key = "_reserved_" + key if key in self._special_fields else key
        return setattr(self, key, value)

    def __contains__(self, key):
        return hasattr(self, key)

    def get(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            return default

    def pop(self, key, default=None):
        v = self.get(key, default)
        try:
            delattr(self, key)
        except AttributeError:
            pass
        return v

    def iteritems(self):
        for key in self:
            yield key, self[key]

    def items(self):
        return [(k, self[k]) for k in iter(self)]

    def iterkeys(self):
        return iter(self)

    def keys(self):
        return list(iter(self))

    def __iter__(self):
        return (key for key in self.__slots__ if hasattr(self, key))

    def __len__(self):
        return len(list(self.items()))

    def __eq__(self, other):
        return list(self.items()) == list(other.items())

    def __ne__(self, other):
        return not (self == other)

    @classmethod
    def create(cls, allowed_keys):
        allowed_keys_tuple = tuple(
            ("_reserved_" + k if k in cls._special_fields else k) for k in allowed_keys
        )
        allowed_keys = frozenset(allowed_keys_tuple)
        if allowed_keys not in cls._classes:

            class SpecificStrictDict(cls):
                __slots__ = allowed_keys_tuple

                def __repr__(self):
                    return "{%s}" % ", ".join(
                        f'"{k!s}": {v!r}' for k, v in self.items()
                    )

            cls._classes[allowed_keys] = SpecificStrictDict
        return cls._classes[allowed_keys]
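A small sketch of the create() factory above; StrictDict is an internal class, so this import path is for illustration only:

from mongoengine.base.datastructures import StrictDict

# Build (and cache) a __slots__-based subclass restricted to the given keys.
PointDict = StrictDict.create(("x", "y"))
p = PointDict(x=1, y=2)
p["x"] = 10                  # item access is backed by attributes, not a hash table
assert p.items() == [("x", 10), ("y", 2)]

try:
    p["z"] = 3               # "z" is not in __slots__, so setattr raises
except AttributeError:
    pass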
class LazyReference(DBRef):
    __slots__ = ("_cached_doc", "passthrough", "document_type")

    def fetch(self, force=False):
        if not self._cached_doc or force:
            self._cached_doc = self.document_type.objects.get(pk=self.pk)
            if not self._cached_doc:
                raise DoesNotExist("Trying to dereference unknown document %s" % (self))
        return self._cached_doc

    @property
    def pk(self):
        return self.id

    def __init__(self, document_type, pk, cached_doc=None, passthrough=False):
        self.document_type = document_type
        self._cached_doc = cached_doc
        self.passthrough = passthrough
        super().__init__(self.document_type._get_collection_name(), pk)

    def __getitem__(self, name):
        if not self.passthrough:
            raise KeyError()
        document = self.fetch()
        return document[name]

    def __getattr__(self, name):
        if not object.__getattribute__(self, "passthrough"):
            raise AttributeError()
        document = self.fetch()
        try:
            return document[name]
        except KeyError:
            raise AttributeError()

    def __repr__(self):
        return f"<LazyReference({self.document_type}, {self.pk!r})>"
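A sketch of how this class surfaces through LazyReferenceField, assuming hypothetical Author/Book documents and a populated database:

from mongoengine import Document, LazyReferenceField, StringField

class Author(Document):
    name = StringField()

class Book(Document):
    # passthrough=True lets attribute/item access transparently trigger fetch()
    author = LazyReferenceField(Author, passthrough=True)

book = Book.objects.first()     # assumes connect() and existing data
book.author.pk                  # no query: pk is just the underlying DBRef id
author = book.author.fetch()    # one query, cached on the LazyReference
book.author.name                # proxied through __getattr__ via the cached fetch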
mongoengine/base/document.py
File diff suppressed because it is too large
mongoengine/base/fields.py
@ -1,686 +0,0 @@
import operator
import weakref

import pymongo
from bson import SON, DBRef, ObjectId

from mongoengine.base.common import UPDATE_OPERATORS
from mongoengine.base.datastructures import (
    BaseDict,
    BaseList,
    EmbeddedDocumentList,
)
from mongoengine.common import _import_class
from mongoengine.errors import DeprecatedError, ValidationError

__all__ = ("BaseField", "ComplexBaseField", "ObjectIdField", "GeoJsonBaseField")
class BaseField:
    """A base class for fields in a MongoDB document. Instances of this class
    may be added to subclasses of `Document` to define a document's schema.
    """

    name = None  # set in TopLevelDocumentMetaclass
    _geo_index = False
    _auto_gen = False  # Call `generate` to generate a value
    _auto_dereference = True

    # These track each time a Field instance is created. Used to retain order.
    # The auto_creation_counter is used for fields that MongoEngine implicitly
    # creates, creation_counter is used for all user-specified fields.
    creation_counter = 0
    auto_creation_counter = -1

    def __init__(
        self,
        db_field=None,
        required=False,
        default=None,
        unique=False,
        unique_with=None,
        primary_key=False,
        validation=None,
        choices=None,
        null=False,
        sparse=False,
        **kwargs,
    ):
        """
        :param db_field: The database field to store this field in
            (defaults to the name of the field)
        :param required: If the field is required. Whether it has to have a
            value or not. Defaults to False.
        :param default: (optional) The default value for this field if no value
            has been set (or if the value has been unset). It can be a
            callable.
        :param unique: Is the field value unique or not. Defaults to False.
        :param unique_with: (optional) The other field this field should be
            unique with.
        :param primary_key: Mark this field as the primary key. Defaults to False.
        :param validation: (optional) A callable to validate the value of the
            field. The callable takes the value as parameter and should raise
            a ValidationError if validation fails
        :param choices: (optional) The valid choices
        :param null: (optional) If the field value can be null. If no and there is a default value
            then the default value is set
        :param sparse: (optional) `sparse=True` combined with `unique=True` and `required=False`
            means that uniqueness won't be enforced for `None` values
        :param **kwargs: (optional) Arbitrary indirection-free metadata for
            this field can be supplied as additional keyword arguments and
            accessed as attributes of the field. Must not conflict with any
            existing attributes. Common metadata includes `verbose_name` and
            `help_text`.
        """
        self.db_field = db_field if not primary_key else "_id"

        self.required = required or primary_key
        self.default = default
        self.unique = bool(unique or unique_with)
        self.unique_with = unique_with
        self.primary_key = primary_key
        self.validation = validation
        self.choices = choices
        self.null = null
        self.sparse = sparse
        self._owner_document = None

        # Make sure db_field is a string (if it's explicitly defined).
        if self.db_field is not None and not isinstance(self.db_field, str):
            raise TypeError("db_field should be a string.")

        # Make sure db_field doesn't contain any forbidden characters.
        if isinstance(self.db_field, str) and (
            "." in self.db_field
            or "\0" in self.db_field
            or self.db_field.startswith("$")
        ):
            raise ValueError(
                'field names cannot contain dots (".") or null characters '
                '("\\0"), and they must not start with a dollar sign ("$").'
            )

        # Detect and report conflicts between metadata and base properties.
        conflicts = set(dir(self)) & set(kwargs)
        if conflicts:
            raise TypeError(
                "%s already has attribute(s): %s"
                % (self.__class__.__name__, ", ".join(conflicts))
            )

        # Assign metadata to the instance
        # This efficient method is available because no __slots__ are defined.
        self.__dict__.update(kwargs)

        # Adjust the appropriate creation counter, and save our local copy.
        if self.db_field == "_id":
            self.creation_counter = BaseField.auto_creation_counter
            BaseField.auto_creation_counter -= 1
        else:
            self.creation_counter = BaseField.creation_counter
            BaseField.creation_counter += 1

    def __get__(self, instance, owner):
        """Descriptor for retrieving a value from a field in a document."""
        if instance is None:
            # Document class being used rather than a document object
            return self

        # Get value from document instance if available
        return instance._data.get(self.name)

    def __set__(self, instance, value):
        """Descriptor for assigning a value to a field in a document."""
        # If setting to None and there is a default value provided for this
        # field, then set the value to the default value.
        if value is None:
            if self.null:
                value = None
            elif self.default is not None:
                value = self.default
                if callable(value):
                    value = value()

        if instance._initialised:
            try:
                value_has_changed = (
                    self.name not in instance._data
                    or instance._data[self.name] != value
                )
                if value_has_changed:
                    instance._mark_as_changed(self.name)
            except Exception:
                # Some values can't be compared and throw an error when we
                # attempt to do so (e.g. tz-naive and tz-aware datetimes).
                # Mark the field as changed in such cases.
                instance._mark_as_changed(self.name)

        EmbeddedDocument = _import_class("EmbeddedDocument")
        if isinstance(value, EmbeddedDocument):
            value._instance = weakref.proxy(instance)
        elif isinstance(value, (list, tuple)):
            for v in value:
                if isinstance(v, EmbeddedDocument):
                    v._instance = weakref.proxy(instance)

        instance._data[self.name] = value

    def error(self, message="", errors=None, field_name=None):
        """Raise a ValidationError."""
        field_name = field_name if field_name else self.name
        raise ValidationError(message, errors=errors, field_name=field_name)

    def to_python(self, value):
        """Convert a MongoDB-compatible type to a Python type."""
        return value

    def to_mongo(self, value):
        """Convert a Python type to a MongoDB-compatible type."""
        return self.to_python(value)

    def _to_mongo_safe_call(self, value, use_db_field=True, fields=None):
        """Helper method to call to_mongo with proper inputs."""
        f_inputs = self.to_mongo.__code__.co_varnames
        ex_vars = {}
        if "fields" in f_inputs:
            ex_vars["fields"] = fields

        if "use_db_field" in f_inputs:
            ex_vars["use_db_field"] = use_db_field

        return self.to_mongo(value, **ex_vars)

    def prepare_query_value(self, op, value):
        """Prepare a value that is being used in a query for PyMongo."""
        if op in UPDATE_OPERATORS:
            self.validate(value)
        return value

    def validate(self, value, clean=True):
        """Perform validation on a value."""
        pass

    def _validate_choices(self, value):
        Document = _import_class("Document")
        EmbeddedDocument = _import_class("EmbeddedDocument")

        choice_list = self.choices
        if isinstance(next(iter(choice_list)), (list, tuple)):
            # next(iter) is useful for sets
            choice_list = [k for k, _ in choice_list]

        # Choices which are other types of Documents
        if isinstance(value, (Document, EmbeddedDocument)):
            if not any(isinstance(value, c) for c in choice_list):
                self.error("Value must be an instance of %s" % (choice_list))
        # Choices which are types other than Documents
        else:
            values = value if isinstance(value, (list, tuple)) else [value]
            if len(set(values) - set(choice_list)):
                self.error("Value must be one of %s" % str(choice_list))

    def _validate(self, value, **kwargs):
        # Check the Choices Constraint
        if self.choices:
            self._validate_choices(value)

        # check validation argument
        if self.validation is not None:
            if callable(self.validation):
                try:
                    # breaking change of 0.18
                    # Get rid of True/False-type return for the validation method
                    # in favor of having validation raising a ValidationError
                    ret = self.validation(value)
                    if ret is not None:
                        raise DeprecatedError(
                            "validation argument for `%s` must not return anything, "
                            "it should raise a ValidationError if validation fails"
                            % self.name
                        )
                except ValidationError as ex:
                    self.error(str(ex))
            else:
                raise ValueError(
                    'validation argument for `"%s"` must be a ' "callable." % self.name
                )

        self.validate(value, **kwargs)

    @property
    def owner_document(self):
        return self._owner_document

    def _set_owner_document(self, owner_document):
        self._owner_document = owner_document

    @owner_document.setter
    def owner_document(self, owner_document):
        self._set_owner_document(owner_document)
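BaseField is the usual base class for custom fields; a minimal hypothetical subclass showing the to_python/to_mongo/validate hooks:

from mongoengine import Document
from mongoengine.base.fields import BaseField

class UpperStringField(BaseField):
    """Hypothetical field that uppercases values on the way to MongoDB."""

    def to_python(self, value):      # BSON -> Python (identity here)
        return value

    def to_mongo(self, value):       # Python -> BSON
        return value.upper()

    def validate(self, value):
        if not isinstance(value, str):
            self.error("UpperStringField only accepts strings")  # raises ValidationError

class Tag(Document):
    label = UpperStringField(required=True, db_field="lbl")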
class ComplexBaseField(BaseField):
    """Handles complex fields, such as lists / dictionaries.

    Allows for nesting of embedded documents inside complex types.
    Handles the lazy dereferencing of a queryset by lazily dereferencing all
    items in a list / dict rather than one at a time.
    """

    def __init__(self, field=None, **kwargs):
        self.field = field
        super().__init__(**kwargs)

    @staticmethod
    def _lazy_load_refs(instance, name, ref_values, *, max_depth):
        _dereference = _import_class("DeReference")()
        documents = _dereference(
            ref_values,
            max_depth=max_depth,
            instance=instance,
            name=name,
        )
        return documents

    def __get__(self, instance, owner):
        """Descriptor to automatically dereference references."""
        if instance is None:
            # Document class being used rather than a document object
            return self

        ReferenceField = _import_class("ReferenceField")
        GenericReferenceField = _import_class("GenericReferenceField")
        EmbeddedDocumentListField = _import_class("EmbeddedDocumentListField")

        auto_dereference = instance._fields[self.name]._auto_dereference

        dereference = auto_dereference and (
            self.field is None
            or isinstance(self.field, (GenericReferenceField, ReferenceField))
        )

        if (
            instance._initialised
            and dereference
            and instance._data.get(self.name)
            and not getattr(instance._data[self.name], "_dereferenced", False)
        ):
            ref_values = instance._data.get(self.name)
            instance._data[self.name] = self._lazy_load_refs(
                ref_values=ref_values, instance=instance, name=self.name, max_depth=1
            )
            if hasattr(instance._data[self.name], "_dereferenced"):
                instance._data[self.name]._dereferenced = True

        value = super().__get__(instance, owner)

        # Convert lists / values so we can watch for any changes on them
        if isinstance(value, (list, tuple)):
            if issubclass(type(self), EmbeddedDocumentListField) and not isinstance(
                value, EmbeddedDocumentList
            ):
                value = EmbeddedDocumentList(value, instance, self.name)
            elif not isinstance(value, BaseList):
                value = BaseList(value, instance, self.name)
            instance._data[self.name] = value
        elif isinstance(value, dict) and not isinstance(value, BaseDict):
            value = BaseDict(value, instance, self.name)
            instance._data[self.name] = value

        if (
            auto_dereference
            and instance._initialised
            and isinstance(value, (BaseList, BaseDict))
            and not value._dereferenced
        ):
            value = self._lazy_load_refs(
                ref_values=value, instance=instance, name=self.name, max_depth=1
            )
            value._dereferenced = True
            instance._data[self.name] = value

        return value

    def to_python(self, value):
        """Convert a MongoDB-compatible type to a Python type."""
        if isinstance(value, str):
            return value

        if hasattr(value, "to_python"):
            return value.to_python()

        BaseDocument = _import_class("BaseDocument")
        if isinstance(value, BaseDocument):
            # Something is wrong, return the value as it is
            return value

        is_list = False
        if not hasattr(value, "items"):
            try:
                is_list = True
                value = {idx: v for idx, v in enumerate(value)}
            except TypeError:  # Not iterable return the value
                return value

        if self.field:
            self.field._auto_dereference = self._auto_dereference
            value_dict = {
                key: self.field.to_python(item) for key, item in value.items()
            }
        else:
            Document = _import_class("Document")
            value_dict = {}
            for k, v in value.items():
                if isinstance(v, Document):
                    # We need the id from the saved object to create the DBRef
                    if v.pk is None:
                        self.error(
                            "You can only reference documents once they"
                            " have been saved to the database"
                        )
                    collection = v._get_collection_name()
                    value_dict[k] = DBRef(collection, v.pk)
                elif hasattr(v, "to_python"):
                    value_dict[k] = v.to_python()
                else:
                    value_dict[k] = self.to_python(v)

        if is_list:  # Convert back to a list
            return [
                v for _, v in sorted(value_dict.items(), key=operator.itemgetter(0))
            ]
        return value_dict

    def to_mongo(self, value, use_db_field=True, fields=None):
        """Convert a Python type to a MongoDB-compatible type."""
        Document = _import_class("Document")
        EmbeddedDocument = _import_class("EmbeddedDocument")
        GenericReferenceField = _import_class("GenericReferenceField")

        if isinstance(value, str):
            return value

        if hasattr(value, "to_mongo"):
            if isinstance(value, Document):
                return GenericReferenceField().to_mongo(value)
            cls = value.__class__
            val = value.to_mongo(use_db_field, fields)
            # If it's a document that is not inherited add _cls
            if isinstance(value, EmbeddedDocument):
                val["_cls"] = cls.__name__
            return val

        is_list = False
        if not hasattr(value, "items"):
            try:
                is_list = True
                value = {k: v for k, v in enumerate(value)}
            except TypeError:  # Not iterable return the value
                return value

        if self.field:
            value_dict = {
                key: self.field._to_mongo_safe_call(item, use_db_field, fields)
                for key, item in value.items()
            }
        else:
            value_dict = {}
            for k, v in value.items():
                if isinstance(v, Document):
                    # We need the id from the saved object to create the DBRef
                    if v.pk is None:
                        self.error(
                            "You can only reference documents once they"
                            " have been saved to the database"
                        )

                    # If its a document that is not inheritable it won't have
                    # any _cls data so make it a generic reference allows
                    # us to dereference
                    meta = getattr(v, "_meta", {})
                    allow_inheritance = meta.get("allow_inheritance")
                    if not allow_inheritance and not self.field:
                        value_dict[k] = GenericReferenceField().to_mongo(v)
                    else:
                        collection = v._get_collection_name()
                        value_dict[k] = DBRef(collection, v.pk)
                elif hasattr(v, "to_mongo"):
                    cls = v.__class__
                    val = v.to_mongo(use_db_field, fields)
                    # If it's a document that is not inherited add _cls
                    if isinstance(v, (Document, EmbeddedDocument)):
                        val["_cls"] = cls.__name__
                    value_dict[k] = val
                else:
                    value_dict[k] = self.to_mongo(v, use_db_field, fields)

        if is_list:  # Convert back to a list
            return [
                v for _, v in sorted(value_dict.items(), key=operator.itemgetter(0))
            ]
        return value_dict

    def validate(self, value):
        """If field is provided ensure the value is valid."""
        errors = {}
        if self.field:
            if hasattr(value, "items"):
                sequence = value.items()
            else:
                sequence = enumerate(value)
            for k, v in sequence:
                try:
                    self.field._validate(v)
                except ValidationError as error:
                    errors[k] = error.errors or error
                except (ValueError, AssertionError) as error:
                    errors[k] = error

            if errors:
                field_class = self.field.__class__.__name__
                self.error(f"Invalid {field_class} item ({value})", errors=errors)
        # Don't allow empty values if required
        if self.required and not value:
            self.error("Field is required and cannot be empty")

    def prepare_query_value(self, op, value):
        return self.to_mongo(value)

    def lookup_member(self, member_name):
        if self.field:
            return self.field.lookup_member(member_name)
        return None

    def _set_owner_document(self, owner_document):
        if self.field:
            self.field.owner_document = owner_document
        self._owner_document = owner_document
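ListField is one ComplexBaseField subclass; a sketch of the per-item validation path above (the exact error structure shown is approximate):

from mongoengine import Document, IntField, ListField, ValidationError

class Scores(Document):
    # Each item is validated by the inner IntField; failures are
    # collected per key/index as in ComplexBaseField.validate above.
    values = ListField(IntField(min_value=0))

doc = Scores(values=[1, -2, "x"])
try:
    doc.validate()
except ValidationError as e:
    print(e.to_dict())   # roughly {'values': {1: ..., 2: ...}}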
class ObjectIdField(BaseField):
    """A field wrapper around MongoDB's ObjectIds."""

    def to_python(self, value):
        try:
            if not isinstance(value, ObjectId):
                value = ObjectId(value)
        except Exception:
            pass
        return value

    def to_mongo(self, value):
        if not isinstance(value, ObjectId):
            try:
                return ObjectId(str(value))
            except Exception as e:
                self.error(str(e))
        return value

    def prepare_query_value(self, op, value):
        return self.to_mongo(value)

    def validate(self, value):
        try:
            ObjectId(str(value))
        except Exception:
            self.error("Invalid ObjectID")
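A quick sketch of the coercion behaviour:

from bson import ObjectId
from mongoengine.base.fields import ObjectIdField

f = ObjectIdField()
oid = f.to_mongo("5f43a1d2e4b0a1b2c3d4e5f6")   # 24-char hex string coerced to ObjectId
assert isinstance(oid, ObjectId)
assert f.to_python(oid) is oid                  # already an ObjectId: returned unchanged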
class GeoJsonBaseField(BaseField):
    """A geo json field storing a geojson style object."""

    _geo_index = pymongo.GEOSPHERE
    _type = "GeoBase"

    def __init__(self, auto_index=True, *args, **kwargs):
        """
        :param bool auto_index: Automatically create a '2dsphere' index.\
            Defaults to `True`.
        """
        self._name = "%sField" % self._type
        if not auto_index:
            self._geo_index = False
        super().__init__(*args, **kwargs)

    def validate(self, value):
        """Validate the GeoJson object based on its type."""
        if isinstance(value, dict):
            if set(value.keys()) == {"type", "coordinates"}:
                if value["type"] != self._type:
                    self.error(f'{self._name} type must be "{self._type}"')
                return self.validate(value["coordinates"])
            else:
                self.error(
                    "%s can only accept a valid GeoJson dictionary"
                    " or lists of (x, y)" % self._name
                )
                return
        elif not isinstance(value, (list, tuple)):
            self.error("%s can only accept lists of [x, y]" % self._name)
            return

        validate = getattr(self, "_validate_%s" % self._type.lower())
        error = validate(value)
        if error:
            self.error(error)

    def _validate_polygon(self, value, top_level=True):
        if not isinstance(value, (list, tuple)):
            return "Polygons must contain list of linestrings"

        # Quick and dirty validator
        try:
            value[0][0][0]
        except (TypeError, IndexError):
            return "Invalid Polygon must contain at least one valid linestring"

        errors = []
        for val in value:
            error = self._validate_linestring(val, False)
            if not error and val[0] != val[-1]:
                error = "LineStrings must start and end at the same point"
            if error and error not in errors:
                errors.append(error)
        if errors:
            if top_level:
                return "Invalid Polygon:\n%s" % ", ".join(errors)
            else:
                return "%s" % ", ".join(errors)

    def _validate_linestring(self, value, top_level=True):
        """Validate a linestring."""
        if not isinstance(value, (list, tuple)):
            return "LineStrings must contain list of coordinate pairs"

        # Quick and dirty validator
        try:
            value[0][0]
        except (TypeError, IndexError):
            return "Invalid LineString must contain at least one valid point"

        errors = []
        for val in value:
            error = self._validate_point(val)
            if error and error not in errors:
                errors.append(error)
        if errors:
            if top_level:
                return "Invalid LineString:\n%s" % ", ".join(errors)
            else:
                return "%s" % ", ".join(errors)

    def _validate_point(self, value):
        """Validate each set of coords"""
        if not isinstance(value, (list, tuple)):
            return "Points must be a list of coordinate pairs"
        elif not len(value) == 2:
            return "Value (%s) must be a two-dimensional point" % repr(value)
        elif not isinstance(value[0], (float, int)) or not isinstance(
            value[1], (float, int)
        ):
            return "Both values (%s) in point must be float or int" % repr(value)

    def _validate_multipoint(self, value):
        if not isinstance(value, (list, tuple)):
            return "MultiPoint must be a list of Point"

        # Quick and dirty validator
        try:
            value[0][0]
        except (TypeError, IndexError):
            return "Invalid MultiPoint must contain at least one valid point"

        errors = []
        for point in value:
            error = self._validate_point(point)
            if error and error not in errors:
                errors.append(error)

        if errors:
            return "%s" % ", ".join(errors)

    def _validate_multilinestring(self, value, top_level=True):
        if not isinstance(value, (list, tuple)):
            return "MultiLineString must be a list of LineString"

        # Quick and dirty validator
        try:
            value[0][0][0]
        except (TypeError, IndexError):
            return "Invalid MultiLineString must contain at least one valid linestring"

        errors = []
        for linestring in value:
            error = self._validate_linestring(linestring, False)
            if error and error not in errors:
                errors.append(error)

        if errors:
            if top_level:
                return "Invalid MultiLineString:\n%s" % ", ".join(errors)
            else:
                return "%s" % ", ".join(errors)

    def _validate_multipolygon(self, value):
        if not isinstance(value, (list, tuple)):
            return "MultiPolygon must be a list of Polygon"

        # Quick and dirty validator
        try:
            value[0][0][0][0]
        except (TypeError, IndexError):
            return "Invalid MultiPolygon must contain at least one valid Polygon"

        errors = []
        for polygon in value:
            error = self._validate_polygon(polygon, False)
            if error and error not in errors:
                errors.append(error)

        if errors:
            return "Invalid MultiPolygon:\n%s" % ", ".join(errors)

    def to_mongo(self, value):
        if isinstance(value, dict):
            return value
        return SON([("type", self._type), ("coordinates", value)])
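A sketch using PointField, a concrete GeoJsonBaseField subclass with _type set to "Point":

from mongoengine import Document, PointField

class Place(Document):
    location = PointField()

# Both the bare coordinate form and the full GeoJSON dict are accepted;
# to_mongo() normalizes the bare form into {"type": "Point", "coordinates": ...}.
Place(location=[40.33, -73.9]).validate()
Place(location={"type": "Point", "coordinates": [40.33, -73.9]}).validate()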
mongoengine/base/metaclasses.py
@ -1,469 +0,0 @@
import itertools
import warnings

from mongoengine.base.common import _document_registry
from mongoengine.base.fields import (
    BaseField,
    ComplexBaseField,
    ObjectIdField,
)
from mongoengine.common import _import_class
from mongoengine.errors import InvalidDocumentError
from mongoengine.queryset import (
    DO_NOTHING,
    DoesNotExist,
    MultipleObjectsReturned,
    QuerySetManager,
)

__all__ = ("DocumentMetaclass", "TopLevelDocumentMetaclass")
class DocumentMetaclass(type):
    """Metaclass for all documents."""

    # TODO lower complexity of this method
    def __new__(mcs, name, bases, attrs):
        flattened_bases = mcs._get_bases(bases)
        super_new = super().__new__

        # If a base class just call super
        metaclass = attrs.get("my_metaclass")
        if metaclass and issubclass(metaclass, DocumentMetaclass):
            return super_new(mcs, name, bases, attrs)

        attrs["_is_document"] = attrs.get("_is_document", False)
        attrs["_cached_reference_fields"] = []

        # EmbeddedDocuments could have meta data for inheritance
        if "meta" in attrs:
            attrs["_meta"] = attrs.pop("meta")

        # EmbeddedDocuments should inherit meta data
        if "_meta" not in attrs:
            meta = MetaDict()
            for base in flattened_bases[::-1]:
                # Add any mixin metadata from plain objects
                if hasattr(base, "meta"):
                    meta.merge(base.meta)
                elif hasattr(base, "_meta"):
                    meta.merge(base._meta)
            attrs["_meta"] = meta
            attrs["_meta"][
                "abstract"
            ] = False  # 789: EmbeddedDocument shouldn't inherit abstract

        # If allow_inheritance is True, add a "_cls" string field to the attrs
        if attrs["_meta"].get("allow_inheritance"):
            StringField = _import_class("StringField")
            attrs["_cls"] = StringField()

        # Handle document Fields

        # Merge all fields from subclasses
        doc_fields = {}
        for base in flattened_bases[::-1]:
            if hasattr(base, "_fields"):
                doc_fields.update(base._fields)

            # Standard object mixin - merge in any Fields
            if not hasattr(base, "_meta"):
                base_fields = {}
                for attr_name, attr_value in base.__dict__.items():
                    if not isinstance(attr_value, BaseField):
                        continue
                    attr_value.name = attr_name
                    if not attr_value.db_field:
                        attr_value.db_field = attr_name
                    base_fields[attr_name] = attr_value

                doc_fields.update(base_fields)

        # Discover any document fields
        field_names = {}
        for attr_name, attr_value in attrs.items():
            if not isinstance(attr_value, BaseField):
                continue
            attr_value.name = attr_name
            if not attr_value.db_field:
                attr_value.db_field = attr_name
            doc_fields[attr_name] = attr_value

            # Count names to ensure no db_field redefinitions
            field_names[attr_value.db_field] = (
                field_names.get(attr_value.db_field, 0) + 1
            )

        # Ensure no duplicate db_fields
        duplicate_db_fields = [k for k, v in field_names.items() if v > 1]
        if duplicate_db_fields:
            msg = "Multiple db_fields defined for: %s " % ", ".join(duplicate_db_fields)
            raise InvalidDocumentError(msg)

        # Set _fields and db_field maps
        attrs["_fields"] = doc_fields
        attrs["_db_field_map"] = {
            k: getattr(v, "db_field", k) for k, v in doc_fields.items()
        }
        attrs["_reverse_db_field_map"] = {
            v: k for k, v in attrs["_db_field_map"].items()
        }

        attrs["_fields_ordered"] = tuple(
            i[1]
            for i in sorted((v.creation_counter, v.name) for v in doc_fields.values())
        )

        #
        # Set document hierarchy
        #
        superclasses = ()
        class_name = [name]
        for base in flattened_bases:
            if not getattr(base, "_is_base_cls", True) and not getattr(
                base, "_meta", {}
            ).get("abstract", True):
                # Collate hierarchy for _cls and _subclasses
                class_name.append(base.__name__)

            if hasattr(base, "_meta"):
                # Warn if allow_inheritance isn't set and prevent
                # inheritance of classes where inheritance is set to False
                allow_inheritance = base._meta.get("allow_inheritance")
                if not allow_inheritance and not base._meta.get("abstract"):
                    raise ValueError(
                        "Document %s may not be subclassed. "
                        'To enable inheritance, use the "allow_inheritance" meta attribute.'
                        % base.__name__
                    )

        # Get superclasses from last base superclass
        document_bases = [b for b in flattened_bases if hasattr(b, "_class_name")]
        if document_bases:
            superclasses = document_bases[0]._superclasses
            superclasses += (document_bases[0]._class_name,)

        _cls = ".".join(reversed(class_name))
        attrs["_class_name"] = _cls
        attrs["_superclasses"] = superclasses
        attrs["_subclasses"] = (_cls,)
        attrs["_types"] = attrs["_subclasses"]  # TODO depreciate _types

        # Create the new_class
        new_class = super_new(mcs, name, bases, attrs)

        # Set _subclasses
        for base in document_bases:
            if _cls not in base._subclasses:
                base._subclasses += (_cls,)
            base._types = base._subclasses  # TODO depreciate _types

        (
            Document,
            EmbeddedDocument,
            DictField,
            CachedReferenceField,
        ) = mcs._import_classes()

        if issubclass(new_class, Document):
            new_class._collection = None

        # Add class to the _document_registry
        _document_registry[new_class._class_name] = new_class

        # Handle delete rules
        for field in new_class._fields.values():
            f = field
            if f.owner_document is None:
                f.owner_document = new_class
            delete_rule = getattr(f, "reverse_delete_rule", DO_NOTHING)
            if isinstance(f, CachedReferenceField):

                if issubclass(new_class, EmbeddedDocument):
                    raise InvalidDocumentError(
                        "CachedReferenceFields is not allowed in EmbeddedDocuments"
                    )

                if f.auto_sync:
                    f.start_listener()

                f.document_type._cached_reference_fields.append(f)

            if isinstance(f, ComplexBaseField) and hasattr(f, "field"):
                delete_rule = getattr(f.field, "reverse_delete_rule", DO_NOTHING)
                if isinstance(f, DictField) and delete_rule != DO_NOTHING:
                    msg = (
                        "Reverse delete rules are not supported "
                        "for %s (field: %s)" % (field.__class__.__name__, field.name)
                    )
                    raise InvalidDocumentError(msg)

                f = field.field

            if delete_rule != DO_NOTHING:
                if issubclass(new_class, EmbeddedDocument):
                    msg = (
                        "Reverse delete rules are not supported for "
                        "EmbeddedDocuments (field: %s)" % field.name
                    )
                    raise InvalidDocumentError(msg)
                f.document_type.register_delete_rule(new_class, field.name, delete_rule)

            if (
                field.name
                and hasattr(Document, field.name)
                and EmbeddedDocument not in new_class.mro()
            ):
                msg = "%s is a document method and not a valid field name" % field.name
                raise InvalidDocumentError(msg)

        return new_class

    @classmethod
    def _get_bases(mcs, bases):
        if isinstance(bases, BasesTuple):
            return bases
        seen = []
        bases = mcs.__get_bases(bases)
        unique_bases = (b for b in bases if not (b in seen or seen.append(b)))
        return BasesTuple(unique_bases)

    @classmethod
    def __get_bases(mcs, bases):
        for base in bases:
            if base is object:
                continue
            yield base
            yield from mcs.__get_bases(base.__bases__)

    @classmethod
    def _import_classes(mcs):
        Document = _import_class("Document")
        EmbeddedDocument = _import_class("EmbeddedDocument")
        DictField = _import_class("DictField")
        CachedReferenceField = _import_class("CachedReferenceField")
        return Document, EmbeddedDocument, DictField, CachedReferenceField
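A sketch of the hierarchy bookkeeping done in __new__, assuming hypothetical Animal/Dog documents:

from mongoengine import Document, StringField

class Animal(Document):
    name = StringField()
    meta = {"allow_inheritance": True}   # without this, subclassing raises ValueError

class Dog(Animal):
    breed = StringField()

Dog._class_name        # 'Animal.Dog', the reversed hierarchy join from __new__
Dog._superclasses      # ('Animal',)
Animal._subclasses     # ('Animal', 'Animal.Dog')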
class TopLevelDocumentMetaclass(DocumentMetaclass):
    """Metaclass for top-level documents (i.e. documents that have their own
    collection in the database.
    """

    def __new__(mcs, name, bases, attrs):
        flattened_bases = mcs._get_bases(bases)
        super_new = super().__new__

        # Set default _meta data if base class, otherwise get user defined meta
        if attrs.get("my_metaclass") == TopLevelDocumentMetaclass:
            # defaults
            attrs["_meta"] = {
                "abstract": True,
                "max_documents": None,
                "max_size": None,
                "ordering": [],  # default ordering applied at runtime
                "indexes": [],  # indexes to be ensured at runtime
                "id_field": None,
                "index_background": False,
                "index_opts": None,
                "delete_rules": None,
                # allow_inheritance can be True, False, and None. True means
                # "allow inheritance", False means "don't allow inheritance",
                # None means "do whatever your parent does, or don't allow
                # inheritance if you're a top-level class".
                "allow_inheritance": None,
            }
            attrs["_is_base_cls"] = True
            attrs["_meta"].update(attrs.get("meta", {}))
        else:
            attrs["_meta"] = attrs.get("meta", {})
            # Explicitly set abstract to false unless set
            attrs["_meta"]["abstract"] = attrs["_meta"].get("abstract", False)
            attrs["_is_base_cls"] = False

        # Set flag marking as document class - as opposed to an object mixin
        attrs["_is_document"] = True

        # Ensure queryset_class is inherited
        if "objects" in attrs:
            manager = attrs["objects"]
            if hasattr(manager, "queryset_class"):
                attrs["_meta"]["queryset_class"] = manager.queryset_class

        # Clean up top level meta
        if "meta" in attrs:
            del attrs["meta"]

        # Find the parent document class
        parent_doc_cls = [
            b for b in flattened_bases if b.__class__ == TopLevelDocumentMetaclass
        ]
        parent_doc_cls = None if not parent_doc_cls else parent_doc_cls[0]

        # Prevent classes setting collection different to their parents
        # If parent wasn't an abstract class
        if (
            parent_doc_cls
            and "collection" in attrs.get("_meta", {})
            and not parent_doc_cls._meta.get("abstract", True)
        ):
            msg = "Trying to set a collection on a subclass (%s)" % name
            warnings.warn(msg, SyntaxWarning)
            del attrs["_meta"]["collection"]

        # Ensure abstract documents have abstract bases
        if attrs.get("_is_base_cls") or attrs["_meta"].get("abstract"):
            if parent_doc_cls and not parent_doc_cls._meta.get("abstract", False):
                msg = "Abstract document cannot have non-abstract base"
                raise ValueError(msg)
            return super_new(mcs, name, bases, attrs)

        # Merge base class metas.
        # Uses a special MetaDict that handles various merging rules
        meta = MetaDict()
        for base in flattened_bases[::-1]:
            # Add any mixin metadata from plain objects
            if hasattr(base, "meta"):
                meta.merge(base.meta)
            elif hasattr(base, "_meta"):
                meta.merge(base._meta)

            # Set collection in the meta if its callable
            if getattr(base, "_is_document", False) and not base._meta.get("abstract"):
                collection = meta.get("collection", None)
                if callable(collection):
                    meta["collection"] = collection(base)

        meta.merge(attrs.get("_meta", {}))  # Top level meta

        # Only simple classes (i.e. direct subclasses of Document) may set
        # allow_inheritance to False. If the base Document allows inheritance,
        # none of its subclasses can override allow_inheritance to False.
        simple_class = all(
            b._meta.get("abstract") for b in flattened_bases if hasattr(b, "_meta")
        )
        if (
            not simple_class
            and meta["allow_inheritance"] is False
            and not meta["abstract"]
        ):
            raise ValueError(
                "Only direct subclasses of Document may set "
                '"allow_inheritance" to False'
            )

        # Set default collection name
        if "collection" not in meta:
            meta["collection"] = (
                "".join("_%s" % c if c.isupper() else c for c in name)
                .strip("_")
                .lower()
            )
        attrs["_meta"] = meta

        # Call super and get the new class
        new_class = super_new(mcs, name, bases, attrs)

        meta = new_class._meta

        # Set index specifications
        meta["index_specs"] = new_class._build_index_specs(meta["indexes"])

        # If collection is a callable - call it and set the value
        collection = meta.get("collection")
        if callable(collection):
            new_class._meta["collection"] = collection(new_class)

        # Provide a default queryset unless exists or one has been set
        if "objects" not in dir(new_class):
            new_class.objects = QuerySetManager()

        # Validate the fields and set primary key if needed
        for field_name, field in new_class._fields.items():
            if field.primary_key:
                # Ensure only one primary key is set
                current_pk = new_class._meta.get("id_field")
                if current_pk and current_pk != field_name:
                    raise ValueError("Cannot override primary key field")

                # Set primary key
                if not current_pk:
                    new_class._meta["id_field"] = field_name
                    new_class.id = field
|
|
||||||
|
|
||||||
# If the document doesn't explicitly define a primary key field, create
|
|
||||||
# one. Make it an ObjectIdField and give it a non-clashing name ("id"
|
|
||||||
# by default, but can be different if that one's taken).
|
|
||||||
if not new_class._meta.get("id_field"):
|
|
||||||
id_name, id_db_name = mcs.get_auto_id_names(new_class)
|
|
||||||
new_class._meta["id_field"] = id_name
|
|
||||||
new_class._fields[id_name] = ObjectIdField(db_field=id_db_name)
|
|
||||||
new_class._fields[id_name].name = id_name
|
|
||||||
new_class.id = new_class._fields[id_name]
|
|
||||||
new_class._db_field_map[id_name] = id_db_name
|
|
||||||
new_class._reverse_db_field_map[id_db_name] = id_name
|
|
||||||
|
|
||||||
# Prepend the ID field to _fields_ordered (so that it's *always*
|
|
||||||
# the first field).
|
|
||||||
new_class._fields_ordered = (id_name,) + new_class._fields_ordered
|
|
||||||
|
|
||||||
# Merge in exceptions with parent hierarchy.
|
|
||||||
exceptions_to_merge = (DoesNotExist, MultipleObjectsReturned)
|
|
||||||
module = attrs.get("__module__")
|
|
||||||
for exc in exceptions_to_merge:
|
|
||||||
name = exc.__name__
|
|
||||||
parents = tuple(
|
|
||||||
getattr(base, name) for base in flattened_bases if hasattr(base, name)
|
|
||||||
) or (exc,)
|
|
||||||
|
|
||||||
# Create a new exception and set it as an attribute on the new
|
|
||||||
# class.
|
|
||||||
exception = type(name, parents, {"__module__": module})
|
|
||||||
setattr(new_class, name, exception)
|
|
||||||
|
|
||||||
return new_class
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def get_auto_id_names(mcs, new_class):
|
|
||||||
"""Find a name for the automatic ID field for the given new class.
|
|
||||||
|
|
||||||
Return a two-element tuple where the first item is the field name (i.e.
|
|
||||||
the attribute name on the object) and the second element is the DB
|
|
||||||
field name (i.e. the name of the key stored in MongoDB).
|
|
||||||
|
|
||||||
Defaults to ('id', '_id'), or generates a non-clashing name in the form
|
|
||||||
of ('auto_id_X', '_auto_id_X') if the default name is already taken.
|
|
||||||
"""
|
|
||||||
id_name, id_db_name = ("id", "_id")
|
|
||||||
existing_fields = {field_name for field_name in new_class._fields}
|
|
||||||
existing_db_fields = {v.db_field for v in new_class._fields.values()}
|
|
||||||
if id_name not in existing_fields and id_db_name not in existing_db_fields:
|
|
||||||
return id_name, id_db_name
|
|
||||||
|
|
||||||
id_basename, id_db_basename, i = ("auto_id", "_auto_id", 0)
|
|
||||||
for i in itertools.count():
|
|
||||||
id_name = f"{id_basename}_{i}"
|
|
||||||
id_db_name = f"{id_db_basename}_{i}"
|
|
||||||
if id_name not in existing_fields and id_db_name not in existing_db_fields:
|
|
||||||
return id_name, id_db_name
|
|
||||||
|
|
||||||
|
|
||||||
class MetaDict(dict):
|
|
||||||
"""Custom dictionary for meta classes.
|
|
||||||
Handles the merging of set indexes
|
|
||||||
"""
|
|
||||||
|
|
||||||
_merge_options = ("indexes",)
|
|
||||||
|
|
||||||
def merge(self, new_options):
|
|
||||||
for k, v in new_options.items():
|
|
||||||
if k in self._merge_options:
|
|
||||||
self[k] = self.get(k, []) + v
|
|
||||||
else:
|
|
||||||
self[k] = v
|
|
||||||
|
|
||||||
|
|
||||||
class BasesTuple(tuple):
|
|
||||||
"""Special class to handle introspection of bases tuple in __new__"""
|
|
||||||
|
|
||||||
pass
|
|
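The practical effect of this metaclass is easiest to see on a concrete class. The sketch below is illustrative only — the BlogPost class and database name are made up, not part of the diff — and assumes a master-era mongoengine install:

    from mongoengine import Document, StringField, connect

    connect("metaclass_demo")  # hypothetical database name

    class BlogPost(Document):
        title = StringField()

    # Default collection name is derived from the class name in __new__:
    # "BlogPost" -> "blog_post"
    assert BlogPost._get_collection_name() == "blog_post"

    # An ObjectIdField primary key was injected and always ordered first
    assert BlogPost._meta["id_field"] == "id"
    assert BlogPost._fields_ordered[0] == "id"

    # Per-class exceptions were merged in from the parent hierarchy
    assert issubclass(BlogPost.DoesNotExist, Exception)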
@@ -1,22 +0,0 @@
import re


class LazyRegexCompiler:
    """Descriptor to allow lazy compilation of regex"""

    def __init__(self, pattern, flags=0):
        self._pattern = pattern
        self._flags = flags
        self._compiled_regex = None

    @property
    def compiled_regex(self):
        if self._compiled_regex is None:
            self._compiled_regex = re.compile(self._pattern, self._flags)
        return self._compiled_regex

    def __get__(self, instance, owner):
        return self.compiled_regex

    def __set__(self, instance, value):
        raise AttributeError("Can not set attribute LazyRegexCompiler")
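A usage sketch for the descriptor (the SlugDocument class and pattern are hypothetical, not from the diff): the regex is compiled on first attribute access rather than at import time, which keeps module import cheap for rarely-used patterns.

    import re

    class SlugDocument:
        # Compiled lazily on first access, then cached on the descriptor
        SLUG_REGEX = LazyRegexCompiler(r"^[-a-zA-Z0-9_]+$", flags=re.IGNORECASE)

    assert SlugDocument.SLUG_REGEX.match("my-slug_01")  # first access compiles
    assert SlugDocument.SLUG_REGEX.match("MY-SLUG")     # reuses the cached regex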
@@ -1,64 +0,0 @@
_class_registry_cache = {}
_field_list_cache = []


def _import_class(cls_name):
    """Cache mechanism for imports.

    Due to complications of circular imports mongoengine needs to do lots of
    inline imports in functions. This is inefficient as classes are
    imported repeatedly throughout the mongoengine code. This is
    compounded by some recursive functions requiring inline imports.

    :mod:`mongoengine.common` provides a single point to import all these
    classes. Circular imports aren't an issue as it dynamically imports the
    class when first needed. Subsequent calls to
    :func:`~mongoengine.common._import_class` can then directly retrieve the
    class from the :data:`mongoengine.common._class_registry_cache`.
    """
    if cls_name in _class_registry_cache:
        return _class_registry_cache.get(cls_name)

    doc_classes = (
        "Document",
        "DynamicEmbeddedDocument",
        "EmbeddedDocument",
        "MapReduceDocument",
    )

    # Field Classes
    if not _field_list_cache:
        from mongoengine.fields import __all__ as fields

        _field_list_cache.extend(fields)
        from mongoengine.base.fields import __all__ as fields

        _field_list_cache.extend(fields)

    field_classes = _field_list_cache

    deref_classes = ("DeReference",)

    if cls_name == "BaseDocument":
        from mongoengine.base import document as module

        import_classes = ["BaseDocument"]
    elif cls_name in doc_classes:
        from mongoengine import document as module

        import_classes = doc_classes
    elif cls_name in field_classes:
        from mongoengine import fields as module

        import_classes = field_classes
    elif cls_name in deref_classes:
        from mongoengine import dereference as module

        import_classes = deref_classes
    else:
        raise ValueError("No import set for: %s" % cls_name)

    for cls in import_classes:
        _class_registry_cache[cls] = getattr(module, cls)

    return _class_registry_cache.get(cls_name)
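A minimal sketch of how the cache behaves (assuming mongoengine is importable; the unknown class name is hypothetical, and the error text comes from the ValueError above):

    from mongoengine.common import _import_class

    Document = _import_class("Document")          # does the inline import, fills the cache
    assert _import_class("Document") is Document  # later calls are plain dict lookups

    try:
        _import_class("NotARealClass")            # hypothetical name
    except ValueError as exc:
        print(exc)  # No import set for: NotARealClass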
@@ -1,403 +1,166 @@
-from pymongo import MongoClient, ReadPreference, uri_parser
-from pymongo.database import _check_name
+import pymongo
+from pymongo import Connection, ReplicaSetConnection, uri_parser

-__all__ = [
-    "DEFAULT_CONNECTION_NAME",
-    "DEFAULT_DATABASE_NAME",
-    "ConnectionFailure",
-    "connect",
-    "disconnect",
-    "disconnect_all",
-    "get_connection",
-    "get_db",
-    "register_connection",
-]
-
-DEFAULT_CONNECTION_NAME = "default"
-DEFAULT_DATABASE_NAME = "test"
-DEFAULT_HOST = "localhost"
-DEFAULT_PORT = 27017
+__all__ = ['ConnectionError', 'connect', 'register_connection',
+           'DEFAULT_CONNECTION_NAME']
+
+DEFAULT_CONNECTION_NAME = 'default'
+
+
+class ConnectionError(Exception):
+    pass


 _connection_settings = {}
 _connections = {}
 _dbs = {}

-READ_PREFERENCE = ReadPreference.PRIMARY

+def register_connection(alias, name, host='localhost', port=27017,
+                        is_slave=False, read_preference=False, slaves=None,
+                        username=None, password=None, **kwargs):
+    """Add a connection.

-class ConnectionFailure(Exception):
-    """Error raised when the database connection can't be established or
-    when a connection with a requested alias can't be retrieved.
-    """
-
-    pass
-
-
-def _check_db_name(name):
-    """Check if a database name is valid.
-    This functionality is copied from the pymongo Database class constructor.
-    """
-    if not isinstance(name, str):
-        raise TypeError("name must be an instance of %s" % str)
-    elif name != "$external":
-        _check_name(name)
-
-
-def _get_connection_settings(
-    db=None,
-    name=None,
-    host=None,
-    port=None,
-    read_preference=READ_PREFERENCE,
-    username=None,
-    password=None,
-    authentication_source=None,
-    authentication_mechanism=None,
-    **kwargs,
-):
-    """Get the connection settings as a dict
-
-    :param db: the name of the database to use, for compatibility with connect
+    :param alias: the name that will be used to refer to this connection
+        throughout MongoEngine
     :param name: the name of the specific database to use
     :param host: the host name of the :program:`mongod` instance to connect to
     :param port: the port that the :program:`mongod` instance is running on
-    :param read_preference: The read preference for the collection
+    :param is_slave: whether the connection can act as a slave ** Depreciated pymongo 2.0.1+
+    :param read_preference: The read preference for the collection ** Added pymongo 2.1
+    :param slaves: a list of aliases of slave connections; each of these must
+        be a registered connection that has :attr:`is_slave` set to ``True``
     :param username: username to authenticate with
     :param password: password to authenticate with
-    :param authentication_source: database to authenticate against
-    :param authentication_mechanism: database authentication mechanisms.
-        By default, use SCRAM-SHA-1 with MongoDB 3.0 and later,
-        MONGODB-CR (MongoDB Challenge Response protocol) for older servers.
-    :param is_mock: explicitly use mongomock for this connection
-        (can also be done by using `mongomock://` as db host prefix)
-    :param kwargs: ad-hoc parameters to be passed into the pymongo driver,
-        for example maxpoolsize, tz_aware, etc. See the documentation
-        for pymongo's `MongoClient` for a full list.
+    :param kwargs: allow ad-hoc parameters to be passed into the pymongo driver
     """
+    global _connection_settings

     conn_settings = {
-        "name": name or db or DEFAULT_DATABASE_NAME,
-        "host": host or DEFAULT_HOST,
-        "port": port or DEFAULT_PORT,
-        "read_preference": read_preference,
-        "username": username,
-        "password": password,
-        "authentication_source": authentication_source,
-        "authentication_mechanism": authentication_mechanism,
+        'name': name,
+        'host': host,
+        'port': port,
+        'is_slave': is_slave,
+        'slaves': slaves or [],
+        'username': username,
+        'password': password,
+        'read_preference': read_preference
     }

-    _check_db_name(conn_settings["name"])
-    conn_host = conn_settings["host"]
-
-    # Host can be a list or a string, so if string, force to a list.
-    if isinstance(conn_host, str):
-        conn_host = [conn_host]
-
-    resolved_hosts = []
-    for entity in conn_host:
-
-        # Handle Mongomock
-        if entity.startswith("mongomock://"):
-            conn_settings["is_mock"] = True
-            # `mongomock://` is not a valid url prefix and must be replaced by `mongodb://`
-            new_entity = entity.replace("mongomock://", "mongodb://", 1)
-            resolved_hosts.append(new_entity)
-
-            uri_dict = uri_parser.parse_uri(new_entity)
-
-            database = uri_dict.get("database")
-            if database:
-                conn_settings["name"] = database
-
-        # Handle URI style connections, only updating connection params which
-        # were explicitly specified in the URI.
-        elif "://" in entity:
-            uri_dict = uri_parser.parse_uri(entity)
-            resolved_hosts.append(entity)
-
-            database = uri_dict.get("database")
-            if database:
-                conn_settings["name"] = database
-
-            for param in ("read_preference", "username", "password"):
-                if uri_dict.get(param):
-                    conn_settings[param] = uri_dict[param]
-
-            uri_options = uri_dict["options"]
-            if "replicaset" in uri_options:
-                conn_settings["replicaSet"] = uri_options["replicaset"]
-            if "authsource" in uri_options:
-                conn_settings["authentication_source"] = uri_options["authsource"]
-            if "authmechanism" in uri_options:
-                conn_settings["authentication_mechanism"] = uri_options["authmechanism"]
-            if "readpreference" in uri_options:
-                read_preferences = (
-                    ReadPreference.NEAREST,
-                    ReadPreference.PRIMARY,
-                    ReadPreference.PRIMARY_PREFERRED,
-                    ReadPreference.SECONDARY,
-                    ReadPreference.SECONDARY_PREFERRED,
-                )
-
-                # Starting with PyMongo v3.5, the "readpreference" option is
-                # returned as a string (e.g. "secondaryPreferred") and not an
-                # int (e.g. 3).
-                # TODO simplify the code below once we drop support for
-                # PyMongo v3.4.
-                read_pf_mode = uri_options["readpreference"]
-                if isinstance(read_pf_mode, str):
-                    read_pf_mode = read_pf_mode.lower()
-                for preference in read_preferences:
-                    if (
-                        preference.name.lower() == read_pf_mode
-                        or preference.mode == read_pf_mode
-                    ):
-                        conn_settings["read_preference"] = preference
-                        break
-        else:
-            resolved_hosts.append(entity)
-    conn_settings["host"] = resolved_hosts
-
-    # Deprecated parameters that should not be passed on
-    kwargs.pop("slaves", None)
-    kwargs.pop("is_slave", None)
-
+    # Handle uri style connections
+    if "://" in host:
+        uri_dict = uri_parser.parse_uri(host)
+        if uri_dict.get('database') is None:
+            raise ConnectionError("If using URI style connection include "\
+                                  "database name in string")
+        conn_settings.update({
+            'host': host,
+            'name': uri_dict.get('database'),
+            'username': uri_dict.get('username'),
+            'password': uri_dict.get('password'),
+            'read_preference': read_preference,
+        })
+        if "replicaSet" in host:
+            conn_settings['replicaSet'] = True
+
     conn_settings.update(kwargs)
-    return conn_settings
-
-
-def register_connection(
-    alias,
-    db=None,
-    name=None,
-    host=None,
-    port=None,
-    read_preference=READ_PREFERENCE,
-    username=None,
-    password=None,
-    authentication_source=None,
-    authentication_mechanism=None,
-    **kwargs,
-):
-    """Register the connection settings.
-
-    :param alias: the name that will be used to refer to this connection throughout MongoEngine
-    :param db: the name of the database to use, for compatibility with connect
-    :param name: the name of the specific database to use
-    :param host: the host name of the :program:`mongod` instance to connect to
-    :param port: the port that the :program:`mongod` instance is running on
-    :param read_preference: The read preference for the collection
-    :param username: username to authenticate with
-    :param password: password to authenticate with
-    :param authentication_source: database to authenticate against
-    :param authentication_mechanism: database authentication mechanisms.
-        By default, use SCRAM-SHA-1 with MongoDB 3.0 and later,
-        MONGODB-CR (MongoDB Challenge Response protocol) for older servers.
-    :param is_mock: explicitly use mongomock for this connection
-        (can also be done by using `mongomock://` as db host prefix)
-    :param kwargs: ad-hoc parameters to be passed into the pymongo driver,
-        for example maxpoolsize, tz_aware, etc. See the documentation
-        for pymongo's `MongoClient` for a full list.
-    """
-    conn_settings = _get_connection_settings(
-        db=db,
-        name=name,
-        host=host,
-        port=port,
-        read_preference=read_preference,
-        username=username,
-        password=password,
-        authentication_source=authentication_source,
-        authentication_mechanism=authentication_mechanism,
-        **kwargs,
-    )
     _connection_settings[alias] = conn_settings


 def disconnect(alias=DEFAULT_CONNECTION_NAME):
-    """Close the connection with a given alias."""
-    from mongoengine import Document
-    from mongoengine.base.common import _get_documents_by_db
+    global _connections
+    global _dbs

     if alias in _connections:
-        get_connection(alias=alias).close()
+        get_connection(alias=alias).disconnect()
         del _connections[alias]

     if alias in _dbs:
-        # Detach all cached collections in Documents
-        for doc_cls in _get_documents_by_db(alias, DEFAULT_CONNECTION_NAME):
-            if issubclass(doc_cls, Document):  # Skip EmbeddedDocument
-                doc_cls._disconnect()
-
         del _dbs[alias]

-    if alias in _connection_settings:
-        del _connection_settings[alias]
-
-
-def disconnect_all():
-    """Close all registered databases."""
-    for alias in list(_connections.keys()):
-        disconnect(alias)
-

 def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
-    """Return a connection with a given alias."""
+    global _connections

     # Connect to the database if not already connected
     if reconnect:
         disconnect(alias)

-    # If the requested alias already exists in the _connections list, return
-    # it immediately.
-    if alias in _connections:
-        return _connections[alias]
-
-    # Validate that the requested alias exists in the _connection_settings.
-    # Raise ConnectionFailure if it doesn't.
-    if alias not in _connection_settings:
-        if alias == DEFAULT_CONNECTION_NAME:
-            msg = "You have not defined a default connection"
-        else:
-            msg = 'Connection with alias "%s" has not been defined' % alias
-        raise ConnectionFailure(msg)
-
-    def _clean_settings(settings_dict):
-        irrelevant_fields_set = {
-            "name",
-            "username",
-            "password",
-            "authentication_source",
-            "authentication_mechanism",
-        }
-        return {
-            k: v for k, v in settings_dict.items() if k not in irrelevant_fields_set
-        }
-
-    raw_conn_settings = _connection_settings[alias].copy()
-
-    # Retrieve a copy of the connection settings associated with the requested
-    # alias and remove the database name and authentication info (we don't
-    # care about them at this point).
-    conn_settings = _clean_settings(raw_conn_settings)
-
-    # Determine if we should use PyMongo's or mongomock's MongoClient.
-    is_mock = conn_settings.pop("is_mock", False)
-    if is_mock:
+    if alias not in _connections:
+        if alias not in _connection_settings:
+            msg = 'Connection with alias "%s" has not been defined' % alias
+            if alias == DEFAULT_CONNECTION_NAME:
+                msg = 'You have not defined a default connection'
+            raise ConnectionError(msg)
+        conn_settings = _connection_settings[alias].copy()
+
+        if hasattr(pymongo, 'version_tuple'):  # Support for 2.1+
+            conn_settings.pop('name', None)
+            conn_settings.pop('slaves', None)
+            conn_settings.pop('is_slave', None)
+            conn_settings.pop('username', None)
+            conn_settings.pop('password', None)
+        else:
+            # Get all the slave connections
+            if 'slaves' in conn_settings:
+                slaves = []
+                for slave_alias in conn_settings['slaves']:
+                    slaves.append(get_connection(slave_alias))
+                conn_settings['slaves'] = slaves
+            conn_settings.pop('read_preference', None)
+
+        connection_class = Connection
+        if 'replicaSet' in conn_settings:
+            conn_settings['hosts_or_uri'] = conn_settings.pop('host', None)
+            # Discard port since it can't be used on ReplicaSetConnection
+            conn_settings.pop('port', None)
+            # Discard replicaSet if not base string
+            if not isinstance(conn_settings['replicaSet'], basestring):
+                conn_settings.pop('replicaSet', None)
+            connection_class = ReplicaSetConnection
+
         try:
-            import mongomock
-        except ImportError:
-            raise RuntimeError("You need mongomock installed to mock MongoEngine.")
-        connection_class = mongomock.MongoClient
-    else:
-        connection_class = MongoClient
-
-    # Re-use existing connection if one is suitable.
-    existing_connection = _find_existing_connection(raw_conn_settings)
-    if existing_connection:
-        connection = existing_connection
-    else:
-        connection = _create_connection(
-            alias=alias, connection_class=connection_class, **conn_settings
-        )
-        _connections[alias] = connection
+            _connections[alias] = connection_class(**conn_settings)
+        except Exception, e:
+            raise ConnectionError("Cannot connect to database %s :\n%s" % (alias, e))
     return _connections[alias]


-def _create_connection(alias, connection_class, **connection_settings):
-    """
-    Create the new connection for this alias. Raise
-    ConnectionFailure if it can't be established.
-    """
-    try:
-        return connection_class(**connection_settings)
-    except Exception as e:
-        raise ConnectionFailure(f"Cannot connect to database {alias} :\n{e}")
-
-
-def _find_existing_connection(connection_settings):
-    """
-    Check if an existing connection could be reused
-
-    Iterate over all of the connection settings and if an existing connection
-    with the same parameters is suitable, return it
-
-    :param connection_settings: the settings of the new connection
-    :return: An existing connection or None
-    """
-    connection_settings_bis = (
-        (db_alias, settings.copy())
-        for db_alias, settings in _connection_settings.items()
-    )
-
-    def _clean_settings(settings_dict):
-        # Only remove the name but it's important to
-        # keep the username/password/authentication_source/authentication_mechanism
-        # to identify if the connection could be shared (cfr https://github.com/MongoEngine/mongoengine/issues/2047)
-        return {k: v for k, v in settings_dict.items() if k != "name"}
-
-    cleaned_conn_settings = _clean_settings(connection_settings)
-    for db_alias, connection_settings in connection_settings_bis:
-        db_conn_settings = _clean_settings(connection_settings)
-        if cleaned_conn_settings == db_conn_settings and _connections.get(db_alias):
-            return _connections[db_alias]
-
-
 def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
+    global _dbs
     if reconnect:
         disconnect(alias)

     if alias not in _dbs:
         conn = get_connection(alias)
         conn_settings = _connection_settings[alias]
-        db = conn[conn_settings["name"]]
-        auth_kwargs = {"source": conn_settings["authentication_source"]}
-        if conn_settings["authentication_mechanism"] is not None:
-            auth_kwargs["mechanism"] = conn_settings["authentication_mechanism"]
+        _dbs[alias] = conn[conn_settings['name']]
         # Authenticate if necessary
-        if conn_settings["username"] and (
-            conn_settings["password"]
-            or conn_settings["authentication_mechanism"] == "MONGODB-X509"
-        ):
-            db.authenticate(
-                conn_settings["username"], conn_settings["password"], **auth_kwargs
-            )
-        _dbs[alias] = db
+        if conn_settings['username'] and conn_settings['password']:
+            _dbs[alias].authenticate(conn_settings['username'],
+                                     conn_settings['password'])
     return _dbs[alias]


-def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs):
+def connect(db, alias=DEFAULT_CONNECTION_NAME, **kwargs):
     """Connect to the database specified by the 'db' argument.

     Connection settings may be provided here as well if the database is not
     running on the default port on localhost. If authentication is needed,
     provide username and password arguments as well.

     Multiple databases are supported by using aliases. Provide a separate
     `alias` to connect to a different instance of :program:`mongod`.

-    In order to replace a connection identified by a given alias, you'll
-    need to call ``disconnect`` first
-
-    See the docstring for `register_connection` for more details about all
-    supported kwargs.
+    .. versionchanged:: 0.6 - added multiple database support.
     """
-    if alias in _connections:
-        prev_conn_setting = _connection_settings[alias]
-        new_conn_settings = _get_connection_settings(db, **kwargs)
-
-        if new_conn_settings != prev_conn_setting:
-            err_msg = (
-                "A different connection with alias `{}` was already "
-                "registered. Use disconnect() first"
-            ).format(alias)
-            raise ConnectionFailure(err_msg)
-    else:
+    global _connections
+    if alias not in _connections:
         register_connection(alias, db, **kwargs)

     return get_connection(alias)


 # Support old naming convention
 _get_connection = get_connection
 _get_db = get_db
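The user-visible shift in this file: v0.7rc1's connect() requires a database name and raises ConnectionError, while master makes db optional (falling back to DEFAULT_DATABASE_NAME) and raises ConnectionFailure, refusing to silently re-register an alias with different settings. A sketch against the master-side API only — alias and database names below are placeholders:

    from mongoengine import connect, disconnect
    from mongoengine.connection import ConnectionFailure

    connect(alias="default")                # db defaults to "test" on master
    disconnect(alias="default")             # required before re-registering the alias
    connect(db="prod_db", alias="default")

    try:
        connect(db="other_db", alias="default")  # same alias, different settings
    except ConnectionFailure as exc:
        print(exc)  # A different connection with alias `default` was already registered...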
@@ -1,291 +0,0 @@
from contextlib import contextmanager

from pymongo.read_concern import ReadConcern
from pymongo.write_concern import WriteConcern

from mongoengine.common import _import_class
from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
from mongoengine.pymongo_support import count_documents

__all__ = (
    "switch_db",
    "switch_collection",
    "no_dereference",
    "no_sub_classes",
    "query_counter",
    "set_write_concern",
    "set_read_write_concern",
)


class switch_db:
    """switch_db alias context manager.

    Example ::

        # Register connections
        register_connection('default', 'mongoenginetest')
        register_connection('testdb-1', 'mongoenginetest2')

        class Group(Document):
            name = StringField()

        Group(name='test').save()  # Saves in the default db

        with switch_db(Group, 'testdb-1') as Group:
            Group(name='hello testdb!').save()  # Saves in testdb-1
    """

    def __init__(self, cls, db_alias):
        """Construct the switch_db context manager

        :param cls: the class to change the registered db
        :param db_alias: the name of the specific database to use
        """
        self.cls = cls
        self.collection = cls._get_collection()
        self.db_alias = db_alias
        self.ori_db_alias = cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME)

    def __enter__(self):
        """Change the db_alias and clear the cached collection."""
        self.cls._meta["db_alias"] = self.db_alias
        self.cls._collection = None
        return self.cls

    def __exit__(self, t, value, traceback):
        """Reset the db_alias and collection."""
        self.cls._meta["db_alias"] = self.ori_db_alias
        self.cls._collection = self.collection


class switch_collection:
    """switch_collection alias context manager.

    Example ::

        class Group(Document):
            name = StringField()

        Group(name='test').save()  # Saves in the default db

        with switch_collection(Group, 'group1') as Group:
            Group(name='hello testdb!').save()  # Saves in group1 collection
    """

    def __init__(self, cls, collection_name):
        """Construct the switch_collection context manager.

        :param cls: the class to change the registered db
        :param collection_name: the name of the collection to use
        """
        self.cls = cls
        self.ori_collection = cls._get_collection()
        self.ori_get_collection_name = cls._get_collection_name
        self.collection_name = collection_name

    def __enter__(self):
        """Change the _get_collection_name and clear the cached collection."""

        @classmethod
        def _get_collection_name(cls):
            return self.collection_name

        self.cls._get_collection_name = _get_collection_name
        self.cls._collection = None
        return self.cls

    def __exit__(self, t, value, traceback):
        """Reset the collection."""
        self.cls._collection = self.ori_collection
        self.cls._get_collection_name = self.ori_get_collection_name


class no_dereference:
    """no_dereference context manager.

    Turns off all dereferencing in Documents for the duration of the context
    manager::

        with no_dereference(Group) as Group:
            Group.objects.find()
    """

    def __init__(self, cls):
        """Construct the no_dereference context manager.

        :param cls: the class to turn dereferencing off on
        """
        self.cls = cls

        ReferenceField = _import_class("ReferenceField")
        GenericReferenceField = _import_class("GenericReferenceField")
        ComplexBaseField = _import_class("ComplexBaseField")

        self.deref_fields = [
            k
            for k, v in self.cls._fields.items()
            if isinstance(v, (ReferenceField, GenericReferenceField, ComplexBaseField))
        ]

    def __enter__(self):
        """Change the objects default and _auto_dereference values."""
        for field in self.deref_fields:
            self.cls._fields[field]._auto_dereference = False
        return self.cls

    def __exit__(self, t, value, traceback):
        """Reset the default and _auto_dereference values."""
        for field in self.deref_fields:
            self.cls._fields[field]._auto_dereference = True
        return self.cls


class no_sub_classes:
    """no_sub_classes context manager.

    Only returns instances of this class and no sub (inherited) classes::

        with no_sub_classes(Group) as Group:
            Group.objects.find()
    """

    def __init__(self, cls):
        """Construct the no_sub_classes context manager.

        :param cls: the class to turn querying sub classes on
        """
        self.cls = cls
        self.cls_initial_subclasses = None

    def __enter__(self):
        """Change the objects default and _auto_dereference values."""
        self.cls_initial_subclasses = self.cls._subclasses
        self.cls._subclasses = (self.cls._class_name,)
        return self.cls

    def __exit__(self, t, value, traceback):
        """Reset the default and _auto_dereference values."""
        self.cls._subclasses = self.cls_initial_subclasses


class query_counter:
    """query_counter context manager to get the number of queries.
    This works by updating the `profiling_level` of the database so that all queries get logged,
    resetting the db.system.profile collection at the beginning of the context and counting the new entries.

    This was designed for debugging purposes. In fact it is a global counter, so queries issued by other
    threads/processes can interfere with it.

    Usage:

    .. code-block:: python

        class User(Document):
            name = StringField()

        with query_counter() as q:
            user = User(name='Bob')
            assert q == 0       # no query fired yet
            user.save()
            assert q == 1       # 1 query was fired, an 'insert'
            user_bis = User.objects().first()
            assert q == 2       # a 2nd query was fired, a 'find_one'

    Be aware that:

    - Iterating over a large number of documents (>101) makes pymongo issue `getmore` queries to fetch the next batch of documents (https://docs.mongodb.com/manual/tutorial/iterate-a-cursor/#cursor-batches)
    - Some queries are ignored by default by the counter (killcursors, db.system.indexes)
    """

    def __init__(self, alias=DEFAULT_CONNECTION_NAME):
        self.db = get_db(alias=alias)
        self.initial_profiling_level = None
        self._ctx_query_counter = 0  # number of queries issued by the context

        self._ignored_query = {
            "ns": {"$ne": "%s.system.indexes" % self.db.name},
            "op": {"$ne": "killcursors"},  # MONGODB < 3.2
            "command.killCursors": {"$exists": False},  # MONGODB >= 3.2
        }

    def _turn_on_profiling(self):
        self.initial_profiling_level = self.db.profiling_level()
        self.db.set_profiling_level(0)
        self.db.system.profile.drop()
        self.db.set_profiling_level(2)

    def _resets_profiling(self):
        self.db.set_profiling_level(self.initial_profiling_level)

    def __enter__(self):
        self._turn_on_profiling()
        return self

    def __exit__(self, t, value, traceback):
        self._resets_profiling()

    def __eq__(self, value):
        counter = self._get_count()
        return value == counter

    def __ne__(self, value):
        return not self.__eq__(value)

    def __lt__(self, value):
        return self._get_count() < value

    def __le__(self, value):
        return self._get_count() <= value

    def __gt__(self, value):
        return self._get_count() > value

    def __ge__(self, value):
        return self._get_count() >= value

    def __int__(self):
        return self._get_count()

    def __repr__(self):
        """repr query_counter as the number of queries."""
        return "%s" % self._get_count()

    def _get_count(self):
        """Get the number of queries by counting the current number of entries in db.system.profile
        and subtracting the queries issued by this context. In fact, every time this is called, 1 query
        is issued, so we need to balance that.
        """
        count = (
            count_documents(self.db.system.profile, self._ignored_query)
            - self._ctx_query_counter
        )
        self._ctx_query_counter += (
            1  # Account for the query we just issued to gather the information
        )
        return count


@contextmanager
def set_write_concern(collection, write_concerns):
    combined_concerns = dict(collection.write_concern.document.items())
    combined_concerns.update(write_concerns)
    yield collection.with_options(write_concern=WriteConcern(**combined_concerns))


@contextmanager
def set_read_write_concern(collection, write_concerns, read_concerns):
    combined_write_concerns = dict(collection.write_concern.document.items())

    if write_concerns is not None:
        combined_write_concerns.update(write_concerns)

    combined_read_concerns = dict(collection.read_concern.document.items())

    if read_concerns is not None:
        combined_read_concerns.update(read_concerns)

    yield collection.with_options(
        write_concern=WriteConcern(**combined_write_concerns),
        read_concern=ReadConcern(**combined_read_concerns),
    )
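The two concern helpers at the end are thin wrappers over pymongo's with_options(). A short usage sketch — the database and collection names are placeholders, and a reachable mongod is assumed:

    from pymongo import MongoClient
    from mongoengine.context_managers import set_write_concern

    collection = MongoClient().get_database("demo").get_collection("group")

    # Merge w="majority" into whatever write concern the collection already
    # carries; the original collection object is left untouched.
    with set_write_concern(collection, {"w": "majority"}) as wc_coll:
        wc_coll.insert_one({"name": "test"})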
@ -1,29 +1,18 @@
|
|||||||
from bson import SON, DBRef
|
from bson import DBRef, SON
|
||||||
|
|
||||||
from mongoengine.base import (
|
from base import (BaseDict, BaseList, TopLevelDocumentMetaclass, get_document)
|
||||||
BaseDict,
|
from fields import (ReferenceField, ListField, DictField, MapField)
|
||||||
BaseList,
|
from connection import get_db
|
||||||
EmbeddedDocumentList,
|
from queryset import QuerySet
|
||||||
TopLevelDocumentMetaclass,
|
from document import Document
|
||||||
get_document,
|
|
||||||
)
|
|
||||||
from mongoengine.base.datastructures import LazyReference
|
|
||||||
from mongoengine.connection import get_db
|
|
||||||
from mongoengine.document import Document, EmbeddedDocument
|
|
||||||
from mongoengine.fields import (
|
|
||||||
DictField,
|
|
||||||
ListField,
|
|
||||||
MapField,
|
|
||||||
ReferenceField,
|
|
||||||
)
|
|
||||||
from mongoengine.queryset import QuerySet
|
|
||||||
|
|
||||||
|
|
||||||
class DeReference:
|
class DeReference(object):
|
||||||
|
|
||||||
def __call__(self, items, max_depth=1, instance=None, name=None):
|
def __call__(self, items, max_depth=1, instance=None, name=None):
|
||||||
"""
|
"""
|
||||||
Cheaply dereferences the items to a set depth.
|
Cheaply dereferences the items to a set depth.
|
||||||
Also handles the conversion of complex data types.
|
Also handles the convertion of complex data types.
|
||||||
|
|
||||||
:param items: The iterable (dict, list, queryset) to be dereferenced.
|
:param items: The iterable (dict, list, queryset) to be dereferenced.
|
||||||
:param max_depth: The maximum depth to recurse to
|
:param max_depth: The maximum depth to recurse to
|
||||||
@ -33,7 +22,7 @@ class DeReference:
|
|||||||
:class:`~mongoengine.base.ComplexBaseField`
|
:class:`~mongoengine.base.ComplexBaseField`
|
||||||
:param get: A boolean determining if being called by __get__
|
:param get: A boolean determining if being called by __get__
|
||||||
"""
|
"""
|
||||||
if items is None or isinstance(items, str):
|
if items is None or isinstance(items, basestring):
|
||||||
return items
|
return items
|
||||||
|
|
||||||
# cheapest way to convert a queryset to a list
|
# cheapest way to convert a queryset to a list
|
||||||
@ -42,61 +31,34 @@ class DeReference:
|
|||||||
items = [i for i in items]
|
items = [i for i in items]
|
||||||
|
|
||||||
self.max_depth = max_depth
|
self.max_depth = max_depth
|
||||||
doc_type = None
|
|
||||||
|
|
||||||
if instance and isinstance(
|
doc_type = None
|
||||||
instance, (Document, EmbeddedDocument, TopLevelDocumentMetaclass)
|
if instance and instance._fields:
|
||||||
):
|
doc_type = instance._fields[name]
|
||||||
doc_type = instance._fields.get(name)
|
if hasattr(doc_type, 'field'):
|
||||||
while hasattr(doc_type, "field"):
|
|
||||||
doc_type = doc_type.field
|
doc_type = doc_type.field
|
||||||
|
|
||||||
if isinstance(doc_type, ReferenceField):
|
if isinstance(doc_type, ReferenceField):
|
||||||
field = doc_type
|
field = doc_type
|
||||||
doc_type = doc_type.document_type
|
doc_type = doc_type.document_type
|
||||||
is_list = not hasattr(items, "items")
|
is_list = not hasattr(items, 'items')
|
||||||
|
|
||||||
if is_list and all(i.__class__ == doc_type for i in items):
|
if is_list and all([i.__class__ == doc_type for i in items]):
|
||||||
return items
|
return items
|
||||||
elif not is_list and all(
|
elif not is_list and all([i.__class__ == doc_type
|
||||||
i.__class__ == doc_type for i in items.values()
|
for i in items.values()]):
|
||||||
):
|
|
||||||
return items
|
return items
|
||||||
elif not field.dbref:
|
elif not field.dbref:
|
||||||
# We must turn the ObjectIds into DBRefs
|
if not hasattr(items, 'items'):
|
||||||
|
items = [field.to_python(v)
|
||||||
# Recursively dig into the sub items of a list/dict
|
if not isinstance(v, (DBRef, Document)) else v
|
||||||
# to turn the ObjectIds into DBRefs
|
for v in items]
|
||||||
def _get_items_from_list(items):
|
|
||||||
new_items = []
|
|
||||||
for v in items:
|
|
||||||
value = v
|
|
||||||
if isinstance(v, dict):
|
|
||||||
value = _get_items_from_dict(v)
|
|
||||||
elif isinstance(v, list):
|
|
||||||
value = _get_items_from_list(v)
|
|
||||||
elif not isinstance(v, (DBRef, Document)):
|
|
||||||
value = field.to_python(v)
|
|
||||||
new_items.append(value)
|
|
||||||
return new_items
|
|
||||||
|
|
||||||
def _get_items_from_dict(items):
|
|
||||||
new_items = {}
|
|
||||||
for k, v in items.items():
|
|
||||||
value = v
|
|
||||||
if isinstance(v, list):
|
|
||||||
value = _get_items_from_list(v)
|
|
||||||
elif isinstance(v, dict):
|
|
||||||
value = _get_items_from_dict(v)
|
|
||||||
elif not isinstance(v, (DBRef, Document)):
|
|
||||||
value = field.to_python(v)
|
|
||||||
new_items[k] = value
|
|
||||||
return new_items
|
|
||||||
|
|
||||||
if not hasattr(items, "items"):
|
|
||||||
items = _get_items_from_list(items)
|
|
||||||
else:
|
else:
|
||||||
items = _get_items_from_dict(items)
|
items = dict([
|
||||||
|
(k, field.to_python(v))
|
||||||
|
if not isinstance(v, (DBRef, Document)) else (k, v)
|
||||||
|
for k, v in items.iteritems()]
|
||||||
|
)
|
||||||
|
|
||||||
self.reference_map = self._find_references(items)
|
self.reference_map = self._find_references(items)
|
||||||
self.object_map = self._fetch_objects(doc_type=doc_type)
|
self.object_map = self._fetch_objects(doc_type=doc_type)
|
||||||
@ -114,97 +76,68 @@ class DeReference:
|
|||||||
return reference_map
|
return reference_map
|
||||||
|
|
||||||
# Determine the iterator to use
|
# Determine the iterator to use
|
||||||
if isinstance(items, dict):
|
if not hasattr(items, 'items'):
|
||||||
iterator = items.values()
|
iterator = enumerate(items)
|
||||||
else:
|
else:
|
||||||
iterator = items
|
iterator = items.iteritems()
|
||||||
|
|
||||||
# Recursively find dbreferences
|
# Recursively find dbreferences
|
||||||
depth += 1
|
depth += 1
|
||||||
for item in iterator:
|
for k, item in iterator:
|
||||||
if isinstance(item, (Document, EmbeddedDocument)):
|
if hasattr(item, '_fields'):
|
||||||
for field_name, field in item._fields.items():
|
for field_name, field in item._fields.iteritems():
|
||||||
v = item._data.get(field_name, None)
|
v = item._data.get(field_name, None)
|
||||||
if isinstance(v, LazyReference):
|
if isinstance(v, (DBRef)):
|
||||||
# LazyReference inherits DBRef but should not be dereferenced here !
|
reference_map.setdefault(field.document_type, []).append(v.id)
|
||||||
continue
|
elif isinstance(v, (dict, SON)) and '_ref' in v:
|
||||||
elif isinstance(v, DBRef):
|
reference_map.setdefault(get_document(v['_cls']), []).append(v['_ref'].id)
|
||||||
reference_map.setdefault(field.document_type, set()).add(v.id)
|
|
||||||
elif isinstance(v, (dict, SON)) and "_ref" in v:
|
|
||||||
reference_map.setdefault(get_document(v["_cls"]), set()).add(
|
|
||||||
v["_ref"].id
|
|
||||||
)
|
|
||||||
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
|
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
|
||||||
field_cls = getattr(
|
field_cls = getattr(getattr(field, 'field', None), 'document_type', None)
|
||||||
getattr(field, "field", None), "document_type", None
|
|
||||||
)
|
|
||||||
references = self._find_references(v, depth)
|
references = self._find_references(v, depth)
|
||||||
for key, refs in references.items():
|
for key, refs in references.iteritems():
|
||||||
if isinstance(
|
if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)):
|
||||||
field_cls, (Document, TopLevelDocumentMetaclass)
|
|
||||||
):
|
|
||||||
key = field_cls
|
key = field_cls
|
||||||
reference_map.setdefault(key, set()).update(refs)
|
reference_map.setdefault(key, []).extend(refs)
|
||||||
elif isinstance(item, LazyReference):
|
elif isinstance(item, (DBRef)):
|
||||||
# LazyReference inherits DBRef but should not be dereferenced here !
|
reference_map.setdefault(item.collection, []).append(item.id)
|
||||||
continue
|
elif isinstance(item, (dict, SON)) and '_ref' in item:
|
||||||
elif isinstance(item, DBRef):
|
reference_map.setdefault(get_document(item['_cls']), []).append(item['_ref'].id)
|
||||||
reference_map.setdefault(item.collection, set()).add(item.id)
|
|
||||||
elif isinstance(item, (dict, SON)) and "_ref" in item:
|
|
||||||
reference_map.setdefault(get_document(item["_cls"]), set()).add(
|
|
||||||
item["_ref"].id
|
|
||||||
)
|
|
||||||
elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth:
|
elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth:
|
||||||
references = self._find_references(item, depth - 1)
|
references = self._find_references(item, depth - 1)
|
||||||
for key, refs in references.items():
|
for key, refs in references.iteritems():
|
||||||
reference_map.setdefault(key, set()).update(refs)
|
reference_map.setdefault(key, []).extend(refs)
|
||||||
|
|
||||||
return reference_map
|
return reference_map
|
||||||
|
|
||||||
def _fetch_objects(self, doc_type=None):
|
def _fetch_objects(self, doc_type=None):
|
||||||
"""Fetch all references and convert to their document objects"""
|
"""Fetch all references and convert to their document objects
|
||||||
|
"""
|
||||||
object_map = {}
|
object_map = {}
|
||||||
for collection, dbrefs in self.reference_map.items():
|
for col, dbrefs in self.reference_map.iteritems():
|
||||||
|
keys = object_map.keys()
|
||||||
# we use getattr instead of hasattr because hasattr swallows any exception under python2
|
refs = list(set([dbref for dbref in dbrefs if str(dbref) not in keys]))
|
||||||
# so it could hide nasty things without raising exceptions (cfr bug #1688))
|
if hasattr(col, 'objects'): # We have a document class for the refs
|
||||||
ref_document_cls_exists = getattr(collection, "objects", None) is not None
|
references = col.objects.in_bulk(refs)
|
||||||
|
for key, doc in references.iteritems():
|
||||||
if ref_document_cls_exists:
|
object_map[key] = doc
|
||||||
col_name = collection._get_collection_name()
|
|
||||||
refs = [
|
|
||||||
dbref for dbref in dbrefs if (col_name, dbref) not in object_map
|
|
||||||
]
|
|
||||||
references = collection.objects.in_bulk(refs)
-                for key, doc in references.items():
-                    object_map[(col_name, key)] = doc
             else:  # Generic reference: use the refs data to convert to document
-                if isinstance(doc_type, (ListField, DictField, MapField)):
-                    continue
-
-                refs = [
-                    dbref for dbref in dbrefs if (collection, dbref) not in object_map
-                ]
-
-                if doc_type:
-                    references = doc_type._get_db()[collection].find(
-                        {"_id": {"$in": refs}}
-                    )
+                if doc_type and not isinstance(doc_type, (ListField, DictField, MapField,)):
+                    references = doc_type._get_db()[col].find({'_id': {'$in': refs}})
                     for ref in references:
                         doc = doc_type._from_son(ref)
-                        object_map[(collection, doc.id)] = doc
+                        object_map[doc.id] = doc
                 else:
-                    references = get_db()[collection].find({"_id": {"$in": refs}})
+                    references = get_db()[col].find({'_id': {'$in': refs}})
                     for ref in references:
-                        if "_cls" in ref:
+                        if '_cls' in ref:
                             doc = get_document(ref["_cls"])._from_son(ref)
                         elif doc_type is None:
-                            doc = get_document(
-                                "".join(x.capitalize() for x in collection.split("_"))
-                            )._from_son(ref)
+                            doc = get_document(''.join(x.capitalize()
+                                               for x in col.split('_')))._from_son(ref)
                         else:
                             doc = doc_type._from_son(ref)
-                        object_map[(collection, doc.id)] = doc
+                        object_map[doc.id] = doc
         return object_map

     def _attach_objects(self, items, depth=0, instance=None, name=None):
@@ -229,30 +162,20 @@ class DeReference:
             return BaseList(items, instance, name)

         if isinstance(items, (dict, SON)):
-            if "_ref" in items:
-                return self.object_map.get(
-                    (items["_ref"].collection, items["_ref"].id), items
-                )
-            elif "_cls" in items:
-                doc = get_document(items["_cls"])._from_son(items)
-                _cls = doc._data.pop("_cls", None)
-                del items["_cls"]
-                doc._data = self._attach_objects(doc._data, depth, doc, None)
-                if _cls is not None:
-                    doc._data["_cls"] = _cls
+            if '_ref' in items:
+                return self.object_map.get(items['_ref'].id, items)
+            elif '_types' in items and '_cls' in items:
+                doc = get_document(items['_cls'])._from_son(items)
+                doc._data = self._attach_objects(doc._data, depth, doc, name)
                 return doc

-        if not hasattr(items, "items"):
+        if not hasattr(items, 'items'):
             is_list = True
-            list_type = BaseList
-            if isinstance(items, EmbeddedDocumentList):
-                list_type = EmbeddedDocumentList
-            as_tuple = isinstance(items, tuple)
             iterator = enumerate(items)
             data = []
         else:
             is_list = False
-            iterator = items.items()
+            iterator = items.iteritems()
             data = {}

         depth += 1
@@ -264,33 +187,25 @@ class DeReference:

             if k in self.object_map and not is_list:
                 data[k] = self.object_map[k]
-            elif isinstance(v, (Document, EmbeddedDocument)):
-                for field_name in v._fields:
+            elif hasattr(v, '_fields'):
+                for field_name, field in v._fields.iteritems():
                     v = data[k]._data.get(field_name, None)
-                    if isinstance(v, DBRef):
-                        data[k]._data[field_name] = self.object_map.get(
-                            (v.collection, v.id), v
-                        )
-                    elif isinstance(v, (dict, SON)) and "_ref" in v:
-                        data[k]._data[field_name] = self.object_map.get(
-                            (v["_ref"].collection, v["_ref"].id), v
-                        )
-                    elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
-                        item_name = f"{name}.{k}.{field_name}"
-                        data[k]._data[field_name] = self._attach_objects(
-                            v, depth, instance=instance, name=item_name
-                        )
+                    if isinstance(v, (DBRef)):
+                        data[k]._data[field_name] = self.object_map.get(v.id, v)
+                    elif isinstance(v, (dict, SON)) and '_ref' in v:
+                        data[k]._data[field_name] = self.object_map.get(v['_ref'].id, v)
+                    elif isinstance(v, dict) and depth <= self.max_depth:
+                        data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name)
+                    elif isinstance(v, (list, tuple)) and depth <= self.max_depth:
+                        data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name)
             elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
-                item_name = f"{name}.{k}" if name else name
-                data[k] = self._attach_objects(
-                    v, depth - 1, instance=instance, name=item_name
-                )
-            elif isinstance(v, DBRef) and hasattr(v, "id"):
-                data[k] = self.object_map.get((v.collection, v.id), v)
+                data[k] = self._attach_objects(v, depth - 1, instance=instance, name=name)
+            elif hasattr(v, 'id'):
+                data[k] = self.object_map.get(v.id, v)

         if instance and name:
             if is_list:
-                return tuple(data) if as_tuple else list_type(data, instance, name)
+                return BaseList(data, instance, name)
             return BaseDict(data, instance, name)
         depth += 1
         return data
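
The two sides key `object_map` differently: master by a `(collection, id)` tuple, v0.7rc1 by the bare `id`. A minimal sketch (collection names invented) of why the tuple key avoids cross-collection collisions:

# Sketch: two documents in different collections may share an _id value.
# A bare-id map lets one overwrite the other; a (collection, id) key does not.
object_map = {}
object_map[("users", 1)] = "user with _id=1"
object_map[("posts", 1)] = "post with _id=1"
assert len(object_map) == 2  # a plain {1: ...} map would have collapsed these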
159  mongoengine/django/auth.py  Normal file
@@ -0,0 +1,159 @@
import datetime

from mongoengine import *

from django.utils.encoding import smart_str
from django.contrib.auth.models import AnonymousUser
from django.utils.translation import ugettext_lazy as _

try:
    from django.contrib.auth.hashers import check_password, make_password
except ImportError:
    """Handle older versions of Django"""
    from django.utils.hashcompat import md5_constructor, sha_constructor

    def get_hexdigest(algorithm, salt, raw_password):
        raw_password, salt = smart_str(raw_password), smart_str(salt)
        if algorithm == 'md5':
            return md5_constructor(salt + raw_password).hexdigest()
        elif algorithm == 'sha1':
            return sha_constructor(salt + raw_password).hexdigest()
        raise ValueError('Got unknown password algorithm type in password')

    def check_password(raw_password, password):
        algo, salt, hash = password.split('$')
        return hash == get_hexdigest(algo, salt, raw_password)

    def make_password(raw_password):
        from random import random
        algo = 'sha1'
        salt = get_hexdigest(algo, str(random()), str(random()))[:5]
        hash = get_hexdigest(algo, salt, raw_password)
        return '%s$%s$%s' % (algo, salt, hash)


REDIRECT_FIELD_NAME = 'next'


class User(Document):
    """A User document that aims to mirror most of the API specified by Django
    at http://docs.djangoproject.com/en/dev/topics/auth/#users
    """
    username = StringField(max_length=30, required=True,
                           verbose_name=_('username'),
                           help_text=_("Required. 30 characters or fewer. Letters, numbers and @/./+/-/_ characters"))

    first_name = StringField(max_length=30,
                             verbose_name=_('first name'))

    last_name = StringField(max_length=30,
                            verbose_name=_('last name'))
    email = EmailField(verbose_name=_('e-mail address'))
    password = StringField(max_length=128,
                           verbose_name=_('password'),
                           help_text=_("Use '[algo]$[iterations]$[salt]$[hexdigest]' or use the <a href=\"password/\">change password form</a>."))
    is_staff = BooleanField(default=False,
                            verbose_name=_('staff status'),
                            help_text=_("Designates whether the user can log into this admin site."))
    is_active = BooleanField(default=True,
                             verbose_name=_('active'),
                             help_text=_("Designates whether this user should be treated as active. Unselect this instead of deleting accounts."))
    is_superuser = BooleanField(default=False,
                                verbose_name=_('superuser status'),
                                help_text=_("Designates that this user has all permissions without explicitly assigning them."))
    last_login = DateTimeField(default=datetime.datetime.now,
                               verbose_name=_('last login'))
    date_joined = DateTimeField(default=datetime.datetime.now,
                                verbose_name=_('date joined'))

    meta = {
        'allow_inheritance': True,
        'indexes': [
            {'fields': ['username'], 'unique': True}
        ]
    }

    def __unicode__(self):
        return self.username

    def get_full_name(self):
        """Returns the user's first and last names, separated by a space.
        """
        full_name = u'%s %s' % (self.first_name or '', self.last_name or '')
        return full_name.strip()

    def is_anonymous(self):
        return False

    def is_authenticated(self):
        return True

    def set_password(self, raw_password):
        """Sets the user's password - always use this rather than directly
        assigning to :attr:`~mongoengine.django.auth.User.password` as the
        password is hashed before storage.
        """
        self.password = make_password(raw_password)
        self.save()
        return self

    def check_password(self, raw_password):
        """Checks the user's password against a provided password - always use
        this rather than directly comparing to
        :attr:`~mongoengine.django.auth.User.password` as the password is
        hashed before storage.
        """
        return check_password(raw_password, self.password)

    @classmethod
    def create_user(cls, username, password, email=None):
        """Create (and save) a new user with the given username, password and
        email address.
        """
        now = datetime.datetime.now()

        # Normalize the address by lowercasing the domain part of the email
        # address.
        if email is not None:
            try:
                email_name, domain_part = email.strip().split('@', 1)
            except ValueError:
                pass
            else:
                email = '@'.join([email_name, domain_part.lower()])

        user = cls(username=username, email=email, date_joined=now)
        user.set_password(password)
        user.save()
        return user

    def get_and_delete_messages(self):
        return []


class MongoEngineBackend(object):
    """Authenticate using MongoEngine and mongoengine.django.auth.User.
    """

    supports_object_permissions = False
    supports_anonymous_user = False
    supports_inactive_user = False

    def authenticate(self, username=None, password=None):
        user = User.objects(username=username).first()
        if user:
            if password and user.check_password(password):
                return user
        return None

    def get_user(self, user_id):
        return User.objects.with_id(user_id)


def get_user(userid):
    """Returns a User object from an id (User.id). Django's equivalent takes
    request, but taking an id instead leaves it up to the developer to store
    the id in any way they want (session, signed cookie, etc.)
    """
    if not userid:
        return AnonymousUser()
    return MongoEngineBackend().get_user(userid) or AnonymousUser()
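
A settings sketch for wiring the pieces above into a Django project, following the 0.7-era documentation; the database name is illustrative:

# settings.py (sketch) -- 'myapp_db' is a made-up database name.
import mongoengine

mongoengine.connect('myapp_db')

# Authenticate against mongoengine.django.auth.User documents.
AUTHENTICATION_BACKENDS = (
    'mongoengine.django.auth.MongoEngineBackend',
)

# Store Django sessions in MongoDB (see sessions.py below).
SESSION_ENGINE = 'mongoengine.django.sessions'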
74  mongoengine/django/sessions.py  Normal file
@@ -0,0 +1,74 @@
from datetime import datetime

from django.conf import settings
from django.contrib.sessions.backends.base import SessionBase, CreateError
from django.core.exceptions import SuspiciousOperation
from django.utils.encoding import force_unicode

from mongoengine.document import Document
from mongoengine import fields
from mongoengine.queryset import OperationError
from mongoengine.connection import DEFAULT_CONNECTION_NAME


MONGOENGINE_SESSION_DB_ALIAS = getattr(
    settings, 'MONGOENGINE_SESSION_DB_ALIAS',
    DEFAULT_CONNECTION_NAME)


class MongoSession(Document):
    session_key = fields.StringField(primary_key=True, max_length=40)
    session_data = fields.StringField()
    expire_date = fields.DateTimeField()

    meta = {'collection': 'django_session',
            'db_alias': MONGOENGINE_SESSION_DB_ALIAS,
            'allow_inheritance': False}


class SessionStore(SessionBase):
    """A MongoEngine-based session store for Django.
    """

    def load(self):
        try:
            s = MongoSession.objects(session_key=self.session_key,
                                     expire_date__gt=datetime.now())[0]
            return self.decode(force_unicode(s.session_data))
        except (IndexError, SuspiciousOperation):
            self.create()
            return {}

    def exists(self, session_key):
        return bool(MongoSession.objects(session_key=session_key).first())

    def create(self):
        while True:
            self._session_key = self._get_new_session_key()
            try:
                self.save(must_create=True)
            except CreateError:
                continue
            self.modified = True
            self._session_cache = {}
            return

    def save(self, must_create=False):
        if self.session_key is None:
            self._session_key = self._get_new_session_key()
        s = MongoSession(session_key=self.session_key)
        s.session_data = self.encode(self._get_session(no_load=must_create))
        s.expire_date = self.get_expiry_date()
        try:
            s.save(force_insert=must_create, safe=True)
        except OperationError:
            if must_create:
                raise CreateError
            raise

    def delete(self, session_key=None):
        if session_key is None:
            if self.session_key is None:
                return
            session_key = self.session_key
        MongoSession.objects(session_key=session_key).delete()
47  mongoengine/django/shortcuts.py  Normal file
@@ -0,0 +1,47 @@
from mongoengine.queryset import QuerySet
from mongoengine.base import BaseDocument
from mongoengine.base import ValidationError


def _get_queryset(cls):
    """Inspired by django.shortcuts.*"""
    if isinstance(cls, QuerySet):
        return cls
    else:
        return cls.objects


def get_document_or_404(cls, *args, **kwargs):
    """
    Uses get() to return a document, or raises an Http404 exception if the
    document does not exist.

    cls may be a Document or QuerySet object. All other passed
    arguments and keyword arguments are used in the get() query.

    Note: Like with get(), a MultipleObjectsReturned will be raised if more
    than one object is found.

    Inspired by django.shortcuts.*
    """
    queryset = _get_queryset(cls)
    try:
        return queryset.get(*args, **kwargs)
    except (queryset._document.DoesNotExist, ValidationError):
        from django.http import Http404
        raise Http404('No %s matches the given query.' % queryset._document._class_name)


def get_list_or_404(cls, *args, **kwargs):
    """
    Uses filter() to return a list of documents, or raises an Http404
    exception if the list is empty.

    cls may be a Document or QuerySet object. All other passed
    arguments and keyword arguments are used in the filter() query.

    Inspired by django.shortcuts.*
    """
    queryset = _get_queryset(cls)
    obj_list = list(queryset.filter(*args, **kwargs))
    if not obj_list:
        from django.http import Http404
        raise Http404('No %s matches the given query.' % queryset._document._class_name)
    return obj_list
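
A view-level sketch of the shortcuts above; `Post` and its `slug` field are hypothetical stand-ins:

# views.py (sketch) -- 'Post' stands in for any Document with a 'slug' field.
from django.http import HttpResponse
from mongoengine.django.shortcuts import get_document_or_404

def post_detail(request, slug):
    post = get_document_or_404(Post, slug=slug)  # raises Http404 if no match
    return HttpResponse(post.title)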
112  mongoengine/django/storage.py  Normal file
@@ -0,0 +1,112 @@
import os
import itertools
import urlparse

from mongoengine import *
from django.conf import settings
from django.core.files.storage import Storage
from django.core.exceptions import ImproperlyConfigured


class FileDocument(Document):
    """A document used to store a single file in GridFS.
    """
    file = FileField()


class GridFSStorage(Storage):
    """A custom storage backend to store files in GridFS
    """

    def __init__(self, base_url=None):

        if base_url is None:
            base_url = settings.MEDIA_URL
        self.base_url = base_url
        self.document = FileDocument
        self.field = 'file'

    def delete(self, name):
        """Deletes the specified file from the storage system.
        """
        if self.exists(name):
            doc = self.document.objects.first()
            field = getattr(doc, self.field)
            self._get_doc_with_name(name).delete()  # Delete the FileField
            field.delete()  # Delete the FileDocument

    def exists(self, name):
        """Returns True if a file referenced by the given name already exists
        in the storage system, or False if the name is available for a new
        file.
        """
        doc = self._get_doc_with_name(name)
        if doc:
            field = getattr(doc, self.field)
            return bool(field.name)
        else:
            return False

    def listdir(self, path=None):
        """Lists the contents of the specified path, returning a 2-tuple of
        lists; the first item being directories, the second item being files.
        """
        def name(doc):
            return getattr(doc, self.field).name
        docs = self.document.objects
        return [], [name(d) for d in docs if name(d)]

    def size(self, name):
        """Returns the total size, in bytes, of the file specified by name.
        """
        doc = self._get_doc_with_name(name)
        if doc:
            return getattr(doc, self.field).length
        else:
            raise ValueError("No such file or directory: '%s'" % name)

    def url(self, name):
        """Returns an absolute URL where the file's contents can be accessed
        directly by a web browser.
        """
        if self.base_url is None:
            raise ValueError("This file is not accessible via a URL.")
        return urlparse.urljoin(self.base_url, name).replace('\\', '/')

    def _get_doc_with_name(self, name):
        """Find the documents in the store with the given name
        """
        docs = self.document.objects
        doc = [d for d in docs if getattr(d, self.field).name == name]
        if doc:
            return doc[0]
        else:
            return None

    def _open(self, name, mode='rb'):
        doc = self._get_doc_with_name(name)
        if doc:
            return getattr(doc, self.field)
        else:
            raise ValueError("No file found with the name '%s'." % name)

    def get_available_name(self, name):
        """Returns a filename that's free on the target storage system, and
        available for new content to be written to.
        """
        file_root, file_ext = os.path.splitext(name)
        # If the filename already exists, add an underscore and a number (before
        # the file extension, if one exists) to the filename until the generated
        # filename doesn't exist.
        count = itertools.count(1)
        while self.exists(name):
            # file_ext includes the dot.
            name = os.path.join("%s_%s%s" % (file_root, count.next(), file_ext))

        return name

    def _save(self, name, content):
        doc = self.document()
        getattr(doc, self.field).put(content, filename=name)
        doc.save()

        return name
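
A sketch of attaching this backend to a Django file field; the model is hypothetical:

# models.py (sketch) -- 'Attachment' is a made-up Django model.
from django.db import models
from mongoengine.django.storage import GridFSStorage

class Attachment(models.Model):
    # Uploaded content is written to GridFS via FileDocument/FileField
    # rather than to the local filesystem.
    upload = models.FileField(storage=GridFSStorage(), upload_to='uploads')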
39  mongoengine/django/tests.py  Normal file
@@ -0,0 +1,39 @@
#coding: utf-8
from nose.plugins.skip import SkipTest

from mongoengine.python_support import PY3
from mongoengine import connect

try:
    from django.test import TestCase
    from django.conf import settings
except Exception as err:
    if PY3:
        from unittest import TestCase
        # Dummy value so no error
        class settings:
            MONGO_DATABASE_NAME = 'dummy'
    else:
        raise err


class MongoTestCase(TestCase):

    def setUp(self):
        if PY3:
            raise SkipTest('django does not have Python 3 support')

    """
    TestCase class that clears the collection between the tests
    """
    db_name = 'test_%s' % settings.MONGO_DATABASE_NAME

    def __init__(self, methodName='runtest'):
        self.db = connect(self.db_name).get_db()
        super(MongoTestCase, self).__init__(methodName)

    def _post_teardown(self):
        super(MongoTestCase, self)._post_teardown()
        for collection in self.db.collection_names():
            if collection == 'system.indexes':
                continue
            self.db.drop_collection(collection)
File diff suppressed because it is too large
@@ -1,165 +0,0 @@
from collections import defaultdict

__all__ = (
    "NotRegistered",
    "InvalidDocumentError",
    "LookUpError",
    "DoesNotExist",
    "MultipleObjectsReturned",
    "InvalidQueryError",
    "OperationError",
    "NotUniqueError",
    "BulkWriteError",
    "FieldDoesNotExist",
    "ValidationError",
    "SaveConditionError",
    "DeprecatedError",
)


class MongoEngineException(Exception):
    pass


class NotRegistered(MongoEngineException):
    pass


class InvalidDocumentError(MongoEngineException):
    pass


class LookUpError(AttributeError):
    pass


class DoesNotExist(MongoEngineException):
    pass


class MultipleObjectsReturned(MongoEngineException):
    pass


class InvalidQueryError(MongoEngineException):
    pass


class OperationError(MongoEngineException):
    pass


class NotUniqueError(OperationError):
    pass


class BulkWriteError(OperationError):
    pass


class SaveConditionError(OperationError):
    pass


class FieldDoesNotExist(MongoEngineException):
    """Raised when trying to set a field
    not declared in a :class:`~mongoengine.Document`
    or an :class:`~mongoengine.EmbeddedDocument`.

    To avoid this behavior on data loading,
    you should set the :attr:`strict` to ``False``
    in the :attr:`meta` dictionary.
    """


class ValidationError(AssertionError):
    """Validation exception.

    May represent an error validating a field or a
    document containing fields with validation errors.

    :ivar errors: A dictionary of errors for fields within this
        document or list, or None if the error is for an
        individual field.
    """

    errors = {}
    field_name = None
    _message = None

    def __init__(self, message="", **kwargs):
        super().__init__(message)
        self.errors = kwargs.get("errors", {})
        self.field_name = kwargs.get("field_name")
        self.message = message

    def __str__(self):
        return str(self.message)

    def __repr__(self):
        return f"{self.__class__.__name__}({self.message},)"

    def __getattribute__(self, name):
        message = super().__getattribute__(name)
        if name == "message":
            if self.field_name:
                message = "%s" % message
            if self.errors:
                message = f"{message}({self._format_errors()})"
        return message

    def _get_message(self):
        return self._message

    def _set_message(self, message):
        self._message = message

    message = property(_get_message, _set_message)

    def to_dict(self):
        """Returns a dictionary of all errors within a document

        Keys are field names or list indices and values are the
        validation error messages, or a nested dictionary of
        errors for an embedded document or list.
        """

        def build_dict(source):
            errors_dict = {}
            if isinstance(source, dict):
                for field_name, error in source.items():
                    errors_dict[field_name] = build_dict(error)
            elif isinstance(source, ValidationError) and source.errors:
                return build_dict(source.errors)
            else:
                return str(source)

            return errors_dict

        if not self.errors:
            return {}

        return build_dict(self.errors)

    def _format_errors(self):
        """Returns a string listing all errors within a document"""

        def generate_key(value, prefix=""):
            if isinstance(value, list):
                value = " ".join([generate_key(k) for k in value])
            elif isinstance(value, dict):
                value = " ".join([generate_key(v, k) for k, v in value.items()])

            results = f"{prefix}.{value}" if prefix else value
            return results

        error_dict = defaultdict(list)
        for k, v in self.to_dict().items():
            error_dict[generate_key(v)].append(k)
        return " ".join([f"{k}: {v}" for k, v in error_dict.items()])


class DeprecatedError(MongoEngineException):
    """Raised when a user uses a feature that has been deprecated"""

    pass
File diff suppressed because it is too large
@@ -1,20 +0,0 @@
"""
Helper functions, constants, and types to aid with MongoDB version support
"""
from mongoengine.connection import get_connection

# Constants that can be used to compare the version retrieved with
# get_mongodb_version()
MONGODB_34 = (3, 4)
MONGODB_36 = (3, 6)
MONGODB_42 = (4, 2)
MONGODB_44 = (4, 4)


def get_mongodb_version():
    """Return the version of the default connected mongoDB (first 2 digits)

    :return: tuple(int, int)
    """
    version_list = get_connection().server_info()["versionArray"][:2]  # e.g: (3, 2)
    return tuple(version_list)
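
A sketch of the intended call pattern for these helpers; it assumes a default connection registered via connect(), and the database name is made up:

# Sketch: gate feature use on the connected server's version.
import mongoengine
from mongoengine.mongodb_support import MONGODB_36, get_mongodb_version

mongoengine.connect("example_db")  # illustrative database name

if get_mongodb_version() >= MONGODB_36:
    print("MongoDB 3.6+ features are available")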
@@ -1,60 +0,0 @@
"""
Helper functions, constants, and types to aid with PyMongo v2.7 - v3.x support.
"""
import pymongo
from pymongo.errors import OperationFailure

_PYMONGO_37 = (3, 7)

PYMONGO_VERSION = tuple(pymongo.version_tuple[:2])

IS_PYMONGO_GTE_37 = PYMONGO_VERSION >= _PYMONGO_37


def count_documents(
    collection, filter, skip=None, limit=None, hint=None, collation=None
):
    """Pymongo>3.7 deprecates count in favour of count_documents"""
    if limit == 0:
        return 0  # Pymongo raises an OperationFailure if called with limit=0

    kwargs = {}
    if skip is not None:
        kwargs["skip"] = skip
    if limit is not None:
        kwargs["limit"] = limit
    if hint not in (-1, None):
        kwargs["hint"] = hint
    if collation is not None:
        kwargs["collation"] = collation

    # count_documents appeared in pymongo 3.7
    if IS_PYMONGO_GTE_37:
        try:
            return collection.count_documents(filter=filter, **kwargs)
        except OperationFailure:
            # OperationFailure - accounts for some operators that used to work
            # with .count but are no longer working with count_documents (i.e $geoNear, $near, and $nearSphere)
            # fallback to deprecated Cursor.count
            # Keeping this should be reevaluated the day pymongo removes .count entirely
            pass

    cursor = collection.find(filter)
    for option, option_value in kwargs.items():
        cursor_method = getattr(cursor, option)
        cursor = cursor_method(option_value)
    with_limit_and_skip = "skip" in kwargs or "limit" in kwargs
    return cursor.count(with_limit_and_skip=with_limit_and_skip)


def list_collection_names(db, include_system_collections=False):
    """Pymongo>3.7 deprecates collection_names in favour of list_collection_names"""
    if IS_PYMONGO_GTE_37:
        collections = db.list_collection_names()
    else:
        collections = db.collection_names()

    if not include_system_collections:
        collections = [c for c in collections if not c.startswith("system.")]

    return collections
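
The shim operates on raw PyMongo collections, so it can be sketched without any Document classes; the URI and names are illustrative:

# Sketch: count with the version-appropriate API picked by the shim.
import pymongo
from mongoengine.pymongo_support import count_documents

client = pymongo.MongoClient("mongodb://localhost:27017")  # illustrative URI
coll = client["example_db"]["example_coll"]

# Uses collection.count_documents() on PyMongo >= 3.7, otherwise the
# deprecated Cursor.count() fallback shown above.
n = count_documents(coll, {"status": "active"}, limit=10)
print(n)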
60  mongoengine/python_support.py  Normal file
@@ -0,0 +1,60 @@
"""Helper functions and types to aid with Python 2.5 - 3 support."""

import sys

PY3 = sys.version_info[0] == 3
PY25 = sys.version_info[:2] == (2, 5)

if PY3:
    import codecs
    from io import BytesIO as StringIO

    # return s converted to binary. b('test') should be equivalent to b'test'
    def b(s):
        return codecs.latin_1_encode(s)[0]

    bin_type = bytes
    txt_type = str
else:
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO

    # Conversion to binary only necessary in Python 3
    def b(s):
        return s

    bin_type = str
    txt_type = unicode

str_types = (bin_type, txt_type)

if PY25:
    def product(*args, **kwds):
        pools = map(tuple, args) * kwds.get('repeat', 1)
        result = [[]]
        for pool in pools:
            result = [x + [y] for x in result for y in pool]
        for prod in result:
            yield tuple(prod)
    reduce = reduce
else:
    from itertools import product
    from functools import reduce


# For use with Python 2.5
# converts all keys from unicode to str for d and all nested dictionaries
def to_str_keys_recursive(d):
    if isinstance(d, list):
        for val in d:
            if isinstance(val, (dict, list)):
                to_str_keys_recursive(val)
    elif isinstance(d, dict):
        for key, val in d.items():
            if isinstance(val, (dict, list)):
                to_str_keys_recursive(val)
            if isinstance(key, unicode):
                d[str(key)] = d.pop(key)
    else:
        raise ValueError("non list/dict parameter not allowed")
1960  mongoengine/queryset.py  Normal file
File diff suppressed because it is too large
@@ -1,28 +0,0 @@
from mongoengine.errors import *
from mongoengine.queryset.field_list import *
from mongoengine.queryset.manager import *
from mongoengine.queryset.queryset import *
from mongoengine.queryset.transform import *
from mongoengine.queryset.visitor import *

# Expose just the public subset of all imported objects and constants.
__all__ = (
    "QuerySet",
    "QuerySetNoCache",
    "Q",
    "queryset_manager",
    "QuerySetManager",
    "QueryFieldList",
    "DO_NOTHING",
    "NULLIFY",
    "CASCADE",
    "DENY",
    "PULL",
    # Errors that might be related to a queryset, mostly here for backward
    # compatibility
    "DoesNotExist",
    "InvalidQueryError",
    "MultipleObjectsReturned",
    "NotUniqueError",
    "OperationError",
)
File diff suppressed because it is too large
@@ -1,88 +0,0 @@
__all__ = ("QueryFieldList",)


class QueryFieldList:
    """Object that handles combinations of .only() and .exclude() calls"""

    ONLY = 1
    EXCLUDE = 0

    def __init__(
        self, fields=None, value=ONLY, always_include=None, _only_called=False
    ):
        """The QueryFieldList builder

        :param fields: A list of fields used in `.only()` or `.exclude()`
        :param value: How to handle the fields; either `ONLY` or `EXCLUDE`
        :param always_include: Any fields to always_include eg `_cls`
        :param _only_called: Has `.only()` been called? If so it's a set of
           fields, otherwise it performs a union.
        """
        self.value = value
        self.fields = set(fields or [])
        self.always_include = set(always_include or [])
        self._id = None
        self._only_called = _only_called
        self.slice = {}

    def __add__(self, f):
        if isinstance(f.value, dict):
            for field in f.fields:
                self.slice[field] = f.value
            if not self.fields:
                self.fields = f.fields
        elif not self.fields:
            self.fields = f.fields
            self.value = f.value
            self.slice = {}
        elif self.value is self.ONLY and f.value is self.ONLY:
            self._clean_slice()
            if self._only_called:
                self.fields = self.fields.union(f.fields)
            else:
                self.fields = f.fields
        elif self.value is self.EXCLUDE and f.value is self.EXCLUDE:
            self.fields = self.fields.union(f.fields)
            self._clean_slice()
        elif self.value is self.ONLY and f.value is self.EXCLUDE:
            self.fields -= f.fields
            self._clean_slice()
        elif self.value is self.EXCLUDE and f.value is self.ONLY:
            self.value = self.ONLY
            self.fields = f.fields - self.fields
            self._clean_slice()

        if "_id" in f.fields:
            self._id = f.value

        if self.always_include:
            if self.value is self.ONLY and self.fields:
                if sorted(self.slice.keys()) != sorted(self.fields):
                    self.fields = self.fields.union(self.always_include)
            else:
                self.fields -= self.always_include

        if getattr(f, "_only_called", False):
            self._only_called = True
        return self

    def __bool__(self):
        return bool(self.fields)

    def as_dict(self):
        field_list = {field: self.value for field in self.fields}
        if self.slice:
            field_list.update(self.slice)
        if self._id is not None:
            field_list["_id"] = self._id
        return field_list

    def reset(self):
        self.fields = set()
        self.slice = {}
        self.value = self.ONLY

    def _clean_slice(self):
        if self.slice:
            for field in set(self.slice.keys()) - self.fields:
                del self.slice[field]
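
A small sketch of how successive .only()/.exclude() calls fold into one projection, using the master-side module path; field names are arbitrary:

# Sketch: combining an .only() list with a later .exclude(), as the
# queryset machinery does internally via __add__.
from mongoengine.queryset.field_list import QueryFieldList

only = QueryFieldList(fields=["title", "author"], value=QueryFieldList.ONLY,
                      _only_called=True)
exclude = QueryFieldList(fields=["author"], value=QueryFieldList.EXCLUDE)

combined = only + exclude     # ONLY + EXCLUDE subtracts the excluded fields
print(combined.as_dict())     # {'title': 1}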
@@ -1,58 +0,0 @@
from functools import partial

from mongoengine.queryset.queryset import QuerySet

__all__ = ("queryset_manager", "QuerySetManager")


class QuerySetManager:
    """
    The default QuerySet Manager.

    Custom QuerySet Manager functions can extend this class and users can
    add extra queryset functionality. Any custom manager methods must accept a
    :class:`~mongoengine.Document` class as its first argument, and a
    :class:`~mongoengine.queryset.QuerySet` as its second argument.

    The method function should return a :class:`~mongoengine.queryset.QuerySet`,
    probably the same one that was passed in, but modified in some way.
    """

    get_queryset = None
    default = QuerySet

    def __init__(self, queryset_func=None):
        if queryset_func:
            self.get_queryset = queryset_func

    def __get__(self, instance, owner):
        """Descriptor for instantiating a new QuerySet object when
        Document.objects is accessed.
        """
        if instance is not None:
            # Document object being used rather than a document class
            return self

        # owner is the document that contains the QuerySetManager
        queryset_class = owner._meta.get("queryset_class", self.default)
        queryset = queryset_class(owner, owner._get_collection())
        if self.get_queryset:
            arg_count = self.get_queryset.__code__.co_argcount
            if arg_count == 1:
                queryset = self.get_queryset(queryset)
            elif arg_count == 2:
                queryset = self.get_queryset(owner, queryset)
            else:
                queryset = partial(self.get_queryset, owner, queryset)
        return queryset


def queryset_manager(func):
    """Decorator that allows you to define custom QuerySet managers on
    :class:`~mongoengine.Document` classes. The manager must be a function that
    accepts a :class:`~mongoengine.Document` class as its first argument, and a
    :class:`~mongoengine.queryset.QuerySet` as its second argument. The method
    function should return a :class:`~mongoengine.queryset.QuerySet`, probably
    the same one that was passed in, but modified in some way.
    """
    return QuerySetManager(func)
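
The decorator's contract reads more clearly with a usage sketch, in the style of the mongoengine docs; the document and field names are illustrative:

# Sketch: a custom manager that narrows the default queryset.
from mongoengine import Document, BooleanField, queryset_manager

class BlogPost(Document):
    published = BooleanField(default=False)

    @queryset_manager
    def live_posts(doc_cls, queryset):
        # Receives the Document class and the base QuerySet, and returns
        # a modified QuerySet, per the manager contract above.
        return queryset.filter(published=True)

# BlogPost.live_posts then behaves like BlogPost.objects, pre-filtered.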
@@ -1,190 +0,0 @@
from mongoengine.errors import OperationError
from mongoengine.queryset.base import (
    CASCADE,
    DENY,
    DO_NOTHING,
    NULLIFY,
    PULL,
    BaseQuerySet,
)

__all__ = (
    "QuerySet",
    "QuerySetNoCache",
    "DO_NOTHING",
    "NULLIFY",
    "CASCADE",
    "DENY",
    "PULL",
)

# The maximum number of items to display in a QuerySet.__repr__
REPR_OUTPUT_SIZE = 20
ITER_CHUNK_SIZE = 100


class QuerySet(BaseQuerySet):
    """The default queryset, that builds queries and handles a set of results
    returned from a query.

    Wraps a MongoDB cursor, providing :class:`~mongoengine.Document` objects as
    the results.
    """

    _has_more = True
    _len = None
    _result_cache = None

    def __iter__(self):
        """Iteration utilises a results cache which iterates the cursor
        in batches of ``ITER_CHUNK_SIZE``.

        If ``self._has_more`` the cursor hasn't been exhausted so cache then
        batch. Otherwise iterate the result_cache.
        """
        self._iter = True

        if self._has_more:
            return self._iter_results()

        # iterating over the cache.
        return iter(self._result_cache)

    def __len__(self):
        """Since __len__ is called quite frequently (for example, as part of
        list(qs)), we populate the result cache and cache the length.
        """
        if self._len is not None:
            return self._len

        # Populate the result cache with *all* of the docs in the cursor
        if self._has_more:
            list(self._iter_results())

        # Cache the length of the complete result cache and return it
        self._len = len(self._result_cache)
        return self._len

    def __repr__(self):
        """Provide a string representation of the QuerySet"""
        if self._iter:
            return ".. queryset mid-iteration .."

        self._populate_cache()
        data = self._result_cache[: REPR_OUTPUT_SIZE + 1]
        if len(data) > REPR_OUTPUT_SIZE:
            data[-1] = "...(remaining elements truncated)..."
        return repr(data)

    def _iter_results(self):
        """A generator for iterating over the result cache.

        Also populates the cache if there are more possible results to
        yield. Raises StopIteration when there are no more results.
        """
        if self._result_cache is None:
            self._result_cache = []

        pos = 0
        while True:

            # For all positions lower than the length of the current result
            # cache, serve the docs straight from the cache w/o hitting the
            # database.
            # XXX it's VERY important to compute the len within the `while`
            # condition because the result cache might expand mid-iteration
            # (e.g. if we call len(qs) inside a loop that iterates over the
            # queryset). Fortunately len(list) is O(1) in Python, so this
            # doesn't cause performance issues.
            while pos < len(self._result_cache):
                yield self._result_cache[pos]
                pos += 1

            # return if we already established there were no more
            # docs in the db cursor.
            if not self._has_more:
                return

            # Otherwise, populate more of the cache and repeat.
            if len(self._result_cache) <= pos:
                self._populate_cache()

    def _populate_cache(self):
        """
        Populates the result cache with ``ITER_CHUNK_SIZE`` more entries
        (until the cursor is exhausted).
        """
        if self._result_cache is None:
            self._result_cache = []

        # Skip populating the cache if we already established there are no
        # more docs to pull from the database.
        if not self._has_more:
            return

        # Pull in ITER_CHUNK_SIZE docs from the database and store them in
        # the result cache.
        try:
            for _ in range(ITER_CHUNK_SIZE):
                self._result_cache.append(next(self))
        except StopIteration:
            # Getting this exception means there are no more docs in the
            # db cursor. Set _has_more to False so that we can use that
            # information in other places.
            self._has_more = False

    def count(self, with_limit_and_skip=False):
        """Count the selected elements in the query.

        :param with_limit_and_skip (optional): take any :meth:`limit` or
            :meth:`skip` that has been applied to this cursor into account when
            getting the count
        """
        if with_limit_and_skip is False:
            return super().count(with_limit_and_skip)

        if self._len is None:
            # cache the length
            self._len = super().count(with_limit_and_skip)

        return self._len

    def no_cache(self):
        """Convert to a non-caching queryset"""
        if self._result_cache is not None:
            raise OperationError("QuerySet already cached")

        return self._clone_into(QuerySetNoCache(self._document, self._collection))


class QuerySetNoCache(BaseQuerySet):
    """A non-caching QuerySet"""

    def cache(self):
        """Convert to a caching queryset"""
        return self._clone_into(QuerySet(self._document, self._collection))

    def __repr__(self):
        """Provides the string representation of the QuerySet"""
        if self._iter:
            return ".. queryset mid-iteration .."

        data = []
        for _ in range(REPR_OUTPUT_SIZE + 1):
            try:
                data.append(next(self))
            except StopIteration:
                break

        if len(data) > REPR_OUTPUT_SIZE:
            data[-1] = "...(remaining elements truncated)..."

        self.rewind()
        return repr(data)

    def __iter__(self):
        queryset = self
        if queryset._iter:
            queryset = self.clone()
        queryset.rewind()
        return queryset
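
A sketch contrasting the two behaviours, mirroring the documented no_cache() usage; `Person` is a hypothetical Document subclass:

# Sketch: caching vs non-caching iteration.
people = Person.objects           # caching QuerySet
list(people)                      # first pass pulls from the database
list(people)                      # second pass is served from _result_cache

for p in Person.objects.no_cache():   # QuerySetNoCache
    # Documents stream straight from the cursor and are not retained,
    # keeping memory flat for large result sets.
    print(p)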
@@ -1,506 +0,0 @@
from collections import defaultdict

import pymongo
from bson import SON, ObjectId
from bson.dbref import DBRef

from mongoengine.base import UPDATE_OPERATORS
from mongoengine.common import _import_class
from mongoengine.errors import InvalidQueryError

__all__ = ("query", "update", "STRING_OPERATORS")

COMPARISON_OPERATORS = (
    "ne",
    "gt",
    "gte",
    "lt",
    "lte",
    "in",
    "nin",
    "mod",
    "all",
    "size",
    "exists",
    "not",
    "elemMatch",
    "type",
)
GEO_OPERATORS = (
    "within_distance",
    "within_spherical_distance",
    "within_box",
    "within_polygon",
    "near",
    "near_sphere",
    "max_distance",
    "min_distance",
    "geo_within",
    "geo_within_box",
    "geo_within_polygon",
    "geo_within_center",
    "geo_within_sphere",
    "geo_intersects",
)
STRING_OPERATORS = (
    "contains",
    "icontains",
    "startswith",
    "istartswith",
    "endswith",
    "iendswith",
    "exact",
    "iexact",
    "regex",
    "iregex",
    "wholeword",
    "iwholeword",
)
CUSTOM_OPERATORS = ("match",)
MATCH_OPERATORS = (
    COMPARISON_OPERATORS + GEO_OPERATORS + STRING_OPERATORS + CUSTOM_OPERATORS
)


# TODO make this less complex
def query(_doc_cls=None, **kwargs):
    """Transform a query from Django-style format to Mongo format."""
    mongo_query = {}
    merge_query = defaultdict(list)
    for key, value in sorted(kwargs.items()):
        if key == "__raw__":
            mongo_query.update(value)
            continue

        parts = key.rsplit("__")
        indices = [(i, p) for i, p in enumerate(parts) if p.isdigit()]
        parts = [part for part in parts if not part.isdigit()]
        # Check for an operator and transform to mongo-style if there is
        op = None
        if len(parts) > 1 and parts[-1] in MATCH_OPERATORS:
            op = parts.pop()

        # Allow to escape operator-like field name by __
        if len(parts) > 1 and parts[-1] == "":
            parts.pop()

        negate = False
        if len(parts) > 1 and parts[-1] == "not":
            parts.pop()
            negate = True

        if _doc_cls:
            # Switch field names to proper names [set in Field(name='foo')]
            try:
                fields = _doc_cls._lookup_field(parts)
            except Exception as e:
                raise InvalidQueryError(e)
            parts = []

            CachedReferenceField = _import_class("CachedReferenceField")
            GenericReferenceField = _import_class("GenericReferenceField")

            cleaned_fields = []
            for field in fields:
                append_field = True
                if isinstance(field, str):
                    parts.append(field)
                    append_field = False
                # is last and CachedReferenceField
                elif isinstance(field, CachedReferenceField) and fields[-1] == field:
                    parts.append("%s._id" % field.db_field)
                else:
                    parts.append(field.db_field)

                if append_field:
                    cleaned_fields.append(field)

            # Convert value to proper value
            field = cleaned_fields[-1]

            singular_ops = [None, "ne", "gt", "gte", "lt", "lte", "not"]
            singular_ops += STRING_OPERATORS
            if op in singular_ops:
                value = field.prepare_query_value(op, value)

                if isinstance(field, CachedReferenceField) and value:
                    value = value["_id"]

            elif op in ("in", "nin", "all", "near") and not isinstance(value, dict):
                # Raise an error if the in/nin/all/near param is not iterable.
                value = _prepare_query_for_iterable(field, op, value)

            # If we're querying a GenericReferenceField, we need to alter the
            # key depending on the value:
            # * If the value is a DBRef, the key should be "field_name._ref".
            # * If the value is an ObjectId, the key should be "field_name._ref.$id".
            if isinstance(field, GenericReferenceField):
                if isinstance(value, DBRef):
                    parts[-1] += "._ref"
                elif isinstance(value, ObjectId):
                    parts[-1] += "._ref.$id"

        # if op and op not in COMPARISON_OPERATORS:
        if op:
            if op in GEO_OPERATORS:
                value = _geo_operator(field, op, value)
            elif op in ("match", "elemMatch"):
                ListField = _import_class("ListField")
                EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
                if (
                    isinstance(value, dict)
                    and isinstance(field, ListField)
                    and isinstance(field.field, EmbeddedDocumentField)
                ):
                    value = query(field.field.document_type, **value)
                else:
                    value = field.prepare_query_value(op, value)
                value = {"$elemMatch": value}
            elif op in CUSTOM_OPERATORS:
                raise NotImplementedError(
                    'Custom method "%s" has not been implemented' % op
                )
            elif op not in STRING_OPERATORS:
                value = {"$" + op: value}

        if negate:
            value = {"$not": value}

        for i, part in indices:
            parts.insert(i, part)

        key = ".".join(parts)

        if key not in mongo_query:
            mongo_query[key] = value
        else:
            if isinstance(mongo_query[key], dict) and isinstance(value, dict):
                mongo_query[key].update(value)
                # $max/minDistance needs to come last - convert to SON
                value_dict = mongo_query[key]
                if ("$maxDistance" in value_dict or "$minDistance" in value_dict) and (
                    "$near" in value_dict or "$nearSphere" in value_dict
                ):
                    value_son = SON()
                    for k, v in value_dict.items():
                        if k == "$maxDistance" or k == "$minDistance":
                            continue
                        value_son[k] = v
                    # Required for MongoDB >= 2.6, may fail when combining
                    # PyMongo 3+ and MongoDB < 2.6
                    near_embedded = False
                    for near_op in ("$near", "$nearSphere"):
                        if isinstance(value_dict.get(near_op), dict):
                            value_son[near_op] = SON(value_son[near_op])
                            if "$maxDistance" in value_dict:
                                value_son[near_op]["$maxDistance"] = value_dict[
                                    "$maxDistance"
                                ]
                            if "$minDistance" in value_dict:
                                value_son[near_op]["$minDistance"] = value_dict[
                                    "$minDistance"
                                ]
                            near_embedded = True

                    if not near_embedded:
                        if "$maxDistance" in value_dict:
                            value_son["$maxDistance"] = value_dict["$maxDistance"]
                        if "$minDistance" in value_dict:
                            value_son["$minDistance"] = value_dict["$minDistance"]
                    mongo_query[key] = value_son
            else:
                # Store for manually merging later
                merge_query[key].append(value)

    # The queryset has been filtered in such a way that we must manually merge
    for k, v in merge_query.items():
        merge_query[k].append(mongo_query[k])
        del mongo_query[k]
        if isinstance(v, list):
            value = [{k: val} for val in v]
            if "$and" in mongo_query.keys():
                mongo_query["$and"].extend(value)
            else:
                mongo_query["$and"] = value

    return mongo_query
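
A quick sketch of the transform without a document class, which exercises only the operator-mapping path (no field lookup or value coercion happens then):

# Sketch: Django-style kwargs become a Mongo filter document.
from mongoengine.queryset.transform import query

print(query(age__gte=18))        # {'age': {'$gte': 18}}
print(query(name__not__size=5))  # {'name': {'$not': {'$size': 5}}}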
|
|
||||||
|
|
||||||
def update(_doc_cls=None, **update):
|
|
||||||
"""Transform an update spec from Django-style format to Mongo
|
|
||||||
format.
|
|
||||||
"""
|
|
||||||
mongo_update = {}
|
|
||||||
|
|
||||||
for key, value in update.items():
|
|
||||||
if key == "__raw__":
|
|
||||||
mongo_update.update(value)
|
|
||||||
continue
|
|
||||||
|
|
||||||
parts = key.split("__")
|
|
||||||
|
|
||||||
# if there is no operator, default to 'set'
|
|
||||||
if len(parts) < 3 and parts[0] not in UPDATE_OPERATORS:
|
|
||||||
parts.insert(0, "set")
|
|
||||||
|
|
||||||
# Check for an operator and transform to mongo-style if there is
|
|
||||||
op = None
|
|
||||||
if parts[0] in UPDATE_OPERATORS:
|
|
||||||
op = parts.pop(0)
|
|
||||||
            # Convert Pythonic names to Mongo equivalents
            operator_map = {
                "push_all": "pushAll",
                "pull_all": "pullAll",
                "dec": "inc",
                "add_to_set": "addToSet",
                "set_on_insert": "setOnInsert",
            }
            if op == "dec":
                # Support decrement by flipping a positive value's sign
                # and using 'inc'
                value = -value
            # If the operator isn't found in the operator map, the op value
            # stays unchanged
            op = operator_map.get(op, op)

        match = None
        if parts[-1] in COMPARISON_OPERATORS:
            match = parts.pop()

        # Allow escaping operator-like field names with __
        if len(parts) > 1 and parts[-1] == "":
            parts.pop()

        if _doc_cls:
            # Switch field names to proper names [set in Field(name='foo')]
            try:
                fields = _doc_cls._lookup_field(parts)
            except Exception as e:
                raise InvalidQueryError(e)
            parts = []

            cleaned_fields = []
            appended_sub_field = False
            for field in fields:
                append_field = True
                if isinstance(field, str):
                    # Convert the S operator to $
                    if field == "S":
                        field = "$"
                    parts.append(field)
                    append_field = False
                else:
                    parts.append(field.db_field)
                if append_field:
                    appended_sub_field = False
                    cleaned_fields.append(field)
                    if hasattr(field, "field"):
                        cleaned_fields.append(field.field)
                        appended_sub_field = True

            # Convert value to proper value
            if appended_sub_field:
                field = cleaned_fields[-2]
            else:
                field = cleaned_fields[-1]

            GeoJsonBaseField = _import_class("GeoJsonBaseField")
            if isinstance(field, GeoJsonBaseField):
                value = field.to_mongo(value)

            if op == "pull":
                if field.required or value is not None:
                    if match in ("in", "nin") and not isinstance(value, dict):
                        value = _prepare_query_for_iterable(field, op, value)
                    else:
                        value = field.prepare_query_value(op, value)
            elif op == "push" and isinstance(value, (list, tuple, set)):
                value = [field.prepare_query_value(op, v) for v in value]
            elif op in (None, "set", "push"):
                if field.required or value is not None:
                    value = field.prepare_query_value(op, value)
            elif op in ("pushAll", "pullAll"):
                value = [field.prepare_query_value(op, v) for v in value]
            elif op in ("addToSet", "setOnInsert"):
                if isinstance(value, (list, tuple, set)):
                    value = [field.prepare_query_value(op, v) for v in value]
                elif field.required or value is not None:
                    value = field.prepare_query_value(op, value)
            elif op == "unset":
                value = 1
            elif op == "inc":
                value = field.prepare_query_value(op, value)

        if match:
            match = "$" + match
            value = {match: value}

        key = ".".join(parts)

        if "pull" in op and "." in key:
            # Dot operators don't work on pull operations
            # unless they point to a list field.
            # Otherwise it uses nested dict syntax.
            if op == "pullAll":
                raise InvalidQueryError(
                    "pullAll operations only support a single field depth"
                )

            # Look for the last list field and use dot notation until there
            field_classes = [c.__class__ for c in cleaned_fields]
            field_classes.reverse()
            ListField = _import_class("ListField")
            EmbeddedDocumentListField = _import_class("EmbeddedDocumentListField")
            if ListField in field_classes or EmbeddedDocumentListField in field_classes:
                # Join all fields via dot notation to the last ListField or EmbeddedDocumentListField
                # Then process as normal
                if ListField in field_classes:
                    _check_field = ListField
                else:
                    _check_field = EmbeddedDocumentListField

                last_listField = len(cleaned_fields) - field_classes.index(_check_field)
                key = ".".join(parts[:last_listField])
                parts = parts[last_listField:]
                parts.insert(0, key)

            parts.reverse()
            for key in parts:
                value = {key: value}
        elif op == "addToSet" and isinstance(value, list):
            value = {key: {"$each": value}}
        elif op in ("push", "pushAll"):
            if parts[-1].isdigit():
                key = ".".join(parts[0:-1])
                position = int(parts[-1])
                # $position expects an iterable. If pushing a single value,
                # wrap it in a list.
                if not isinstance(value, (set, tuple, list)):
                    value = [value]
                value = {key: {"$each": value, "$position": position}}
            else:
                if op == "pushAll":
                    op = "push"  # convert to non-deprecated keyword
                    if not isinstance(value, (set, tuple, list)):
                        value = [value]
                    value = {key: {"$each": value}}
                else:
                    value = {key: value}
        else:
            value = {key: value}

        key = "$" + op
        if key not in mongo_update:
            mongo_update[key] = value
        elif key in mongo_update and isinstance(mongo_update[key], dict):
            mongo_update[key].update(value)

    return mongo_update
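
For orientation, the block above is the tail of the update transform, which turns Django-style keyword arguments into a MongoDB update document. A minimal sketch of the mapping it produces, using a hypothetical document class (the final line needs a running MongoDB):

    from mongoengine import Document, IntField, ListField, StringField, connect

    class Post(Document):
        tags = ListField(StringField())
        views = IntField()

    # inc__views=1       -> {"$inc": {"views": 1}}
    # dec__views=2       -> {"$inc": {"views": -2}}  (sign flipped, reuses $inc)
    # push__tags="mongo" -> {"$push": {"tags": "mongo"}}
    # unset__views=1     -> {"$unset": {"views": 1}}
    connect(db="example")
    Post.objects(tags="db").update(inc__views=1, push__tags="mongo")
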
def _geo_operator(field, op, value):
    """Helper to return the query for a given geo query."""
    if op == "max_distance":
        value = {"$maxDistance": value}
    elif op == "min_distance":
        value = {"$minDistance": value}
    elif field._geo_index == pymongo.GEO2D:
        if op == "within_distance":
            value = {"$within": {"$center": value}}
        elif op == "within_spherical_distance":
            value = {"$within": {"$centerSphere": value}}
        elif op == "within_polygon":
            value = {"$within": {"$polygon": value}}
        elif op == "near":
            value = {"$near": value}
        elif op == "near_sphere":
            value = {"$nearSphere": value}
        elif op == "within_box":
            value = {"$within": {"$box": value}}
        else:
            raise NotImplementedError(
                'Geo method "%s" has not been implemented for a GeoPointField' % op
            )
    else:
        if op == "geo_within":
            value = {"$geoWithin": _infer_geometry(value)}
        elif op == "geo_within_box":
            value = {"$geoWithin": {"$box": value}}
        elif op == "geo_within_polygon":
            value = {"$geoWithin": {"$polygon": value}}
        elif op == "geo_within_center":
            value = {"$geoWithin": {"$center": value}}
        elif op == "geo_within_sphere":
            value = {"$geoWithin": {"$centerSphere": value}}
        elif op == "geo_intersects":
            value = {"$geoIntersects": _infer_geometry(value)}
        elif op == "near":
            value = {"$near": _infer_geometry(value)}
        else:
            raise NotImplementedError(
                'Geo method "{}" has not been implemented for a {}'.format(
                    op, field._name
                )
            )
    return value


def _infer_geometry(value):
    """Helper method that tries to infer the $geometry shape for a
    given value.
    """
    if isinstance(value, dict):
        if "$geometry" in value:
            return value
        elif "coordinates" in value and "type" in value:
            return {"$geometry": value}
        raise InvalidQueryError(
            "Invalid $geometry dictionary should have type and coordinates keys"
        )
    elif isinstance(value, (list, set)):
        # TODO: shouldn't we test value[0][0][0][0] to see if it is MultiPolygon?

        try:
            value[0][0][0]
            return {"$geometry": {"type": "Polygon", "coordinates": value}}
        except (TypeError, IndexError):
            pass

        try:
            value[0][0]
            return {"$geometry": {"type": "LineString", "coordinates": value}}
        except (TypeError, IndexError):
            pass

        try:
            value[0]
            return {"$geometry": {"type": "Point", "coordinates": value}}
        except (TypeError, IndexError):
            pass

    raise InvalidQueryError(
        "Invalid $geometry data. Can be either a "
        "dictionary or (nested) lists of coordinate(s)"
    )
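
The nesting depth of the coordinate lists is what drives the inferred GeoJSON type above; a dict is passed through (wrapped in $geometry if needed). Illustrative values:

    _infer_geometry([40.0, 5.0])
    # -> {"$geometry": {"type": "Point", "coordinates": [40.0, 5.0]}}
    _infer_geometry([[40.0, 5.0], [41.0, 6.0]])
    # -> {"$geometry": {"type": "LineString", "coordinates": [[40.0, 5.0], [41.0, 6.0]]}}
    _infer_geometry([[[40.0, 5.0], [40.0, 6.0], [41.0, 6.0], [40.0, 5.0]]])
    # -> {"$geometry": {"type": "Polygon", "coordinates": [[[40.0, 5.0], [40.0, 6.0], [41.0, 6.0], [40.0, 5.0]]]}}
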
def _prepare_query_for_iterable(field, op, value):
    # We need a special check for BaseDocument, because - although it's iterable - using
    # it as such in the context of this method is most definitely a mistake.
    BaseDocument = _import_class("BaseDocument")

    if isinstance(value, BaseDocument):
        raise TypeError(
            "When using the `in`, `nin`, `all`, or "
            "`near`-operators you can't use a "
            "`Document`, you must wrap your object "
            "in a list (object -> [object])."
        )

    if not hasattr(value, "__iter__"):
        raise TypeError(
            "The `in`, `nin`, `all`, or "
            "`near`-operators must be applied to an "
            "iterable (e.g. a list)."
        )

    return [field.prepare_query_value(op, v) for v in value]
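
The two TypeErrors above guard a common pitfall: passing a single document where the operator expects an iterable. A hedged sketch with hypothetical models:

    from mongoengine import Document, ReferenceField, StringField

    class Author(Document):
        name = StringField()

    class Book(Document):
        author = ReferenceField(Author)

    # Book.objects(author__in=some_author)    raises TypeError
    # Book.objects(author__in=[some_author])  is the supported form
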
@@ -1,189 +0,0 @@
import copy
import warnings

from mongoengine.errors import InvalidQueryError
from mongoengine.queryset import transform

__all__ = ("Q", "QNode")


def warn_empty_is_deprecated():
    msg = "'empty' property is deprecated in favour of using 'not bool(filter)'"
    warnings.warn(msg, DeprecationWarning, stacklevel=2)


class QNodeVisitor:
    """Base visitor class for visiting Q-object nodes in a query tree."""

    def visit_combination(self, combination):
        """Called by QCombination objects."""
        return combination

    def visit_query(self, query):
        """Called by (New)Q objects."""
        return query


class DuplicateQueryConditionsError(InvalidQueryError):
    pass


class SimplificationVisitor(QNodeVisitor):
    """Simplifies query trees by combining unnecessary 'and' connection nodes
    into a single Q-object.
    """

    def visit_combination(self, combination):
        if combination.operation == combination.AND:
            # The simplification only applies to 'simple' queries
            if all(isinstance(node, Q) for node in combination.children):
                queries = [n.query for n in combination.children]
                try:
                    return Q(**self._query_conjunction(queries))
                except DuplicateQueryConditionsError:
                    # Cannot be simplified
                    pass
        return combination

    def _query_conjunction(self, queries):
        """Merges query dicts - effectively &ing them together."""
        query_ops = set()
        combined_query = {}
        for query in queries:
            ops = set(query.keys())
            # Make sure that the same operation isn't applied more than once
            # to a single field
            intersection = ops.intersection(query_ops)
            if intersection:
                raise DuplicateQueryConditionsError()

            query_ops.update(ops)
            combined_query.update(copy.deepcopy(query))
        return combined_query


class QueryCompilerVisitor(QNodeVisitor):
    """Compiles the nodes in a query tree to a PyMongo-compatible query
    dictionary.
    """

    def __init__(self, document):
        self.document = document

    def visit_combination(self, combination):
        operator = "$and"
        if combination.operation == combination.OR:
            operator = "$or"
        return {operator: combination.children}

    def visit_query(self, query):
        return transform.query(self.document, **query.query)


class QNode:
    """Base class for nodes in query trees."""

    AND = 0
    OR = 1

    def to_query(self, document):
        query = self.accept(SimplificationVisitor())
        query = query.accept(QueryCompilerVisitor(document))
        return query

    def accept(self, visitor):
        raise NotImplementedError

    def _combine(self, other, operation):
        """Combine this node with another node into a QCombination
        object.
        """
        # If the other Q() is empty, ignore it and just use `self`.
        if not bool(other):
            return self

        # Or if this Q is empty, ignore it and just use `other`.
        if not bool(self):
            return other

        return QCombination(operation, [self, other])

    @property
    def empty(self):
        warn_empty_is_deprecated()
        return False

    def __or__(self, other):
        return self._combine(other, self.OR)

    def __and__(self, other):
        return self._combine(other, self.AND)


class QCombination(QNode):
    """Represents the combination of several conditions by a given
    logical operator.
    """

    def __init__(self, operation, children):
        self.operation = operation
        self.children = []
        for node in children:
            # If the child is a combination of the same type, we can merge its
            # children directly into this combination's children
            if isinstance(node, QCombination) and node.operation == operation:
                self.children += node.children
            else:
                self.children.append(node)

    def __repr__(self):
        op = " & " if self.operation is self.AND else " | "
        return "(%s)" % op.join([repr(node) for node in self.children])

    def __bool__(self):
        return bool(self.children)

    def accept(self, visitor):
        for i in range(len(self.children)):
            if isinstance(self.children[i], QNode):
                self.children[i] = self.children[i].accept(visitor)

        return visitor.visit_combination(self)

    @property
    def empty(self):
        warn_empty_is_deprecated()
        return not bool(self.children)

    def __eq__(self, other):
        return (
            self.__class__ == other.__class__
            and self.operation == other.operation
            and self.children == other.children
        )


class Q(QNode):
    """A simple query object, used in a query tree to build up more complex
    query structures.
    """

    def __init__(self, **query):
        self.query = query

    def __repr__(self):
        return "Q(**%s)" % repr(self.query)

    def __bool__(self):
        return bool(self.query)

    def __eq__(self, other):
        return self.__class__ == other.__class__ and self.query == other.query

    def accept(self, visitor):
        return visitor.visit_query(self)

    @property
    def empty(self):
        warn_empty_is_deprecated()
        return not bool(self.query)
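
For context, this deleted visitor module is what backs Q composition on queries. A short usage sketch (hypothetical model; evaluating the queryset itself needs a live connection):

    from mongoengine import Document, IntField, StringField, Q

    class User(Document):
        name = StringField()
        age = IntField()

    # & lets SimplificationVisitor merge simple Qs into one filter dict;
    # | compiles to an $or clause via QueryCompilerVisitor.
    (Q(name="Ada") | Q(age__lt=18)).to_query(User)
    # -> {"$or": [{"name": "Ada"}, {"age": {"$lt": 18}}]}
    User.objects(Q(name="Ada") & Q(age__gte=30))
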
@@ -1,25 +1,18 @@
-__all__ = (
-    "pre_init",
-    "post_init",
-    "pre_save",
-    "pre_save_post_validation",
-    "post_save",
-    "pre_delete",
-    "post_delete",
-)
+# -*- coding: utf-8 -*-
+
+__all__ = ['pre_init', 'post_init', 'pre_save', 'post_save',
+           'pre_delete', 'post_delete']

 signals_available = False
 try:
     from blinker import Namespace
-
     signals_available = True
 except ImportError:
-
-    class Namespace:
+    class Namespace(object):
         def signal(self, name, doc=None):
             return _FakeSignal(name, doc)

-    class _FakeSignal:
+    class _FakeSignal(object):
         """If blinker is unavailable, create a fake class with the same
         interface that allows sending of signals but will fail with an
         error on anything else. Instead of doing anything on send, it
@@ -31,29 +24,23 @@ except ImportError:
         self.__doc__ = doc

     def _fail(self, *args, **kwargs):
-        raise RuntimeError(
-            "signalling support is unavailable "
-            "because the blinker library is "
-            "not installed."
-        )
-
-    send = lambda *a, **kw: None  # noqa
-    connect = (
-        disconnect
-    ) = has_receivers_for = receivers_for = temporarily_connected_to = _fail
+        raise RuntimeError('signalling support is unavailable '
+                           'because the blinker library is '
+                           'not installed.')
+    send = lambda *a, **kw: None
+    connect = disconnect = has_receivers_for = receivers_for = \
+        temporarily_connected_to = _fail
     del _fail


 # the namespace for code signals. If you are not mongoengine code, do
 # not put signals in here. Create your own namespace instead.
 _signals = Namespace()

-pre_init = _signals.signal("pre_init")
-post_init = _signals.signal("post_init")
-pre_save = _signals.signal("pre_save")
-pre_save_post_validation = _signals.signal("pre_save_post_validation")
-post_save = _signals.signal("post_save")
-pre_delete = _signals.signal("pre_delete")
-post_delete = _signals.signal("post_delete")
-pre_bulk_insert = _signals.signal("pre_bulk_insert")
-post_bulk_insert = _signals.signal("post_bulk_insert")
+pre_init = _signals.signal('pre_init')
+post_init = _signals.signal('post_init')
+pre_save = _signals.signal('pre_save')
+post_save = _signals.signal('post_save')
+pre_delete = _signals.signal('pre_delete')
+post_delete = _signals.signal('post_delete')
+pre_bulk_insert = _signals.signal('pre_bulk_insert')
+post_bulk_insert = _signals.signal('post_bulk_insert')
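
These module-level signals are the hook points user code connects to (blinker must be installed, otherwise _FakeSignal.connect raises). A minimal sketch with a hypothetical model:

    from mongoengine import Document, StringField, signals

    class Page(Document):
        title = StringField()

    def on_post_save(sender, document, **kwargs):
        # Runs after every Page.save()
        print("saved", document.title)

    signals.post_save.connect(on_post_save, sender=Page)
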
59
mongoengine/tests.py
Normal file
@@ -0,0 +1,59 @@
from mongoengine.connection import get_db


class query_counter(object):
    """ Query_counter contextmanager to get the number of queries. """

    def __init__(self):
        """ Construct the query_counter. """
        self.counter = 0
        self.db = get_db()

    def __enter__(self):
        """ On every with block we need to drop the profile collection. """
        self.db.set_profiling_level(0)
        self.db.system.profile.drop()
        self.db.set_profiling_level(2)
        return self

    def __exit__(self, t, value, traceback):
        """ Reset the profiling level. """
        self.db.set_profiling_level(0)

    def __eq__(self, value):
        """ == Compare querycounter. """
        return value == self._get_count()

    def __ne__(self, value):
        """ != Compare querycounter. """
        return not self.__eq__(value)

    def __lt__(self, value):
        """ < Compare querycounter. """
        return self._get_count() < value

    def __le__(self, value):
        """ <= Compare querycounter. """
        return self._get_count() <= value

    def __gt__(self, value):
        """ > Compare querycounter. """
        return self._get_count() > value

    def __ge__(self, value):
        """ >= Compare querycounter. """
        return self._get_count() >= value

    def __int__(self):
        """ int representation. """
        return self._get_count()

    def __repr__(self):
        """ repr query_counter as the number of queries. """
        return u"%s" % self._get_count()

    def _get_count(self):
        """ Get the number of queries. """
        count = self.db.system.profile.find().count() - self.counter
        self.counter += 1
        return count
@@ -5,7 +5,7 @@
 %define srcname mongoengine

 Name: python-%{srcname}
-Version: 0.8.7
+Version: 0.7rc1
 Release: 1%{?dist}
 Summary: A Python Document-Object Mapper for working with MongoDB

@@ -51,4 +51,4 @@ rm -rf $RPM_BUILD_ROOT
 # %{python_sitearch}/*

 %changelog
-* See: http://docs.mongoengine.org/en/latest/changelog.html
+* See: http://readthedocs.org/docs/mongoengine-odm/en/latest/changelog.html
@@ -1,7 +0,0 @@
black
flake8
pre-commit
pytest
ipdb
ipython
tox

1
requirements.txt
Normal file
@@ -0,0 +1 @@
pymongo
29
setup.cfg
@@ -1,18 +1,11 @@
-[flake8]
-ignore=E501,F403,F405,I201,I202,W504,W605,W503,B007
-exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests
-max-complexity=47
-
-[tool:pytest]
-# Limits the discovery to the tests directory
-# avoids running, for instance, the benchmarks
-testpaths = tests
-
-[isort]
-known_first_party = mongoengine,tests
-default_section = THIRDPARTY
-multi_line_output = 3
-include_trailing_comma = True
-combine_as_imports = True
-line_length = 70
-ensure_newline_before_comments = 1
+[nosetests]
+verbosity = 3
+detailed-errors = 1
+#with-coverage = 1
+#cover-erase = 1
+#cover-html = 1
+#cover-html-dir = ../htmlcov
+#cover-package = mongoengine
+py3where = build
+where = tests
+#tests = test_bugfix.py
176
setup.py
@@ -1,148 +1,80 @@
 import os
 import sys

-from pkg_resources import normalize_path
-from setuptools import find_packages, setup
-from setuptools.command.test import test as TestCommand
+from setuptools import setup, find_packages

 # Hack to silence atexit traceback in newer python versions
 try:
-    import multiprocessing  # noqa: F401
+    import multiprocessing
 except ImportError:
     pass

-DESCRIPTION = "MongoEngine is a Python Object-Document Mapper for working with MongoDB."
+DESCRIPTION = "A Python Document-Object Mapper for working with MongoDB"

+LONG_DESCRIPTION = None
 try:
-    with open("README.rst") as fin:
-        LONG_DESCRIPTION = fin.read()
-except Exception:
-    LONG_DESCRIPTION = None
+    LONG_DESCRIPTION = open('README.rst').read()
+except:
+    pass


 def get_version(version_tuple):
-    """Return the version tuple as a string, e.g. for (0, 10, 7),
-    return '0.10.7'.
-    """
-    return ".".join(map(str, version_tuple))
+    if not isinstance(version_tuple[-1], int):
+        return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1]
+    return '.'.join(map(str, version_tuple))


-class PyTest(TestCommand):
-    """Will force pytest to search for tests inside the build directory
-    for 2to3 converted code (used by tox), instead of the current directory.
-    Required as long as we need 2to3
-
-    Known Limitation: https://tox.readthedocs.io/en/latest/example/pytest.html#known-issues-and-limitations
-    Source: https://www.hackzine.org/python-testing-with-pytest-and-2to3-plus-tox-and-travis-ci.html
-    """
-
-    # https://pytest.readthedocs.io/en/2.7.3/goodpractises.html#integration-with-setuptools-test-commands
-    # Allows to provide pytest command argument through the test runner command `python setup.py test`
-    # e.g: `python setup.py test -a "-k=test"`
-    # This only works for 1 argument though
-    user_options = [("pytest-args=", "a", "Arguments to pass to py.test")]
-
-    def initialize_options(self):
-        TestCommand.initialize_options(self)
-        self.pytest_args = ""
-
-    def finalize_options(self):
-        TestCommand.finalize_options(self)
-        self.test_args = ["tests"]
-        self.test_suite = True
-
-    def run_tests(self):
-        # import here, cause outside the eggs aren't loaded
-        import pytest
-        from pkg_resources import _namespace_packages
-
-        # Purge modules under test from sys.modules. The test loader will
-        # re-import them from the build location. Required when 2to3 is used
-        # with namespace packages.
-        if sys.version_info >= (3,) and getattr(self.distribution, "use_2to3", False):
-            module = self.test_args[-1].split(".")[0]
-            if module in _namespace_packages:
-                del_modules = []
-                if module in sys.modules:
-                    del_modules.append(module)
-                module += "."
-                for name in sys.modules:
-                    if name.startswith(module):
-                        del_modules.append(name)
-                map(sys.modules.__delitem__, del_modules)
-
-            # Run on the build directory for 2to3-built code
-            # This will prevent the old 2.x code from being found
-            # by py.test discovery mechanism, that apparently
-            # ignores sys.path..
-            ei_cmd = self.get_finalized_command("egg_info")
-            self.test_args = [normalize_path(ei_cmd.egg_base)]
-
-        cmd_args = self.test_args + ([self.pytest_args] if self.pytest_args else [])
-        errno = pytest.main(cmd_args)
-        sys.exit(errno)
-
-
 # Dirty hack to get version number from mongoengine/__init__.py - we can't
 # import it as it depends on PyMongo and PyMongo isn't installed until this
 # file is read
-init = os.path.join(os.path.dirname(__file__), "mongoengine", "__init__.py")
-version_line = list(filter(lambda l: l.startswith("VERSION"), open(init)))[0]
+init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py')
+version_line = list(filter(lambda l: l.startswith('VERSION'), open(init)))[0]

-VERSION = get_version(eval(version_line.split("=")[-1]))
+VERSION = get_version(eval(version_line.split('=')[-1]))
+print(VERSION)

 CLASSIFIERS = [
-    "Development Status :: 5 - Production/Stable",
-    "Intended Audience :: Developers",
-    "License :: OSI Approved :: MIT License",
-    "Operating System :: OS Independent",
-    "Programming Language :: Python",
+    'Development Status :: 4 - Beta',
+    'Intended Audience :: Developers',
+    'License :: OSI Approved :: MIT License',
+    'Operating System :: OS Independent',
+    'Programming Language :: Python',
+    "Programming Language :: Python :: 2",
+    "Programming Language :: Python :: 2.5",
+    "Programming Language :: Python :: 2.6",
+    "Programming Language :: Python :: 2.7",
     "Programming Language :: Python :: 3",
-    "Programming Language :: Python :: 3.6",
-    "Programming Language :: Python :: 3.7",
-    "Programming Language :: Python :: 3.8",
-    "Programming Language :: Python :: 3.9",
-    "Programming Language :: Python :: 3.10",
+    "Programming Language :: Python :: 3.1",
+    "Programming Language :: Python :: 3.2",
     "Programming Language :: Python :: Implementation :: CPython",
-    "Programming Language :: Python :: Implementation :: PyPy",
-    "Topic :: Database",
-    "Topic :: Software Development :: Libraries :: Python Modules",
+    'Topic :: Database',
+    'Topic :: Software Development :: Libraries :: Python Modules',
 ]

-extra_opts = {
-    "packages": find_packages(exclude=["tests", "tests.*"]),
-    "tests_require": [
-        "pytest",
-        "pytest-cov",
-        "coverage",
-        "blinker",
-        "Pillow>=7.0.0",
-    ],
-}
+extra_opts = {}
+if sys.version_info[0] == 3:
+    extra_opts['use_2to3'] = True
+    extra_opts['tests_require'] = ['nose', 'coverage', 'blinker']
+    extra_opts['packages'] = find_packages(exclude=('tests',))
+    if "test" in sys.argv or "nosetests" in sys.argv:
+        extra_opts['packages'].append("tests")
+        extra_opts['package_data'] = {"tests": ["mongoengine.png"]}
+else:
+    extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.3', 'PIL']
+    extra_opts['packages'] = find_packages(exclude=('tests',))

-if "test" in sys.argv:
-    extra_opts["packages"] = find_packages()
-    extra_opts["package_data"] = {
-        "tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]
-    }
-
-setup(
-    name="mongoengine",
-    version=VERSION,
-    author="Harry Marr",
-    author_email="harry.marr@gmail.com",
-    maintainer="Stefan Wojcik",
-    maintainer_email="wojcikstefan@gmail.com",
-    url="http://mongoengine.org/",
-    download_url="https://github.com/MongoEngine/mongoengine/tarball/master",
-    license="MIT",
-    include_package_data=True,
-    description=DESCRIPTION,
-    long_description=LONG_DESCRIPTION,
-    platforms=["any"],
-    classifiers=CLASSIFIERS,
-    python_requires=">=3.6",
-    install_requires=["pymongo>=3.4, <4.0"],
-    cmdclass={"test": PyTest},
-    **extra_opts
-)
+setup(name='mongoengine',
+      version=VERSION,
+      author='Harry Marr',
+      author_email='harry.marr@{nospam}gmail.com',
+      maintainer="Ross Lawley",
+      maintainer_email="ross.lawley@{nospam}gmail.com",
+      url='http://mongoengine.org/',
+      license='MIT',
+      include_package_data=True,
+      description=DESCRIPTION,
+      long_description=LONG_DESCRIPTION,
+      platforms=['any'],
+      classifiers=CLASSIFIERS,
+      install_requires=['pymongo'],
+      test_suite='nose.collector',
+      **extra_opts
+)
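
Both variants of get_version above stringify the VERSION tuple; the v0.7rc1 one also handles a pre-release suffix as the last element. A tiny worked example of that older behaviour:

    def get_version(version_tuple):
        # A non-int last element (e.g. 'rc1') is appended without a separator.
        if not isinstance(version_tuple[-1], int):
            return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1]
        return '.'.join(map(str, version_tuple))

    assert get_version((0, 10, 7)) == '0.10.7'
    assert get_version((0, 7, 'rc1')) == '0.7rc1'
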
@@ -1,35 +0,0 @@
"""
This test has been put into a module. This is because it tests warnings that
only get triggered on first hit. This way we can ensure it's imported into the
top level and called first by the test suite.
"""
import unittest
import warnings

from mongoengine import *


class TestAllWarnings(unittest.TestCase):
    def setUp(self):
        connect(db="mongoenginetest")
        self.warning_list = []
        self.showwarning_default = warnings.showwarning
        warnings.showwarning = self.append_to_warning_list

    def append_to_warning_list(self, message, category, *args):
        self.warning_list.append({"message": message, "category": category})

    def tearDown(self):
        # restore default handling of warnings
        warnings.showwarning = self.showwarning_default

    def test_document_collection_syntax_warning(self):
        class NonAbstractBase(Document):
            meta = {"allow_inheritance": True}

        class InheritedDocumentFailTest(NonAbstractBase):
            meta = {"collection": "fail"}

        warning = self.warning_list[0]
        assert SyntaxWarning == warning["category"]
        assert "non_abstract_base" == InheritedDocumentFailTest._get_collection_name()
@@ -1,349 +0,0 @@
import unittest

from mongoengine import *
from mongoengine.connection import get_db
from mongoengine.pymongo_support import list_collection_names
from mongoengine.queryset import NULLIFY, PULL


class TestClassMethods(unittest.TestCase):
    def setUp(self):
        connect(db="mongoenginetest")
        self.db = get_db()

        class Person(Document):
            name = StringField()
            age = IntField()

            non_field = True

            meta = {"allow_inheritance": True}

        self.Person = Person

    def tearDown(self):
        for collection in list_collection_names(self.db):
            self.db.drop_collection(collection)

    def test_definition(self):
        """Ensure that a document may be defined using fields."""
        assert ["_cls", "age", "id", "name"] == sorted(self.Person._fields.keys())
        assert ["IntField", "ObjectIdField", "StringField", "StringField"] == sorted(
            x.__class__.__name__ for x in self.Person._fields.values()
        )

    def test_get_db(self):
        """Ensure that get_db returns the expected db."""
        db = self.Person._get_db()
        assert self.db == db

    def test_get_collection_name(self):
        """Ensure that get_collection_name returns the expected collection
        name.
        """
        collection_name = "person"
        assert collection_name == self.Person._get_collection_name()

    def test_get_collection(self):
        """Ensure that get_collection returns the expected collection."""
        collection_name = "person"
        collection = self.Person._get_collection()
        assert self.db[collection_name] == collection

    def test_drop_collection(self):
        """Ensure that the collection may be dropped from the database."""
        collection_name = "person"
        self.Person(name="Test").save()
        assert collection_name in list_collection_names(self.db)

        self.Person.drop_collection()
        assert collection_name not in list_collection_names(self.db)

    def test_register_delete_rule(self):
        """Ensure that register_delete_rule adds a delete rule to the document
        meta.
        """

        class Job(Document):
            employee = ReferenceField(self.Person)

        assert self.Person._meta.get("delete_rules") is None

        self.Person.register_delete_rule(Job, "employee", NULLIFY)
        assert self.Person._meta["delete_rules"] == {(Job, "employee"): NULLIFY}

    def test_compare_indexes(self):
        """Ensure that the indexes are properly created and that
        compare_indexes identifies the missing/extra indexes
        """

        class BlogPost(Document):
            author = StringField()
            title = StringField()
            description = StringField()
            tags = StringField()

            meta = {"indexes": [("author", "title")]}

        BlogPost.drop_collection()

        BlogPost.ensure_indexes()
        assert BlogPost.compare_indexes() == {"missing": [], "extra": []}

        BlogPost.ensure_index(["author", "description"])
        assert BlogPost.compare_indexes() == {
            "missing": [],
            "extra": [[("author", 1), ("description", 1)]],
        }

        BlogPost._get_collection().drop_index("author_1_description_1")
        assert BlogPost.compare_indexes() == {"missing": [], "extra": []}

        BlogPost._get_collection().drop_index("author_1_title_1")
        assert BlogPost.compare_indexes() == {
            "missing": [[("author", 1), ("title", 1)]],
            "extra": [],
        }

    def test_compare_indexes_inheritance(self):
        """Ensure that the indexes are properly created and that
        compare_indexes identifies the missing/extra indexes for subclassed
        documents (_cls included)
        """

        class BlogPost(Document):
            author = StringField()
            title = StringField()
            description = StringField()

            meta = {"allow_inheritance": True}

        class BlogPostWithTags(BlogPost):
            tags = StringField()
            tag_list = ListField(StringField())

            meta = {"indexes": [("author", "tags")]}

        BlogPost.drop_collection()

        BlogPost.ensure_indexes()
        BlogPostWithTags.ensure_indexes()
        assert BlogPost.compare_indexes() == {"missing": [], "extra": []}

        BlogPostWithTags.ensure_index(["author", "tag_list"])
        assert BlogPost.compare_indexes() == {
            "missing": [],
            "extra": [[("_cls", 1), ("author", 1), ("tag_list", 1)]],
        }

        BlogPostWithTags._get_collection().drop_index("_cls_1_author_1_tag_list_1")
        assert BlogPost.compare_indexes() == {"missing": [], "extra": []}

        BlogPostWithTags._get_collection().drop_index("_cls_1_author_1_tags_1")
        assert BlogPost.compare_indexes() == {
            "missing": [[("_cls", 1), ("author", 1), ("tags", 1)]],
            "extra": [],
        }

    def test_compare_indexes_multiple_subclasses(self):
        """Ensure that compare_indexes behaves correctly if called from a
        class whose base class has multiple subclasses
        """

        class BlogPost(Document):
            author = StringField()
            title = StringField()
            description = StringField()

            meta = {"allow_inheritance": True}

        class BlogPostWithTags(BlogPost):
            tags = StringField()
            tag_list = ListField(StringField())

            meta = {"indexes": [("author", "tags")]}

        class BlogPostWithCustomField(BlogPost):
            custom = DictField()

            meta = {"indexes": [("author", "custom")]}

        BlogPost.ensure_indexes()
        BlogPostWithTags.ensure_indexes()
        BlogPostWithCustomField.ensure_indexes()

        assert BlogPost.compare_indexes() == {"missing": [], "extra": []}
        assert BlogPostWithTags.compare_indexes() == {"missing": [], "extra": []}
        assert BlogPostWithCustomField.compare_indexes() == {"missing": [], "extra": []}

    def test_compare_indexes_for_text_indexes(self):
        """Ensure that compare_indexes behaves correctly for text indexes"""

        class Doc(Document):
            a = StringField()
            b = StringField()
            meta = {
                "indexes": [
                    {
                        "fields": ["$a", "$b"],
                        "default_language": "english",
                        "weights": {"a": 10, "b": 2},
                    }
                ]
            }

        Doc.drop_collection()
        Doc.ensure_indexes()
        actual = Doc.compare_indexes()
        expected = {"missing": [], "extra": []}
        assert actual == expected

    def test_list_indexes_inheritance(self):
        """ensure that all of the indexes are listed regardless of the super-
        or sub-class that we call it from
        """

        class BlogPost(Document):
            author = StringField()
            title = StringField()
            description = StringField()

            meta = {"allow_inheritance": True}

        class BlogPostWithTags(BlogPost):
            tags = StringField()

            meta = {"indexes": [("author", "tags")]}

        class BlogPostWithTagsAndExtraText(BlogPostWithTags):
            extra_text = StringField()

            meta = {"indexes": [("author", "tags", "extra_text")]}

        BlogPost.drop_collection()

        BlogPost.ensure_indexes()
        BlogPostWithTags.ensure_indexes()
        BlogPostWithTagsAndExtraText.ensure_indexes()

        assert BlogPost.list_indexes() == BlogPostWithTags.list_indexes()
        assert BlogPost.list_indexes() == BlogPostWithTagsAndExtraText.list_indexes()
        assert BlogPost.list_indexes() == [
            [("_cls", 1), ("author", 1), ("tags", 1)],
            [("_cls", 1), ("author", 1), ("tags", 1), ("extra_text", 1)],
            [("_id", 1)],
            [("_cls", 1)],
        ]

    def test_register_delete_rule_inherited(self):
        class Vaccine(Document):
            name = StringField(required=True)

            meta = {"indexes": ["name"]}

        class Animal(Document):
            family = StringField(required=True)
            vaccine_made = ListField(
                ReferenceField("Vaccine", reverse_delete_rule=PULL)
            )

            meta = {"allow_inheritance": True, "indexes": ["family"]}

        class Cat(Animal):
            name = StringField(required=True)

        assert Vaccine._meta["delete_rules"][(Animal, "vaccine_made")] == PULL
        assert Vaccine._meta["delete_rules"][(Cat, "vaccine_made")] == PULL

    def test_collection_naming(self):
        """Ensure that a collection with a specified name may be used."""

        class DefaultNamingTest(Document):
            pass

        assert "default_naming_test" == DefaultNamingTest._get_collection_name()

        class CustomNamingTest(Document):
            meta = {"collection": "pimp_my_collection"}

        assert "pimp_my_collection" == CustomNamingTest._get_collection_name()

        class DynamicNamingTest(Document):
            meta = {"collection": lambda c: "DYNAMO"}

        assert "DYNAMO" == DynamicNamingTest._get_collection_name()

        # Use Abstract class to handle backwards compatibility
        class BaseDocument(Document):
            meta = {"abstract": True, "collection": lambda c: c.__name__.lower()}

        class OldNamingConvention(BaseDocument):
            pass

        assert "oldnamingconvention" == OldNamingConvention._get_collection_name()

        class InheritedAbstractNamingTest(BaseDocument):
            meta = {"collection": "wibble"}

        assert "wibble" == InheritedAbstractNamingTest._get_collection_name()

        # Mixin tests
        class BaseMixin:
            meta = {"collection": lambda c: c.__name__.lower()}

        class OldMixinNamingConvention(Document, BaseMixin):
            pass

        assert (
            "oldmixinnamingconvention"
            == OldMixinNamingConvention._get_collection_name()
        )

        class BaseMixin:
            meta = {"collection": lambda c: c.__name__.lower()}

        class BaseDocument(Document, BaseMixin):
            meta = {"allow_inheritance": True}

        class MyDocument(BaseDocument):
            pass

        assert "basedocument" == MyDocument._get_collection_name()

    def test_custom_collection_name_operations(self):
        """Ensure that a collection with a specified name is used as expected."""
        collection_name = "personCollTest"

        class Person(Document):
            name = StringField()
            meta = {"collection": collection_name}

        Person(name="Test User").save()
        assert collection_name in list_collection_names(self.db)

        user_obj = self.db[collection_name].find_one()
        assert user_obj["name"] == "Test User"

        user_obj = Person.objects[0]
        assert user_obj.name == "Test User"

        Person.drop_collection()
        assert collection_name not in list_collection_names(self.db)

    def test_collection_name_and_primary(self):
        """Ensure that a collection with a specified name may be used."""

        class Person(Document):
            name = StringField(primary_key=True)
            meta = {"collection": "app"}

        Person(name="Test User").save()

        user_obj = Person.objects.first()
        assert user_obj.name == "Test User"

        Person.drop_collection()


if __name__ == "__main__":
    unittest.main()
@ -1,957 +0,0 @@
|
|||||||
import unittest
|
|
||||||
|
|
||||||
from bson import SON
|
|
||||||
|
|
||||||
from mongoengine import *
|
|
||||||
from mongoengine.pymongo_support import list_collection_names
|
|
||||||
from tests.utils import MongoDBTestCase
|
|
||||||
|
|
||||||
|
|
||||||
class TestDelta(MongoDBTestCase):
|
|
||||||
def setUp(self):
|
|
||||||
super().setUp()
|
|
||||||
|
|
||||||
class Person(Document):
|
|
||||||
name = StringField()
|
|
||||||
age = IntField()
|
|
||||||
|
|
||||||
non_field = True
|
|
||||||
|
|
||||||
meta = {"allow_inheritance": True}
|
|
||||||
|
|
||||||
self.Person = Person
|
|
||||||
|
|
||||||
def tearDown(self):
|
|
||||||
for collection in list_collection_names(self.db):
|
|
||||||
self.db.drop_collection(collection)
|
|
||||||
|
|
||||||
def test_delta(self):
|
|
||||||
self.delta(Document)
|
|
||||||
self.delta(DynamicDocument)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def delta(DocClass):
|
|
||||||
class Doc(DocClass):
|
|
||||||
string_field = StringField()
|
|
||||||
int_field = IntField()
|
|
||||||
dict_field = DictField()
|
|
||||||
list_field = ListField()
|
|
||||||
|
|
||||||
Doc.drop_collection()
|
|
||||||
doc = Doc()
|
|
||||||
doc.save()
|
|
||||||
|
|
||||||
doc = Doc.objects.first()
|
|
||||||
assert doc._get_changed_fields() == []
|
|
||||||
assert doc._delta() == ({}, {})
|
|
||||||
|
|
||||||
doc.string_field = "hello"
|
|
||||||
assert doc._get_changed_fields() == ["string_field"]
|
|
||||||
assert doc._delta() == ({"string_field": "hello"}, {})
|
|
||||||
|
|
||||||
doc._changed_fields = []
|
|
||||||
doc.int_field = 1
|
|
||||||
assert doc._get_changed_fields() == ["int_field"]
|
|
||||||
assert doc._delta() == ({"int_field": 1}, {})
|
|
||||||
|
|
||||||
doc._changed_fields = []
|
|
||||||
dict_value = {"hello": "world", "ping": "pong"}
|
|
||||||
doc.dict_field = dict_value
|
|
||||||
assert doc._get_changed_fields() == ["dict_field"]
|
|
||||||
assert doc._delta() == ({"dict_field": dict_value}, {})
|
|
||||||
|
|
||||||
doc._changed_fields = []
|
|
||||||
list_value = ["1", 2, {"hello": "world"}]
|
|
||||||
doc.list_field = list_value
|
|
||||||
assert doc._get_changed_fields() == ["list_field"]
|
|
||||||
assert doc._delta() == ({"list_field": list_value}, {})
|
|
||||||
|
|
||||||
# Test unsetting
|
|
||||||
doc._changed_fields = []
|
|
||||||
doc.dict_field = {}
|
|
||||||
assert doc._get_changed_fields() == ["dict_field"]
|
|
||||||
assert doc._delta() == ({}, {"dict_field": 1})
|
|
||||||
|
|
||||||
doc._changed_fields = []
|
|
||||||
doc.list_field = []
|
|
||||||
assert doc._get_changed_fields() == ["list_field"]
|
|
||||||
assert doc._delta() == ({}, {"list_field": 1})
|
|
||||||
|
|
||||||
def test_delta_recursive(self):
|
|
||||||
self.delta_recursive(Document, EmbeddedDocument)
|
|
||||||
self.delta_recursive(DynamicDocument, EmbeddedDocument)
|
|
||||||
self.delta_recursive(Document, DynamicEmbeddedDocument)
|
|
||||||
self.delta_recursive(DynamicDocument, DynamicEmbeddedDocument)
|
|
||||||
|
|
||||||
def delta_recursive(self, DocClass, EmbeddedClass):
|
|
||||||
class Embedded(EmbeddedClass):
|
|
||||||
id = StringField()
|
|
||||||
string_field = StringField()
|
|
||||||
int_field = IntField()
|
|
||||||
dict_field = DictField()
|
|
||||||
list_field = ListField()
|
|
||||||
|
|
||||||
class Doc(DocClass):
|
|
||||||
string_field = StringField()
|
|
||||||
int_field = IntField()
|
|
||||||
dict_field = DictField()
|
|
||||||
list_field = ListField()
|
|
||||||
embedded_field = EmbeddedDocumentField(Embedded)
|
|
||||||
|
|
||||||
Doc.drop_collection()
|
|
||||||
doc = Doc()
|
|
||||||
doc.save()
|
|
||||||
|
|
||||||
doc = Doc.objects.first()
|
|
||||||
assert doc._get_changed_fields() == []
|
|
||||||
assert doc._delta() == ({}, {})
|
|
||||||
|
|
||||||
embedded_1 = Embedded()
|
|
||||||
embedded_1.id = "010101"
|
|
||||||
embedded_1.string_field = "hello"
|
|
||||||
embedded_1.int_field = 1
|
|
||||||
embedded_1.dict_field = {"hello": "world"}
|
|
||||||
embedded_1.list_field = ["1", 2, {"hello": "world"}]
|
|
||||||
doc.embedded_field = embedded_1
|
|
||||||
|
|
||||||
assert doc._get_changed_fields() == ["embedded_field"]
|
|
||||||
|
|
||||||
embedded_delta = {
|
|
||||||
"id": "010101",
|
|
||||||
"string_field": "hello",
|
|
||||||
"int_field": 1,
|
|
||||||
"dict_field": {"hello": "world"},
|
|
||||||
"list_field": ["1", 2, {"hello": "world"}],
|
|
||||||
}
|
|
||||||
assert doc.embedded_field._delta() == (embedded_delta, {})
|
|
||||||
assert doc._delta() == ({"embedded_field": embedded_delta}, {})
|
|
||||||
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
|
|
||||||
doc.embedded_field.dict_field = {}
|
|
||||||
assert doc._get_changed_fields() == ["embedded_field.dict_field"]
|
|
||||||
assert doc.embedded_field._delta() == ({}, {"dict_field": 1})
|
|
||||||
assert doc._delta() == ({}, {"embedded_field.dict_field": 1})
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
assert doc.embedded_field.dict_field == {}
|
|
||||||
|
|
||||||
doc.embedded_field.list_field = []
|
|
||||||
assert doc._get_changed_fields() == ["embedded_field.list_field"]
|
|
||||||
assert doc.embedded_field._delta() == ({}, {"list_field": 1})
|
|
||||||
assert doc._delta() == ({}, {"embedded_field.list_field": 1})
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
assert doc.embedded_field.list_field == []
|
|
||||||
|
|
||||||
embedded_2 = Embedded()
|
|
||||||
embedded_2.string_field = "hello"
|
|
||||||
embedded_2.int_field = 1
|
|
||||||
embedded_2.dict_field = {"hello": "world"}
|
|
||||||
embedded_2.list_field = ["1", 2, {"hello": "world"}]
|
|
||||||
|
|
||||||
doc.embedded_field.list_field = ["1", 2, embedded_2]
|
|
||||||
assert doc._get_changed_fields() == ["embedded_field.list_field"]
|
|
||||||
|
|
||||||
assert doc.embedded_field._delta() == (
|
|
||||||
{
|
|
||||||
"list_field": [
|
|
||||||
"1",
|
|
||||||
2,
|
|
||||||
{
|
|
||||||
"_cls": "Embedded",
|
|
||||||
"string_field": "hello",
|
|
||||||
"dict_field": {"hello": "world"},
|
|
||||||
"int_field": 1,
|
|
||||||
"list_field": ["1", 2, {"hello": "world"}],
|
|
||||||
},
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{},
|
|
||||||
)
|
|
||||||
|
|
||||||
assert doc._delta() == (
|
|
||||||
{
|
|
||||||
"embedded_field.list_field": [
|
|
||||||
"1",
|
|
||||||
2,
|
|
||||||
{
|
|
||||||
"_cls": "Embedded",
|
|
||||||
"string_field": "hello",
|
|
||||||
"dict_field": {"hello": "world"},
|
|
||||||
"int_field": 1,
|
|
||||||
"list_field": ["1", 2, {"hello": "world"}],
|
|
||||||
},
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{},
|
|
||||||
)
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
|
|
||||||
assert doc.embedded_field.list_field[0] == "1"
|
|
||||||
assert doc.embedded_field.list_field[1] == 2
|
|
||||||
for k in doc.embedded_field.list_field[2]._fields:
|
|
||||||
assert doc.embedded_field.list_field[2][k] == embedded_2[k]
|
|
||||||
|
|
||||||
doc.embedded_field.list_field[2].string_field = "world"
|
|
||||||
assert doc._get_changed_fields() == ["embedded_field.list_field.2.string_field"]
|
|
||||||
assert doc.embedded_field._delta() == (
|
|
||||||
{"list_field.2.string_field": "world"},
|
|
||||||
{},
|
|
||||||
)
|
|
||||||
assert doc._delta() == (
|
|
||||||
{"embedded_field.list_field.2.string_field": "world"},
|
|
||||||
{},
|
|
||||||
)
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
assert doc.embedded_field.list_field[2].string_field == "world"
|
|
||||||
|
|
||||||
# Test multiple assignments
|
|
||||||
doc.embedded_field.list_field[2].string_field = "hello world"
|
|
||||||
doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
|
|
||||||
assert doc._get_changed_fields() == ["embedded_field.list_field.2"]
|
|
||||||
assert doc.embedded_field._delta() == (
|
|
||||||
{
|
|
||||||
"list_field.2": {
|
|
||||||
"_cls": "Embedded",
|
|
||||||
"string_field": "hello world",
|
|
||||||
"int_field": 1,
|
|
||||||
"list_field": ["1", 2, {"hello": "world"}],
|
|
||||||
"dict_field": {"hello": "world"},
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{},
|
|
||||||
)
|
|
||||||
assert doc._delta() == (
|
|
||||||
{
|
|
||||||
"embedded_field.list_field.2": {
|
|
||||||
"_cls": "Embedded",
|
|
||||||
"string_field": "hello world",
|
|
||||||
"int_field": 1,
|
|
||||||
"list_field": ["1", 2, {"hello": "world"}],
|
|
||||||
"dict_field": {"hello": "world"},
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{},
|
|
||||||
)
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
assert doc.embedded_field.list_field[2].string_field == "hello world"
|
|
||||||
|
|
||||||
# Test list native methods
|
|
||||||
doc.embedded_field.list_field[2].list_field.pop(0)
|
|
||||||
assert doc._delta() == (
|
|
||||||
{"embedded_field.list_field.2.list_field": [2, {"hello": "world"}]},
|
|
||||||
{},
|
|
||||||
)
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
|
|
||||||
doc.embedded_field.list_field[2].list_field.append(1)
|
|
||||||
assert doc._delta() == (
|
|
||||||
{"embedded_field.list_field.2.list_field": [2, {"hello": "world"}, 1]},
|
|
||||||
{},
|
|
||||||
)
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
assert doc.embedded_field.list_field[2].list_field == [2, {"hello": "world"}, 1]
|
|
||||||
|
|
||||||
doc.embedded_field.list_field[2].list_field.sort(key=str)
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
assert doc.embedded_field.list_field[2].list_field == [1, 2, {"hello": "world"}]
|
|
||||||
|
|
||||||
del doc.embedded_field.list_field[2].list_field[2]["hello"]
|
|
||||||
assert doc._delta() == (
|
|
||||||
{},
|
|
||||||
{"embedded_field.list_field.2.list_field.2.hello": 1},
|
|
||||||
)
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
|
|
||||||
del doc.embedded_field.list_field[2].list_field
|
|
||||||
assert doc._delta() == ({}, {"embedded_field.list_field.2.list_field": 1})
|
|
||||||
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
|
|
||||||
doc.dict_field["Embedded"] = embedded_1
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
|
|
||||||
doc.dict_field["Embedded"].string_field = "Hello World"
|
|
||||||
assert doc._get_changed_fields() == ["dict_field.Embedded.string_field"]
|
|
||||||
assert doc._delta() == ({"dict_field.Embedded.string_field": "Hello World"}, {})
|
|
||||||
|
|
||||||
    def test_circular_reference_deltas(self):
        self.circular_reference_deltas(Document, Document)
        self.circular_reference_deltas(Document, DynamicDocument)
        self.circular_reference_deltas(DynamicDocument, Document)
        self.circular_reference_deltas(DynamicDocument, DynamicDocument)

    def circular_reference_deltas(self, DocClass1, DocClass2):
        class Person(DocClass1):
            name = StringField()
            owns = ListField(ReferenceField("Organization"))

        class Organization(DocClass2):
            name = StringField()
            owner = ReferenceField("Person")

        Person.drop_collection()
        Organization.drop_collection()

        person = Person(name="owner").save()
        organization = Organization(name="company").save()

        person.owns.append(organization)
        organization.owner = person

        person.save()
        organization.save()

        p = Person.objects[0].select_related()
        o = Organization.objects.first()
        assert p.owns[0] == o
        assert o.owner == p

    def test_circular_reference_deltas_2(self):
        self.circular_reference_deltas_2(Document, Document)
        self.circular_reference_deltas_2(Document, DynamicDocument)
        self.circular_reference_deltas_2(DynamicDocument, Document)
        self.circular_reference_deltas_2(DynamicDocument, DynamicDocument)

    def circular_reference_deltas_2(self, DocClass1, DocClass2, dbref=True):
        class Person(DocClass1):
            name = StringField()
            owns = ListField(ReferenceField("Organization", dbref=dbref))
            employer = ReferenceField("Organization", dbref=dbref)

        class Organization(DocClass2):
            name = StringField()
            owner = ReferenceField("Person", dbref=dbref)
            employees = ListField(ReferenceField("Person", dbref=dbref))

        Person.drop_collection()
        Organization.drop_collection()

        person = Person(name="owner").save()
        employee = Person(name="employee").save()
        organization = Organization(name="company").save()

        person.owns.append(organization)
        organization.owner = person

        organization.employees.append(employee)
        employee.employer = organization

        person.save()
        organization.save()
        employee.save()

        p = Person.objects.get(name="owner")
        e = Person.objects.get(name="employee")
        o = Organization.objects.first()

        assert p.owns[0] == o
        assert o.owner == p
        assert e.employer == o

        return person, organization, employee

    def test_delta_db_field(self):
        self.delta_db_field(Document)
        self.delta_db_field(DynamicDocument)

    def delta_db_field(self, DocClass):
        class Doc(DocClass):
            string_field = StringField(db_field="db_string_field")
            int_field = IntField(db_field="db_int_field")
            dict_field = DictField(db_field="db_dict_field")
            list_field = ListField(db_field="db_list_field")

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        assert doc._get_changed_fields() == []
        assert doc._delta() == ({}, {})

        doc.string_field = "hello"
        assert doc._get_changed_fields() == ["db_string_field"]
        assert doc._delta() == ({"db_string_field": "hello"}, {})

        doc._changed_fields = []
        doc.int_field = 1
        assert doc._get_changed_fields() == ["db_int_field"]
        assert doc._delta() == ({"db_int_field": 1}, {})

        doc._changed_fields = []
        dict_value = {"hello": "world", "ping": "pong"}
        doc.dict_field = dict_value
        assert doc._get_changed_fields() == ["db_dict_field"]
        assert doc._delta() == ({"db_dict_field": dict_value}, {})

        doc._changed_fields = []
        list_value = ["1", 2, {"hello": "world"}]
        doc.list_field = list_value
        assert doc._get_changed_fields() == ["db_list_field"]
        assert doc._delta() == ({"db_list_field": list_value}, {})

        # Test unsetting
        doc._changed_fields = []
        doc.dict_field = {}
        assert doc._get_changed_fields() == ["db_dict_field"]
        assert doc._delta() == ({}, {"db_dict_field": 1})

        doc._changed_fields = []
        doc.list_field = []
        assert doc._get_changed_fields() == ["db_list_field"]
        assert doc._delta() == ({}, {"db_list_field": 1})

        # Test it saves that data
        doc = Doc()
        doc.save()

        doc.string_field = "hello"
        doc.int_field = 1
        doc.dict_field = {"hello": "world"}
        doc.list_field = ["1", 2, {"hello": "world"}]
        doc.save()
        doc = doc.reload(10)

        assert doc.string_field == "hello"
        assert doc.int_field == 1
        assert doc.dict_field == {"hello": "world"}
        assert doc.list_field == ["1", 2, {"hello": "world"}]

    def test_delta_recursive_db_field_on_doc_and_embeddeddoc(self):
        self.delta_recursive_db_field(Document, EmbeddedDocument)

    def test_delta_recursive_db_field_on_doc_and_dynamicembeddeddoc(self):
        self.delta_recursive_db_field(Document, DynamicEmbeddedDocument)

    def test_delta_recursive_db_field_on_dynamicdoc_and_embeddeddoc(self):
        self.delta_recursive_db_field(DynamicDocument, EmbeddedDocument)

    def test_delta_recursive_db_field_on_dynamicdoc_and_dynamicembeddeddoc(self):
        self.delta_recursive_db_field(DynamicDocument, DynamicEmbeddedDocument)

    @staticmethod
    def delta_recursive_db_field(DocClass, EmbeddedClass):
        class Embedded(EmbeddedClass):
            string_field = StringField(db_field="db_string_field")
            int_field = IntField(db_field="db_int_field")
            dict_field = DictField(db_field="db_dict_field")
            list_field = ListField(db_field="db_list_field")

        class Doc(DocClass):
            string_field = StringField(db_field="db_string_field")
            int_field = IntField(db_field="db_int_field")
            dict_field = DictField(db_field="db_dict_field")
            list_field = ListField(db_field="db_list_field")
            embedded_field = EmbeddedDocumentField(
                Embedded, db_field="db_embedded_field"
            )

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        assert doc._get_changed_fields() == []
        assert doc._delta() == ({}, {})

        embedded_1 = Embedded()
        embedded_1.string_field = "hello"
        embedded_1.int_field = 1
        embedded_1.dict_field = {"hello": "world"}
        embedded_1.list_field = ["1", 2, {"hello": "world"}]
        doc.embedded_field = embedded_1

        assert doc._get_changed_fields() == ["db_embedded_field"]

        embedded_delta = {
            "db_string_field": "hello",
            "db_int_field": 1,
            "db_dict_field": {"hello": "world"},
            "db_list_field": ["1", 2, {"hello": "world"}],
        }
        assert doc.embedded_field._delta() == (embedded_delta, {})
        assert doc._delta() == ({"db_embedded_field": embedded_delta}, {})

        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.dict_field = {}
        assert doc._get_changed_fields() == ["db_embedded_field.db_dict_field"]
        assert doc.embedded_field._delta() == ({}, {"db_dict_field": 1})
        assert doc._delta() == ({}, {"db_embedded_field.db_dict_field": 1})
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.dict_field == {}

        assert doc._get_changed_fields() == []
        doc.embedded_field.list_field = []
        assert doc._get_changed_fields() == ["db_embedded_field.db_list_field"]
        assert doc.embedded_field._delta() == ({}, {"db_list_field": 1})
        assert doc._delta() == ({}, {"db_embedded_field.db_list_field": 1})
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field == []

        embedded_2 = Embedded()
        embedded_2.string_field = "hello"
        embedded_2.int_field = 1
        embedded_2.dict_field = {"hello": "world"}
        embedded_2.list_field = ["1", 2, {"hello": "world"}]

        doc.embedded_field.list_field = ["1", 2, embedded_2]
        assert doc._get_changed_fields() == ["db_embedded_field.db_list_field"]
        assert doc.embedded_field._delta() == (
            {
                "db_list_field": [
                    "1",
                    2,
                    {
                        "_cls": "Embedded",
                        "db_string_field": "hello",
                        "db_dict_field": {"hello": "world"},
                        "db_int_field": 1,
                        "db_list_field": ["1", 2, {"hello": "world"}],
                    },
                ]
            },
            {},
        )

        assert doc._delta() == (
            {
                "db_embedded_field.db_list_field": [
                    "1",
                    2,
                    {
                        "_cls": "Embedded",
                        "db_string_field": "hello",
                        "db_dict_field": {"hello": "world"},
                        "db_int_field": 1,
                        "db_list_field": ["1", 2, {"hello": "world"}],
                    },
                ]
            },
            {},
        )
        doc.save()
        assert doc._get_changed_fields() == []
        doc = doc.reload(10)

        assert doc.embedded_field.list_field[0] == "1"
        assert doc.embedded_field.list_field[1] == 2
        for k in doc.embedded_field.list_field[2]._fields:
            assert doc.embedded_field.list_field[2][k] == embedded_2[k]

        doc.embedded_field.list_field[2].string_field = "world"
        assert doc._get_changed_fields() == [
            "db_embedded_field.db_list_field.2.db_string_field"
        ]
        assert doc.embedded_field._delta() == (
            {"db_list_field.2.db_string_field": "world"},
            {},
        )
        assert doc._delta() == (
            {"db_embedded_field.db_list_field.2.db_string_field": "world"},
            {},
        )
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].string_field == "world"

        # Test multiple assignments
        doc.embedded_field.list_field[2].string_field = "hello world"
        doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
        assert doc._get_changed_fields() == ["db_embedded_field.db_list_field.2"]
        assert doc.embedded_field._delta() == (
            {
                "db_list_field.2": {
                    "_cls": "Embedded",
                    "db_string_field": "hello world",
                    "db_int_field": 1,
                    "db_list_field": ["1", 2, {"hello": "world"}],
                    "db_dict_field": {"hello": "world"},
                }
            },
            {},
        )
        assert doc._delta() == (
            {
                "db_embedded_field.db_list_field.2": {
                    "_cls": "Embedded",
                    "db_string_field": "hello world",
                    "db_int_field": 1,
                    "db_list_field": ["1", 2, {"hello": "world"}],
                    "db_dict_field": {"hello": "world"},
                }
            },
            {},
        )
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].string_field == "hello world"

        # Test list native methods
        doc.embedded_field.list_field[2].list_field.pop(0)
        assert doc._delta() == (
            {
                "db_embedded_field.db_list_field.2.db_list_field": [
                    2,
                    {"hello": "world"},
                ]
            },
            {},
        )
        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.list_field[2].list_field.append(1)
        assert doc._delta() == (
            {
                "db_embedded_field.db_list_field.2.db_list_field": [
                    2,
                    {"hello": "world"},
                    1,
                ]
            },
            {},
        )
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].list_field == [2, {"hello": "world"}, 1]

        doc.embedded_field.list_field[2].list_field.sort(key=str)
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].list_field == [1, 2, {"hello": "world"}]

        del doc.embedded_field.list_field[2].list_field[2]["hello"]
        assert doc._delta() == (
            {},
            {"db_embedded_field.db_list_field.2.db_list_field.2.hello": 1},
        )
        doc.save()
        doc = doc.reload(10)

        assert doc._delta() == (
            {},
            {},
        )
        del doc.embedded_field.list_field[2].list_field
        assert doc._delta() == (
            {},
            {"db_embedded_field.db_list_field.2.db_list_field": 1},
        )

    def test_delta_for_dynamic_documents(self):
        class Person(DynamicDocument):
            name = StringField()
            meta = {"allow_inheritance": True}

        Person.drop_collection()

        p = Person(name="James", age=34)
        assert p._delta() == (
            SON([("_cls", "Person"), ("name", "James"), ("age", 34)]),
            {},
        )

        p.doc = 123
        del p.doc
        assert p._delta() == (
            SON([("_cls", "Person"), ("name", "James"), ("age", 34)]),
            {},
        )

        p = Person()
        p.name = "Dean"
        p.age = 22
        p.save()

        p.age = 24
        assert p.age == 24
        assert p._get_changed_fields() == ["age"]
        assert p._delta() == ({"age": 24}, {})

        p = Person.objects(age=22).get()
        p.age = 24
        assert p.age == 24
        assert p._get_changed_fields() == ["age"]
        assert p._delta() == ({"age": 24}, {})

        p.save()
        assert 1 == Person.objects(age=24).count()

    def test_dynamic_delta(self):
        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        assert doc._get_changed_fields() == []
        assert doc._delta() == ({}, {})

        doc.string_field = "hello"
        assert doc._get_changed_fields() == ["string_field"]
        assert doc._delta() == ({"string_field": "hello"}, {})

        doc._changed_fields = []
        doc.int_field = 1
        assert doc._get_changed_fields() == ["int_field"]
        assert doc._delta() == ({"int_field": 1}, {})

        doc._changed_fields = []
        dict_value = {"hello": "world", "ping": "pong"}
        doc.dict_field = dict_value
        assert doc._get_changed_fields() == ["dict_field"]
        assert doc._delta() == ({"dict_field": dict_value}, {})

        doc._changed_fields = []
        list_value = ["1", 2, {"hello": "world"}]
        doc.list_field = list_value
        assert doc._get_changed_fields() == ["list_field"]
        assert doc._delta() == ({"list_field": list_value}, {})

        # Test unsetting
        doc._changed_fields = []
        doc.dict_field = {}
        assert doc._get_changed_fields() == ["dict_field"]
        assert doc._delta() == ({}, {"dict_field": 1})

        doc._changed_fields = []
        doc.list_field = []
        assert doc._get_changed_fields() == ["list_field"]
        assert doc._delta() == ({}, {"list_field": 1})

    def test_delta_with_dbref_true(self):
        person, organization, employee = self.circular_reference_deltas_2(
            Document, Document, True
        )
        employee.name = "test"

        assert organization._get_changed_fields() == []

        updates, removals = organization._delta()
        assert removals == {}
        assert updates == {}

        organization.employees.append(person)
        updates, removals = organization._delta()
        assert removals == {}
        assert "employees" in updates

    def test_delta_with_dbref_false(self):
        person, organization, employee = self.circular_reference_deltas_2(
            Document, Document, False
        )
        employee.name = "test"

        assert organization._get_changed_fields() == []

        updates, removals = organization._delta()
        assert removals == {}
        assert updates == {}

        organization.employees.append(person)
        updates, removals = organization._delta()
        assert removals == {}
        assert "employees" in updates

    def test_nested_nested_fields_mark_as_changed(self):
        class EmbeddedDoc(EmbeddedDocument):
            name = StringField()

        class MyDoc(Document):
            subs = MapField(MapField(EmbeddedDocumentField(EmbeddedDoc)))
            name = StringField()

        MyDoc.drop_collection()

        MyDoc(name="testcase1", subs={"a": {"b": EmbeddedDoc(name="foo")}}).save()

        mydoc = MyDoc.objects.first()
        subdoc = mydoc.subs["a"]["b"]
        subdoc.name = "bar"

        assert subdoc._get_changed_fields() == ["name"]
        assert mydoc._get_changed_fields() == ["subs.a.b.name"]

        mydoc._clear_changed_fields()
        assert mydoc._get_changed_fields() == []

    def test_nested_nested_fields_db_field_set__gets_mark_as_changed_and_cleaned(self):
        class EmbeddedDoc(EmbeddedDocument):
            name = StringField(db_field="db_name")

        class MyDoc(Document):
            embed = EmbeddedDocumentField(EmbeddedDoc, db_field="db_embed")
            name = StringField(db_field="db_name")

        MyDoc.drop_collection()

        MyDoc(name="testcase1", embed=EmbeddedDoc(name="foo")).save()

        mydoc = MyDoc.objects.first()
        mydoc.embed.name = "foo1"

        assert mydoc.embed._get_changed_fields() == ["db_name"]
        assert mydoc._get_changed_fields() == ["db_embed.db_name"]

        mydoc = MyDoc.objects.first()
        embed = EmbeddedDoc(name="foo2")
        embed.name = "bar"
        mydoc.embed = embed

        assert embed._get_changed_fields() == ["db_name"]
        assert mydoc._get_changed_fields() == ["db_embed"]

        mydoc._clear_changed_fields()
        assert mydoc._get_changed_fields() == []

    def test_lower_level_mark_as_changed(self):
        class EmbeddedDoc(EmbeddedDocument):
            name = StringField()

        class MyDoc(Document):
            subs = MapField(EmbeddedDocumentField(EmbeddedDoc))

        MyDoc.drop_collection()

        MyDoc().save()

        mydoc = MyDoc.objects.first()
        mydoc.subs["a"] = EmbeddedDoc()
        assert mydoc._get_changed_fields() == ["subs.a"]

        subdoc = mydoc.subs["a"]
        subdoc.name = "bar"

        assert subdoc._get_changed_fields() == ["name"]
        assert mydoc._get_changed_fields() == ["subs.a"]
        mydoc.save()

        mydoc._clear_changed_fields()
        assert mydoc._get_changed_fields() == []

    def test_upper_level_mark_as_changed(self):
        class EmbeddedDoc(EmbeddedDocument):
            name = StringField()

        class MyDoc(Document):
            subs = MapField(EmbeddedDocumentField(EmbeddedDoc))

        MyDoc.drop_collection()

        MyDoc(subs={"a": EmbeddedDoc(name="foo")}).save()

        mydoc = MyDoc.objects.first()
        subdoc = mydoc.subs["a"]
        subdoc.name = "bar"

        assert subdoc._get_changed_fields() == ["name"]
        assert mydoc._get_changed_fields() == ["subs.a.name"]

        mydoc.subs["a"] = EmbeddedDoc()
        assert mydoc._get_changed_fields() == ["subs.a"]
        mydoc.save()

        mydoc._clear_changed_fields()
        assert mydoc._get_changed_fields() == []

    def test_referenced_object_changed_attributes(self):
        """Ensures that when you save a new reference to a field, the referenced object isn't altered"""

        class Organization(Document):
            name = StringField()

        class User(Document):
            name = StringField()
            org = ReferenceField("Organization", required=True)

        Organization.drop_collection()
        User.drop_collection()

        org1 = Organization(name="Org 1")
        org1.save()

        org2 = Organization(name="Org 2")
        org2.save()

        user = User(name="Fred", org=org1)
        user.save()

        org1.reload()
        org2.reload()
        user.reload()
        assert org1.name == "Org 1"
        assert org2.name == "Org 2"
        assert user.name == "Fred"

        user.name = "Harold"
        user.org = org2

        org2.name = "New Org 2"
        assert org2.name == "New Org 2"

        user.save()
        org2.save()

        assert org2.name == "New Org 2"
        org2.reload()
        assert org2.name == "New Org 2"

    def test_delta_for_nested_map_fields(self):
        class UInfoDocument(Document):
            phone = StringField()

        class EmbeddedRole(EmbeddedDocument):
            type = StringField()

        class EmbeddedUser(EmbeddedDocument):
            name = StringField()
            roles = MapField(field=EmbeddedDocumentField(EmbeddedRole))
            rolist = ListField(field=EmbeddedDocumentField(EmbeddedRole))
            info = ReferenceField(UInfoDocument)

        class Doc(Document):
            users = MapField(field=EmbeddedDocumentField(EmbeddedUser))
            num = IntField(default=-1)

        Doc.drop_collection()

        doc = Doc(num=1)
        doc.users["007"] = EmbeddedUser(name="Agent007")
        doc.save()

        uinfo = UInfoDocument(phone="79089269066")
        uinfo.save()

        d = Doc.objects(num=1).first()
        d.users["007"]["roles"]["666"] = EmbeddedRole(type="superadmin")
        d.users["007"]["rolist"].append(EmbeddedRole(type="oops"))
        d.users["007"]["info"] = uinfo
        delta = d._delta()
        assert "users.007.roles.666" in delta[0]
        assert "users.007.rolist" in delta[0]
        assert "users.007.info" in delta[0]
        assert "superadmin" == delta[0]["users.007.roles.666"]["type"]
        assert "oops" == delta[0]["users.007.rolist"][0]["type"]
        assert uinfo.id == delta[0]["users.007.info"]


if __name__ == "__main__":
    unittest.main()
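
The _delta() calls exercised throughout this file return a (sets, unsets) pair
that maps directly onto MongoDB's $set/$unset update operators. A minimal
sketch of that mapping, assuming a running local mongod (the class, field, and
database names below are illustrative, not part of the suite):

from mongoengine import DictField, Document, StringField, connect

connect("delta_sketch")  # assumes a local mongod; database name is made up


class Note(Document):
    string_field = StringField()
    dict_field = DictField()


note = Note(string_field="hello", dict_field={"a": 1}).save()
note.string_field = "world"
note.dict_field = {}

sets, unsets = note._delta()  # the internal API the tests above exercise
# sets   == {"string_field": "world"}
# unsets == {"dict_field": 1}
update = {}
if sets:
    update["$set"] = sets
if unsets:
    update["$unset"] = unsets
# note.save() issues roughly this update against {"_id": note.id}
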
@@ -1,439 +0,0 @@
import unittest

import pytest

from mongoengine import *
from tests.utils import MongoDBTestCase

__all__ = ("TestDynamicDocument",)


class TestDynamicDocument(MongoDBTestCase):
    def setUp(self):
        super().setUp()

        class Person(DynamicDocument):
            name = StringField()
            meta = {"allow_inheritance": True}

        Person.drop_collection()

        self.Person = Person

    def test_simple_dynamic_document(self):
        """Ensures simple dynamic documents are saved correctly"""

        p = self.Person()
        p.name = "James"
        p.age = 34

        assert p.to_mongo() == {"_cls": "Person", "name": "James", "age": 34}
        assert p.to_mongo().keys() == ["_cls", "name", "age"]
        p.save()
        assert p.to_mongo().keys() == ["_id", "_cls", "name", "age"]

        assert self.Person.objects.first().age == 34

        # Confirm no changes to self.Person
        assert not hasattr(self.Person, "age")

    def test_dynamic_document_parse_values_in_constructor_like_document_do(self):
        class ProductDynamicDocument(DynamicDocument):
            title = StringField()
            price = FloatField()

        class ProductDocument(Document):
            title = StringField()
            price = FloatField()

        product = ProductDocument(title="Blabla", price="12.5")
        dyn_product = ProductDynamicDocument(title="Blabla", price="12.5")
        assert product.price == dyn_product.price == 12.5

    def test_change_scope_of_variable(self):
        """Test changing the scope of a dynamic field has no adverse effects"""
        p = self.Person()
        p.name = "Dean"
        p.misc = 22
        p.save()

        p = self.Person.objects.get()
        p.misc = {"hello": "world"}
        p.save()

        p = self.Person.objects.get()
        assert p.misc == {"hello": "world"}

    def test_delete_dynamic_field(self):
        """Test deleting a dynamic field works"""
        self.Person.drop_collection()
        p = self.Person()
        p.name = "Dean"
        p.misc = 22
        p.save()

        p = self.Person.objects.get()
        p.misc = {"hello": "world"}
        p.save()

        p = self.Person.objects.get()
        assert p.misc == {"hello": "world"}
        collection = self.db[self.Person._get_collection_name()]
        obj = collection.find_one()
        assert sorted(obj.keys()) == ["_cls", "_id", "misc", "name"]

        del p.misc
        p.save()

        p = self.Person.objects.get()
        assert not hasattr(p, "misc")

        obj = collection.find_one()
        assert sorted(obj.keys()) == ["_cls", "_id", "name"]

    def test_reload_after_unsetting(self):
        p = self.Person()
        p.misc = 22
        p.save()
        p.update(unset__misc=1)
        p.reload()

    def test_reload_dynamic_field(self):
        self.Person.objects.delete()
        p = self.Person.objects.create()
        p.update(age=1)

        assert len(p._data) == 3
        assert sorted(p._data.keys()) == ["_cls", "id", "name"]

        p.reload()
        assert len(p._data) == 4
        assert sorted(p._data.keys()) == ["_cls", "age", "id", "name"]

    def test_fields_without_underscore(self):
        """Ensure we can query dynamic fields"""
        Person = self.Person

        p = self.Person(name="Dean")
        p.save()

        raw_p = Person.objects.as_pymongo().get(id=p.id)
        assert raw_p == {"_cls": "Person", "_id": p.id, "name": "Dean"}

        p.name = "OldDean"
        p.newattr = "garbage"
        p.save()
        raw_p = Person.objects.as_pymongo().get(id=p.id)
        assert raw_p == {
            "_cls": "Person",
            "_id": p.id,
            "name": "OldDean",
            "newattr": "garbage",
        }

    def test_fields_containing_underscore(self):
        """Ensure we can query dynamic fields"""

        class WeirdPerson(DynamicDocument):
            name = StringField()
            _name = StringField()

        WeirdPerson.drop_collection()

        p = WeirdPerson(name="Dean", _name="Dean")
        p.save()

        raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id)
        assert raw_p == {"_id": p.id, "_name": "Dean", "name": "Dean"}

        p.name = "OldDean"
        p._name = "NewDean"
        p._newattr1 = "garbage"  # Unknown fields won't be added
        p.save()
        raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id)
        assert raw_p == {"_id": p.id, "_name": "NewDean", "name": "OldDean"}

    def test_dynamic_document_queries(self):
        """Ensure we can query dynamic fields"""
        p = self.Person()
        p.name = "Dean"
        p.age = 22
        p.save()

        assert 1 == self.Person.objects(age=22).count()
        p = self.Person.objects(age=22)
        p = p.get()
        assert 22 == p.age

    def test_complex_dynamic_document_queries(self):
        class Person(DynamicDocument):
            name = StringField()

        Person.drop_collection()

        p = Person(name="test")
        p.age = "ten"
        p.save()

        p1 = Person(name="test1")
        p1.age = "less than ten and a half"
        p1.save()

        p2 = Person(name="test2")
        p2.age = 10
        p2.save()

        assert Person.objects(age__icontains="ten").count() == 2
        assert Person.objects(age__gte=10).count() == 1

    def test_complex_data_lookups(self):
        """Ensure you can query dynamic document dynamic fields"""
        p = self.Person()
        p.misc = {"hello": "world"}
        p.save()

        assert 1 == self.Person.objects(misc__hello="world").count()

    def test_three_level_complex_data_lookups(self):
        """Ensure you can query three level document dynamic fields"""
        self.Person.objects.create(misc={"hello": {"hello2": "world"}})
        assert 1 == self.Person.objects(misc__hello__hello2="world").count()

    def test_complex_embedded_document_validation(self):
        """Ensure embedded dynamic documents may be validated"""

        class Embedded(DynamicEmbeddedDocument):
            content = URLField()

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()

        embedded_doc_1 = Embedded(content="http://mongoengine.org")
        embedded_doc_1.validate()

        embedded_doc_2 = Embedded(content="this is not a url")
        with pytest.raises(ValidationError):
            embedded_doc_2.validate()

        doc.embedded_field_1 = embedded_doc_1
        doc.embedded_field_2 = embedded_doc_2
        with pytest.raises(ValidationError):
            doc.validate()

    def test_inheritance(self):
        """Ensure that dynamic document plays nice with inheritance"""

        class Employee(self.Person):
            salary = IntField()

        Employee.drop_collection()

        assert "name" in Employee._fields
        assert "salary" in Employee._fields
        assert Employee._get_collection_name() == self.Person._get_collection_name()

        joe_bloggs = Employee()
        joe_bloggs.name = "Joe Bloggs"
        joe_bloggs.salary = 10
        joe_bloggs.age = 20
        joe_bloggs.save()

        assert 1 == self.Person.objects(age=20).count()
        assert 1 == Employee.objects(age=20).count()

        joe_bloggs = self.Person.objects.first()
        assert isinstance(joe_bloggs, Employee)

    def test_embedded_dynamic_document(self):
        """Test dynamic embedded documents"""

        class Embedded(DynamicEmbeddedDocument):
            pass

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()

        embedded_1 = Embedded()
        embedded_1.string_field = "hello"
        embedded_1.int_field = 1
        embedded_1.dict_field = {"hello": "world"}
        embedded_1.list_field = ["1", 2, {"hello": "world"}]
        doc.embedded_field = embedded_1

        assert doc.to_mongo() == {
            "embedded_field": {
                "_cls": "Embedded",
                "string_field": "hello",
                "int_field": 1,
                "dict_field": {"hello": "world"},
                "list_field": ["1", 2, {"hello": "world"}],
            }
        }
        doc.save()

        doc = Doc.objects.first()
        assert doc.embedded_field.__class__ == Embedded
        assert doc.embedded_field.string_field == "hello"
        assert doc.embedded_field.int_field == 1
        assert doc.embedded_field.dict_field == {"hello": "world"}
        assert doc.embedded_field.list_field == ["1", 2, {"hello": "world"}]

    def test_complex_embedded_documents(self):
        """Test complex dynamic embedded document setups"""

        class Embedded(DynamicEmbeddedDocument):
            pass

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()

        embedded_1 = Embedded()
        embedded_1.string_field = "hello"
        embedded_1.int_field = 1
        embedded_1.dict_field = {"hello": "world"}

        embedded_2 = Embedded()
        embedded_2.string_field = "hello"
        embedded_2.int_field = 1
        embedded_2.dict_field = {"hello": "world"}
        embedded_2.list_field = ["1", 2, {"hello": "world"}]

        embedded_1.list_field = ["1", 2, embedded_2]
        doc.embedded_field = embedded_1

        assert doc.to_mongo() == {
            "embedded_field": {
                "_cls": "Embedded",
                "string_field": "hello",
                "int_field": 1,
                "dict_field": {"hello": "world"},
                "list_field": [
                    "1",
                    2,
                    {
                        "_cls": "Embedded",
                        "string_field": "hello",
                        "int_field": 1,
                        "dict_field": {"hello": "world"},
                        "list_field": ["1", 2, {"hello": "world"}],
                    },
                ],
            }
        }
        doc.save()
        doc = Doc.objects.first()
        assert doc.embedded_field.__class__ == Embedded
        assert doc.embedded_field.string_field == "hello"
        assert doc.embedded_field.int_field == 1
        assert doc.embedded_field.dict_field == {"hello": "world"}
        assert doc.embedded_field.list_field[0] == "1"
        assert doc.embedded_field.list_field[1] == 2

        embedded_field = doc.embedded_field.list_field[2]

        assert embedded_field.__class__ == Embedded
        assert embedded_field.string_field == "hello"
        assert embedded_field.int_field == 1
        assert embedded_field.dict_field == {"hello": "world"}
        assert embedded_field.list_field == ["1", 2, {"hello": "world"}]

    def test_dynamic_and_embedded(self):
        """Ensure embedded documents play nicely"""

        class Address(EmbeddedDocument):
            city = StringField()

        class Person(DynamicDocument):
            name = StringField()

        Person.drop_collection()

        Person(name="Ross", address=Address(city="London")).save()

        person = Person.objects.first()
        person.address.city = "Lundenne"
        person.save()

        assert Person.objects.first().address.city == "Lundenne"

        person = Person.objects.first()
        person.address = Address(city="Londinium")
        person.save()

        assert Person.objects.first().address.city == "Londinium"

        person = Person.objects.first()
        person.age = 35
        person.save()
        assert Person.objects.first().age == 35

    def test_dynamic_embedded_works_with_only(self):
        """Ensure custom fieldnames on a dynamic embedded document are found by qs.only()"""

        class Address(DynamicEmbeddedDocument):
            city = StringField()

        class Person(DynamicDocument):
            address = EmbeddedDocumentField(Address)

        Person.drop_collection()

        Person(
            name="Eric", address=Address(city="San Francisco", street_number="1337")
        ).save()

        assert Person.objects.first().address.street_number == "1337"
        assert (
            Person.objects.only("address__street_number").first().address.street_number
            == "1337"
        )

    def test_dynamic_and_embedded_dict_access(self):
        """Ensure embedded dynamic documents work with dict[] style access"""

        class Address(EmbeddedDocument):
            city = StringField()

        class Person(DynamicDocument):
            name = StringField()

        Person.drop_collection()

        Person(name="Ross", address=Address(city="London")).save()

        person = Person.objects.first()
        person.attrval = "This works"

        person["phone"] = "555-1212"  # but this should too

        # Same thing two levels deep
        person["address"]["city"] = "Lundenne"
        person.save()

        assert Person.objects.first().address.city == "Lundenne"

        assert Person.objects.first().phone == "555-1212"

        person = Person.objects.first()
        person.address = Address(city="Londinium")
        person.save()

        assert Person.objects.first().address.city == "Londinium"

        person = Person.objects.first()
        person["age"] = 35
        person.save()
        assert Person.objects.first().age == 35


if __name__ == "__main__":
    unittest.main()
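
The behaviour this file relies on — undeclared attributes becoming persisted
fields — can be summarised in a minimal sketch, assuming a running local
mongod (the class and database names are illustrative):

from mongoengine import DynamicDocument, StringField, connect

connect("dynamic_sketch")  # assumes a local mongod; database name is made up


class Page(DynamicDocument):
    title = StringField()  # the only declared field


page = Page(title="Home")
page.views = 42  # undeclared attributes become dynamic fields
page.tags = ["a", "b"]
page.save()

# Dynamic fields are stored in the collection alongside declared ones:
raw = Page.objects.as_pymongo().get(id=page.id)
assert raw["views"] == 42 and raw["tags"] == ["a", "b"]
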
File diff suppressed because it is too large
@@ -1,617 +0,0 @@
import unittest
import warnings

import pytest

from mongoengine import (
    BooleanField,
    Document,
    EmbeddedDocument,
    EmbeddedDocumentField,
    GenericReferenceField,
    IntField,
    ReferenceField,
    StringField,
)
from mongoengine.pymongo_support import list_collection_names
from tests.fixtures import Base
from tests.utils import MongoDBTestCase


class TestInheritance(MongoDBTestCase):
    def tearDown(self):
        for collection in list_collection_names(self.db):
            self.db.drop_collection(collection)

    def test_constructor_cls(self):
        # Ensures _cls is properly set during construction
        # and when object gets reloaded (prevent regression of #1950)
        class EmbedData(EmbeddedDocument):
            data = StringField()
            meta = {"allow_inheritance": True}

        class DataDoc(Document):
            name = StringField()
            embed = EmbeddedDocumentField(EmbedData)
            meta = {"allow_inheritance": True}

        test_doc = DataDoc(name="test", embed=EmbedData(data="data"))
        assert test_doc._cls == "DataDoc"
        assert test_doc.embed._cls == "EmbedData"
        test_doc.save()
        saved_doc = DataDoc.objects.with_id(test_doc.id)
        assert test_doc._cls == saved_doc._cls
        assert test_doc.embed._cls == saved_doc.embed._cls
        test_doc.delete()

    def test_superclasses(self):
        """Ensure that the correct list of superclasses is assembled."""

        class Animal(Document):
            meta = {"allow_inheritance": True}

        class Fish(Animal):
            pass

        class Guppy(Fish):
            pass

        class Mammal(Animal):
            pass

        class Dog(Mammal):
            pass

        class Human(Mammal):
            pass

        assert Animal._superclasses == ()
        assert Fish._superclasses == ("Animal",)
        assert Guppy._superclasses == ("Animal", "Animal.Fish")
        assert Mammal._superclasses == ("Animal",)
        assert Dog._superclasses == ("Animal", "Animal.Mammal")
        assert Human._superclasses == ("Animal", "Animal.Mammal")

    def test_external_superclasses(self):
        """Ensure that the correct list of super classes is assembled when
        importing part of the model.
        """

        class Animal(Base):
            pass

        class Fish(Animal):
            pass

        class Guppy(Fish):
            pass

        class Mammal(Animal):
            pass

        class Dog(Mammal):
            pass

        class Human(Mammal):
            pass

        assert Animal._superclasses == ("Base",)
        assert Fish._superclasses == ("Base", "Base.Animal")
        assert Guppy._superclasses == ("Base", "Base.Animal", "Base.Animal.Fish")
        assert Mammal._superclasses == ("Base", "Base.Animal")
        assert Dog._superclasses == ("Base", "Base.Animal", "Base.Animal.Mammal")
        assert Human._superclasses == ("Base", "Base.Animal", "Base.Animal.Mammal")

    def test_subclasses(self):
        """Ensure that the correct list of _subclasses (subclasses) is
        assembled.
        """

        class Animal(Document):
            meta = {"allow_inheritance": True}

        class Fish(Animal):
            pass

        class Guppy(Fish):
            pass

        class Mammal(Animal):
            pass

        class Dog(Mammal):
            pass

        class Human(Mammal):
            pass

        assert Animal._subclasses == (
            "Animal",
            "Animal.Fish",
            "Animal.Fish.Guppy",
            "Animal.Mammal",
            "Animal.Mammal.Dog",
            "Animal.Mammal.Human",
        )
        assert Fish._subclasses == ("Animal.Fish", "Animal.Fish.Guppy")
        assert Guppy._subclasses == ("Animal.Fish.Guppy",)
        assert Mammal._subclasses == (
            "Animal.Mammal",
            "Animal.Mammal.Dog",
            "Animal.Mammal.Human",
        )
        assert Human._subclasses == ("Animal.Mammal.Human",)

    def test_external_subclasses(self):
        """Ensure that the correct list of _subclasses (subclasses) is
        assembled when importing part of the model.
        """

        class Animal(Base):
            pass

        class Fish(Animal):
            pass

        class Guppy(Fish):
            pass

        class Mammal(Animal):
            pass

        class Dog(Mammal):
            pass

        class Human(Mammal):
            pass

        assert Animal._subclasses == (
            "Base.Animal",
            "Base.Animal.Fish",
            "Base.Animal.Fish.Guppy",
            "Base.Animal.Mammal",
            "Base.Animal.Mammal.Dog",
            "Base.Animal.Mammal.Human",
        )
        assert Fish._subclasses == ("Base.Animal.Fish", "Base.Animal.Fish.Guppy")
        assert Guppy._subclasses == ("Base.Animal.Fish.Guppy",)
        assert Mammal._subclasses == (
            "Base.Animal.Mammal",
            "Base.Animal.Mammal.Dog",
            "Base.Animal.Mammal.Human",
        )
        assert Human._subclasses == ("Base.Animal.Mammal.Human",)

    def test_dynamic_declarations(self):
        """Test that declaring an extra class updates meta data"""

        class Animal(Document):
            meta = {"allow_inheritance": True}

        assert Animal._superclasses == ()
        assert Animal._subclasses == ("Animal",)

        # Test dynamically adding a class changes the meta data
        class Fish(Animal):
            pass

        assert Animal._superclasses == ()
        assert Animal._subclasses == ("Animal", "Animal.Fish")

        assert Fish._superclasses == ("Animal",)
        assert Fish._subclasses == ("Animal.Fish",)

        # Test dynamically adding an inherited class changes the meta data
        class Pike(Fish):
            pass

        assert Animal._superclasses == ()
        assert Animal._subclasses == ("Animal", "Animal.Fish", "Animal.Fish.Pike")

        assert Fish._superclasses == ("Animal",)
        assert Fish._subclasses == ("Animal.Fish", "Animal.Fish.Pike")

        assert Pike._superclasses == ("Animal", "Animal.Fish")
        assert Pike._subclasses == ("Animal.Fish.Pike",)

    def test_inheritance_meta_data(self):
        """Ensure that document may inherit fields from a superclass document."""

        class Person(Document):
            name = StringField()
            age = IntField()

            meta = {"allow_inheritance": True}

        class Employee(Person):
            salary = IntField()

        assert ["_cls", "age", "id", "name", "salary"] == sorted(
            Employee._fields.keys()
        )
        assert Employee._get_collection_name() == Person._get_collection_name()

    def test_inheritance_to_mongo_keys(self):
        """Ensure that document may inherit fields from a superclass document."""

        class Person(Document):
            name = StringField()
            age = IntField()

            meta = {"allow_inheritance": True}

        class Employee(Person):
            salary = IntField()

        assert ["_cls", "age", "id", "name", "salary"] == sorted(
            Employee._fields.keys()
        )
        assert Person(name="Bob", age=35).to_mongo().keys() == ["_cls", "name", "age"]
        assert Employee(name="Bob", age=35, salary=0).to_mongo().keys() == [
            "_cls",
            "name",
            "age",
            "salary",
        ]
        assert Employee._get_collection_name() == Person._get_collection_name()

    def test_indexes_and_multiple_inheritance(self):
        """Ensure that all of the indexes are created for a document with
        multiple inheritance.
        """

        class A(Document):
            a = StringField()

            meta = {"allow_inheritance": True, "indexes": ["a"]}

        class B(Document):
            b = StringField()

            meta = {"allow_inheritance": True, "indexes": ["b"]}

        class C(A, B):
            pass

        A.drop_collection()
        B.drop_collection()
        C.drop_collection()

        C.ensure_indexes()

        assert sorted(
            idx["key"] for idx in C._get_collection().index_information().values()
        ) == sorted([[("_cls", 1), ("b", 1)], [("_id", 1)], [("_cls", 1), ("a", 1)]])

    def test_polymorphic_queries(self):
        """Ensure that the correct subclasses are returned from a query"""

        class Animal(Document):
            meta = {"allow_inheritance": True}

        class Fish(Animal):
            pass

        class Mammal(Animal):
            pass

        class Dog(Mammal):
            pass

        class Human(Mammal):
            pass

        Animal.drop_collection()

        Animal().save()
        Fish().save()
        Mammal().save()
        Dog().save()
        Human().save()

        classes = [obj.__class__ for obj in Animal.objects]
        assert classes == [Animal, Fish, Mammal, Dog, Human]

        classes = [obj.__class__ for obj in Mammal.objects]
        assert classes == [Mammal, Dog, Human]

        classes = [obj.__class__ for obj in Human.objects]
        assert classes == [Human]

    def test_allow_inheritance(self):
        """Ensure that inheritance is disabled by default on simple
        classes and that _cls will not be used.
        """

        class Animal(Document):
            name = StringField()

        # can't inherit because Animal didn't explicitly allow inheritance
        with pytest.raises(ValueError, match="Document Animal may not be subclassed"):

            class Dog(Animal):
                pass

        # Check that _cls etc aren't present on simple documents
        dog = Animal(name="dog").save()
        assert dog.to_mongo().keys() == ["_id", "name"]

        collection = self.db[Animal._get_collection_name()]
        obj = collection.find_one()
        assert "_cls" not in obj

    def test_cant_turn_off_inheritance_on_subclass(self):
        """Ensure that if inheritance is on in a subclass you can't turn it off."""

        class Animal(Document):
            name = StringField()
            meta = {"allow_inheritance": True}

        with pytest.raises(ValueError) as exc_info:

            class Mammal(Animal):
                meta = {"allow_inheritance": False}

        assert (
            str(exc_info.value)
            == 'Only direct subclasses of Document may set "allow_inheritance" to False'
        )

    def test_allow_inheritance_abstract_document(self):
        """Ensure that abstract documents can set inheritance rules and that
        _cls will not be used.
        """

        class FinalDocument(Document):
            meta = {"abstract": True, "allow_inheritance": False}

        class Animal(FinalDocument):
            name = StringField()

        with pytest.raises(ValueError):

            class Mammal(Animal):
                pass

        # Check that _cls isn't present in simple documents
        doc = Animal(name="dog")
        assert "_cls" not in doc.to_mongo()

    def test_using_abstract_class_in_reference_field(self):
        # Ensures no regression of #1920
        class AbstractHuman(Document):
            meta = {"abstract": True}

        class Dad(AbstractHuman):
            name = StringField()

        class Home(Document):
            dad = ReferenceField(AbstractHuman)  # Referencing the abstract class
            address = StringField()

        dad = Dad(name="5").save()
        Home(dad=dad, address="street").save()

        home = Home.objects.first()
        home.address = "garbage"
        home.save()  # Was failing with ValidationError

    def test_abstract_class_referencing_self(self):
        # Ensures no regression of #1920
        class Human(Document):
            meta = {"abstract": True}
            creator = ReferenceField("self", dbref=True)

        class User(Human):
            name = StringField()

        user = User(name="John").save()
        user2 = User(name="Foo", creator=user).save()

        user2 = User.objects.with_id(user2.id)
        user2.name = "Bar"
        user2.save()  # Was failing with ValidationError

    def test_abstract_handle_ids_in_metaclass_properly(self):
        class City(Document):
            continent = StringField()
            meta = {"abstract": True, "allow_inheritance": False}

        class EuropeanCity(City):
            name = StringField()

        berlin = EuropeanCity(name="Berlin", continent="Europe")
        assert len(berlin._db_field_map) == len(berlin._fields_ordered)
        assert len(berlin._reverse_db_field_map) == len(berlin._fields_ordered)
        assert len(berlin._fields_ordered) == 3
        assert berlin._fields_ordered[0] == "id"

    def test_auto_id_not_set_if_specific_in_parent_class(self):
        class City(Document):
            continent = StringField()
            city_id = IntField(primary_key=True)
            meta = {"abstract": True, "allow_inheritance": False}

        class EuropeanCity(City):
            name = StringField()

        berlin = EuropeanCity(name="Berlin", continent="Europe")
        assert len(berlin._db_field_map) == len(berlin._fields_ordered)
        assert len(berlin._reverse_db_field_map) == len(berlin._fields_ordered)
        assert len(berlin._fields_ordered) == 3
        assert berlin._fields_ordered[0] == "city_id"

    def test_auto_id_vs_non_pk_id_field(self):
        class City(Document):
            continent = StringField()
            id = IntField()
            meta = {"abstract": True, "allow_inheritance": False}

        class EuropeanCity(City):
            name = StringField()

        berlin = EuropeanCity(name="Berlin", continent="Europe")
        assert len(berlin._db_field_map) == len(berlin._fields_ordered)
        assert len(berlin._reverse_db_field_map) == len(berlin._fields_ordered)
        assert len(berlin._fields_ordered) == 4
        assert berlin._fields_ordered[0] == "auto_id_0"
        berlin.save()
        assert berlin.pk == berlin.auto_id_0

    def test_abstract_document_creation_does_not_fail(self):
        class City(Document):
            continent = StringField()
            meta = {"abstract": True, "allow_inheritance": False}

        city = City(continent="asia")
        assert city.pk is None
        # TODO: expected error? Shouldn't we create a new error type?
        with pytest.raises(KeyError):
            city.pk = 1

    def test_allow_inheritance_embedded_document(self):
        """Ensure embedded documents respect inheritance."""

        class Comment(EmbeddedDocument):
            content = StringField()

        with pytest.raises(ValueError):

            class SpecialComment(Comment):
                pass

        doc = Comment(content="test")
        assert "_cls" not in doc.to_mongo()

        class Comment(EmbeddedDocument):
            content = StringField()
            meta = {"allow_inheritance": True}

        doc = Comment(content="test")
        assert "_cls" in doc.to_mongo()

    def test_document_inheritance(self):
        """Ensure multiple inheritance of abstract documents works"""

        class DateCreatedDocument(Document):
            meta = {"allow_inheritance": True, "abstract": True}

        class DateUpdatedDocument(Document):
            meta = {"allow_inheritance": True, "abstract": True}

        class MyDocument(DateCreatedDocument, DateUpdatedDocument):
            pass

    def test_abstract_documents(self):
        """Ensure that a document superclass can be marked as abstract
        thereby not using it as the name for the collection."""

        defaults = {
            "index_background": True,
            "index_opts": {"hello": "world"},
            "allow_inheritance": True,
            "queryset_class": "QuerySet",
            "db_alias": "myDB",
            "shard_key": ("hello", "world"),
        }

        meta_settings = {"abstract": True}
        meta_settings.update(defaults)

        class Animal(Document):
            name = StringField()
            meta = meta_settings

        class Fish(Animal):
            pass

        class Guppy(Fish):
            pass

        class Mammal(Animal):
            meta = {"abstract": True}

        class Human(Mammal):
            pass

        for k, v in defaults.items():
            for cls in [Animal, Fish, Guppy]:
                assert cls._meta[k] == v

        assert "collection" not in Animal._meta
        assert "collection" not in Mammal._meta

        assert Animal._get_collection_name() is None
        assert Mammal._get_collection_name() is None

        assert Fish._get_collection_name() == "fish"
        assert Guppy._get_collection_name() == "fish"
        assert Human._get_collection_name() == "human"

        # ensure that a subclass of a non-abstract class can't be abstract
        with pytest.raises(ValueError):

            class EvilHuman(Human):
                evil = BooleanField(default=True)
                meta = {"abstract": True}

    def test_abstract_embedded_documents(self):
        # 789: EmbeddedDocument shouldn't inherit abstract
        class A(EmbeddedDocument):
            meta = {"abstract": True}

        class B(A):
            pass

        assert not B._meta["abstract"]

    def test_inherited_collections(self):
        """Ensure that subclassed documents don't override parents'
        collections
        """

        class Drink(Document):
            name = StringField()
            meta = {"allow_inheritance": True}

        class Drinker(Document):
            drink = GenericReferenceField()

        try:
            warnings.simplefilter("error")

            class AcloholicDrink(Drink):
                meta = {"collection": "booze"}

        except SyntaxWarning:
            warnings.simplefilter("ignore")

            class AlcoholicDrink(Drink):
                meta = {"collection": "booze"}

        else:
            raise AssertionError("SyntaxWarning should be triggered")

        warnings.resetwarnings()

        Drink.drop_collection()
        AlcoholicDrink.drop_collection()
        Drinker.drop_collection()

        red_bull = Drink(name="Red Bull")
        red_bull.save()

        programmer = Drinker(drink=red_bull)
        programmer.save()

        beer = AlcoholicDrink(name="Beer")
        beer.save()
        real_person = Drinker(drink=beer)
        real_person.save()

        assert Drinker.objects[0].drink.name == red_bull.name
        assert Drinker.objects[1].drink.name == beer.name


if __name__ == "__main__":
    unittest.main()
File diff suppressed because it is too large
@ -1,106 +0,0 @@
import unittest
import uuid
from datetime import datetime

from bson import ObjectId

from mongoengine import *
from tests.utils import MongoDBTestCase


class TestJson(MongoDBTestCase):
    def test_json_names(self):
        """
        Test the behavior requested in
        https://github.com/MongoEngine/mongoengine/issues/654,
        where the reporter asks for the ability to perform a to_json
        with the original field names rather than the abbreviated
        MongoDB document keys.
        """

        class Embedded(EmbeddedDocument):
            string = StringField(db_field="s")

        class Doc(Document):
            string = StringField(db_field="s")
            embedded = EmbeddedDocumentField(Embedded, db_field="e")

        doc = Doc(string="Hello", embedded=Embedded(string="Inner Hello"))
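        # use_db_field=False makes to_json emit the Python field names
        # ("string", "embedded") instead of the short db_field aliases
        # ("s", "e") declared above: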
        doc_json = doc.to_json(
            sort_keys=True, use_db_field=False, separators=(",", ":")
        )

        expected_json = """{"embedded":{"string":"Inner Hello"},"string":"Hello"}"""

        assert doc_json == expected_json

    def test_json_simple(self):
        class Embedded(EmbeddedDocument):
            string = StringField()

        class Doc(Document):
            string = StringField()
            embedded_field = EmbeddedDocumentField(Embedded)

            def __eq__(self, other):
                return (
                    self.string == other.string
                    and self.embedded_field == other.embedded_field
                )

        doc = Doc(string="Hi", embedded_field=Embedded(string="Hi"))

        doc_json = doc.to_json(sort_keys=True, separators=(",", ":"))
        expected_json = """{"embedded_field":{"string":"Hi"},"string":"Hi"}"""
        assert doc_json == expected_json

        assert doc == Doc.from_json(doc.to_json())

    def test_json_complex(self):
        class EmbeddedDoc(EmbeddedDocument):
            pass

        class Simple(Document):
            pass

        class Doc(Document):
            string_field = StringField(default="1")
            int_field = IntField(default=1)
            float_field = FloatField(default=1.1)
            boolean_field = BooleanField(default=True)
            datetime_field = DateTimeField(default=datetime.now)
            embedded_document_field = EmbeddedDocumentField(
                EmbeddedDoc, default=lambda: EmbeddedDoc()
            )
            list_field = ListField(default=lambda: [1, 2, 3])
            dict_field = DictField(default=lambda: {"hello": "world"})
            objectid_field = ObjectIdField(default=ObjectId)
            reference_field = ReferenceField(Simple, default=lambda: Simple().save())
            map_field = MapField(IntField(), default=lambda: {"simple": 1})
            decimal_field = DecimalField(default=1.0)
            complex_datetime_field = ComplexDateTimeField(default=datetime.now)
            url_field = URLField(default="http://mongoengine.org")
            dynamic_field = DynamicField(default=1)
            generic_reference_field = GenericReferenceField(
                default=lambda: Simple().save()
            )
            sorted_list_field = SortedListField(IntField(), default=lambda: [1, 2, 3])
            email_field = EmailField(default="ross@example.com")
            geo_point_field = GeoPointField(default=lambda: [1, 2])
            sequence_field = SequenceField()
            uuid_field = UUIDField(default=uuid.uuid4)
            generic_embedded_document_field = GenericEmbeddedDocumentField(
                default=lambda: EmbeddedDoc()
            )

            def __eq__(self, other):
                import json

                return json.loads(self.to_json()) == json.loads(other.to_json())

        doc = Doc()
        assert doc == Doc.from_json(doc.to_json())


if __name__ == "__main__":
    unittest.main()
@ -1,217 +0,0 @@
import unittest
from datetime import datetime

import pytest

from mongoengine import *
from tests.utils import MongoDBTestCase


class TestValidatorError(MongoDBTestCase):
    def test_to_dict(self):
        """Ensure a ValidationError handles error to_dict correctly."""
        error = ValidationError("root")
        assert error.to_dict() == {}

        # 1st level error schema
        error.errors = {"1st": ValidationError("bad 1st")}
        assert "1st" in error.to_dict()
        assert error.to_dict()["1st"] == "bad 1st"

        # 2nd level error schema
        error.errors = {
            "1st": ValidationError(
                "bad 1st", errors={"2nd": ValidationError("bad 2nd")}
            )
        }
        assert "1st" in error.to_dict()
        assert isinstance(error.to_dict()["1st"], dict)
        assert "2nd" in error.to_dict()["1st"]
        assert error.to_dict()["1st"]["2nd"] == "bad 2nd"

        # moar levels
        error.errors = {
            "1st": ValidationError(
                "bad 1st",
                errors={
                    "2nd": ValidationError(
                        "bad 2nd",
                        errors={
                            "3rd": ValidationError(
                                "bad 3rd", errors={"4th": ValidationError("Inception")}
                            )
                        },
                    )
                },
            )
        }
        assert "1st" in error.to_dict()
        assert "2nd" in error.to_dict()["1st"]
        assert "3rd" in error.to_dict()["1st"]["2nd"]
        assert "4th" in error.to_dict()["1st"]["2nd"]["3rd"]
        assert error.to_dict()["1st"]["2nd"]["3rd"]["4th"] == "Inception"

        assert error.message == "root(2nd.3rd.4th.Inception: ['1st'])"

    def test_model_validation(self):
        class User(Document):
            username = StringField(primary_key=True)
            name = StringField(required=True)

        try:
            User().validate()
        except ValidationError as e:
            assert "User:None" in e.message
            assert e.to_dict() == {
                "username": "Field is required",
                "name": "Field is required",
            }

        user = User(username="RossC0", name="Ross").save()
        user.name = None
        try:
            user.save()
        except ValidationError as e:
            assert "User:RossC0" in e.message
            assert e.to_dict() == {"name": "Field is required"}

    def test_fields_rewrite(self):
        class BasePerson(Document):
            name = StringField()
            age = IntField()
            meta = {"abstract": True}

        class Person(BasePerson):
            name = StringField(required=True)

        p = Person(age=15)
        with pytest.raises(ValidationError):
            p.validate()

    def test_embedded_document_validation(self):
        """Ensure that embedded documents may be validated."""

        class Comment(EmbeddedDocument):
            date = DateTimeField()
            content = StringField(required=True)

        comment = Comment()
        with pytest.raises(ValidationError):
            comment.validate()

        comment.content = "test"
        comment.validate()

        comment.date = 4
        with pytest.raises(ValidationError):
            comment.validate()

        comment.date = datetime.now()
        comment.validate()
        assert comment._instance is None

    def test_embedded_db_field_validate(self):
        class SubDoc(EmbeddedDocument):
            val = IntField(required=True)

        class Doc(Document):
            id = StringField(primary_key=True)
            e = EmbeddedDocumentField(SubDoc, db_field="eb")

        try:
            Doc(id="bad").validate()
        except ValidationError as e:
            assert "SubDoc:None" in e.message
            assert e.to_dict() == {"e": {"val": "OK could not be converted to int"}}

        Doc.drop_collection()

        Doc(id="test", e=SubDoc(val=15)).save()

        doc = Doc.objects.first()
        keys = doc._data.keys()
        assert 2 == len(keys)
        assert "e" in keys
        assert "id" in keys

        doc.e.val = "OK"
        try:
            doc.save()
        except ValidationError as e:
            assert "Doc:test" in e.message
            assert e.to_dict() == {"e": {"val": "OK could not be converted to int"}}

    def test_embedded_weakref(self):
        class SubDoc(EmbeddedDocument):
            val = IntField(required=True)

        class Doc(Document):
            e = EmbeddedDocumentField(SubDoc, db_field="eb")

        Doc.drop_collection()

        d1 = Doc()
        d2 = Doc()

        s = SubDoc()

        with pytest.raises(ValidationError):
            s.validate()

        d1.e = s
        d2.e = s

        del d1

        with pytest.raises(ValidationError):
            d2.validate()

    def test_parent_reference_in_child_document(self):
        """
        Test to ensure a ReferenceField can store a reference to a parent
        class when inherited. Issue #954.
        """

        class Parent(Document):
            meta = {"allow_inheritance": True}
            reference = ReferenceField("self")

        class Child(Parent):
            pass

        parent = Parent()
        parent.save()

        child = Child(reference=parent)

        # Saving child should not raise a ValidationError
        try:
            child.save()
        except ValidationError as e:
            self.fail("ValidationError raised: %s" % e.message)

    def test_parent_reference_set_as_attribute_in_child_document(self):
        """
        Test to ensure a ReferenceField can store a reference to a parent
        class when inherited and when set via attribute. Issue #954.
        """

        class Parent(Document):
            meta = {"allow_inheritance": True}
            reference = ReferenceField("self")

        class Child(Parent):
            pass

        parent = Parent()
        parent.save()

        child = Child()
        child.reference = parent

        # Saving the child should not raise a ValidationError
        child.save()


if __name__ == "__main__":
    unittest.main()
Binary file not shown (image, 4.9 KiB).
@ -1,140 +0,0 @@
import uuid

import pytest
from bson import Binary

from mongoengine import *
from tests.utils import MongoDBTestCase

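# 32 bytes of arbitrary binary data; encoding the literal with latin-1 maps
# each code point to exactly one byte, so the value round-trips unchanged.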
BIN_VALUE = "\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5".encode(
|
|
||||||
"latin-1"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class TestBinaryField(MongoDBTestCase):
|
|
||||||
def test_binary_fields(self):
|
|
||||||
"""Ensure that binary fields can be stored and retrieved."""
|
|
||||||
|
|
||||||
class Attachment(Document):
|
|
||||||
content_type = StringField()
|
|
||||||
blob = BinaryField()
|
|
||||||
|
|
||||||
BLOB = b"\xe6\x00\xc4\xff\x07"
|
|
||||||
MIME_TYPE = "application/octet-stream"
|
|
||||||
|
|
||||||
Attachment.drop_collection()
|
|
||||||
|
|
||||||
attachment = Attachment(content_type=MIME_TYPE, blob=BLOB)
|
|
||||||
attachment.save()
|
|
||||||
|
|
||||||
attachment_1 = Attachment.objects().first()
|
|
||||||
assert MIME_TYPE == attachment_1.content_type
|
|
||||||
assert BLOB == bytes(attachment_1.blob)
|
|
||||||
|
|
||||||
def test_validation_succeeds(self):
|
|
||||||
"""Ensure that valid values can be assigned to binary fields."""
|
|
||||||
|
|
||||||
class AttachmentRequired(Document):
|
|
||||||
blob = BinaryField(required=True)
|
|
||||||
|
|
||||||
class AttachmentSizeLimit(Document):
|
|
||||||
blob = BinaryField(max_bytes=4)
|
|
||||||
|
|
||||||
attachment_required = AttachmentRequired()
|
|
||||||
with pytest.raises(ValidationError):
|
|
||||||
attachment_required.validate()
|
|
||||||
attachment_required.blob = Binary(b"\xe6\x00\xc4\xff\x07")
|
|
||||||
attachment_required.validate()
|
|
||||||
|
|
||||||
_5_BYTES = b"\xe6\x00\xc4\xff\x07"
|
|
||||||
_4_BYTES = b"\xe6\x00\xc4\xff"
|
|
||||||
with pytest.raises(ValidationError):
|
|
||||||
AttachmentSizeLimit(blob=_5_BYTES).validate()
|
|
||||||
AttachmentSizeLimit(blob=_4_BYTES).validate()
|
|
||||||
|
|
||||||
def test_validation_fails(self):
|
|
||||||
"""Ensure that invalid values cannot be assigned to binary fields."""
|
|
||||||
|
|
||||||
class Attachment(Document):
|
|
||||||
blob = BinaryField()
|
|
||||||
|
|
||||||
for invalid_data in (2, "Im_a_unicode", ["some_str"]):
|
|
||||||
with pytest.raises(ValidationError):
|
|
||||||
Attachment(blob=invalid_data).validate()
|
|
||||||
|
|
||||||
def test__primary(self):
|
|
||||||
class Attachment(Document):
|
|
||||||
id = BinaryField(primary_key=True)
|
|
||||||
|
|
||||||
Attachment.drop_collection()
|
|
||||||
binary_id = uuid.uuid4().bytes
|
|
||||||
att = Attachment(id=binary_id).save()
|
|
||||||
assert 1 == Attachment.objects.count()
|
|
||||||
assert 1 == Attachment.objects.filter(id=att.id).count()
|
|
||||||
att.delete()
|
|
||||||
assert 0 == Attachment.objects.count()
|
|
||||||
|
|
||||||
def test_primary_filter_by_binary_pk_as_str(self):
|
|
||||||
class Attachment(Document):
|
|
||||||
id = BinaryField(primary_key=True)
|
|
||||||
|
|
||||||
Attachment.drop_collection()
|
|
||||||
binary_id = uuid.uuid4().bytes
|
|
||||||
att = Attachment(id=binary_id).save()
|
|
||||||
assert 1 == Attachment.objects.filter(id=binary_id).count()
|
|
||||||
att.delete()
|
|
||||||
assert 0 == Attachment.objects.count()
|
|
||||||
|
|
||||||
def test_match_querying_with_bytes(self):
|
|
||||||
class MyDocument(Document):
|
|
||||||
bin_field = BinaryField()
|
|
||||||
|
|
||||||
MyDocument.drop_collection()
|
|
||||||
|
|
||||||
doc = MyDocument(bin_field=BIN_VALUE).save()
|
|
||||||
matched_doc = MyDocument.objects(bin_field=BIN_VALUE).first()
|
|
||||||
assert matched_doc.id == doc.id
|
|
||||||
|
|
||||||
def test_match_querying_with_binary(self):
|
|
||||||
class MyDocument(Document):
|
|
||||||
bin_field = BinaryField()
|
|
||||||
|
|
||||||
MyDocument.drop_collection()
|
|
||||||
|
|
||||||
doc = MyDocument(bin_field=BIN_VALUE).save()
|
|
||||||
|
|
||||||
matched_doc = MyDocument.objects(bin_field=Binary(BIN_VALUE)).first()
|
|
||||||
assert matched_doc.id == doc.id
|
|
||||||
|
|
||||||
def test_modify_operation__set(self):
|
|
||||||
"""Ensures no regression of bug #1127"""
|
|
||||||
|
|
||||||
class MyDocument(Document):
|
|
||||||
some_field = StringField()
|
|
||||||
bin_field = BinaryField()
|
|
||||||
|
|
||||||
MyDocument.drop_collection()
|
|
||||||
|
|
||||||
doc = MyDocument.objects(some_field="test").modify(
|
|
||||||
upsert=True, new=True, set__bin_field=BIN_VALUE
|
|
||||||
)
|
|
||||||
assert doc.some_field == "test"
|
|
||||||
assert doc.bin_field == BIN_VALUE
|
|
||||||
|
|
||||||
def test_update_one(self):
|
|
||||||
"""Ensures no regression of bug #1127"""
|
|
||||||
|
|
||||||
class MyDocument(Document):
|
|
||||||
bin_field = BinaryField()
|
|
||||||
|
|
||||||
MyDocument.drop_collection()
|
|
||||||
|
|
||||||
bin_data = b"\xe6\x00\xc4\xff\x07"
|
|
||||||
doc = MyDocument(bin_field=bin_data).save()
|
|
||||||
|
|
||||||
n_updated = MyDocument.objects(bin_field=bin_data).update_one(
|
|
||||||
bin_field=BIN_VALUE
|
|
||||||
)
|
|
||||||
assert n_updated == 1
|
|
||||||
fetched = MyDocument.objects.with_id(doc.id)
|
|
||||||
assert fetched.bin_field == BIN_VALUE
|
|
@ -1,62 +0,0 @@
import pytest

from mongoengine import *
from tests.utils import MongoDBTestCase, get_as_pymongo


class TestBooleanField(MongoDBTestCase):
    def test_storage(self):
        class Person(Document):
            admin = BooleanField()

        person = Person(admin=True)
        person.save()
        assert get_as_pymongo(person) == {"_id": person.id, "admin": True}

    def test_construction_does_not_fail_uncastable_value(self):
        class BoolFail:
            def __bool__(self):
                return "bogus"

        class Person(Document):
            admin = BooleanField()

        person = Person(admin=BoolFail())
        # construction must not raise; the uncastable value is kept as-is
        person.admin == "bogus"

    def test_validation(self):
        """Ensure that invalid values cannot be assigned to boolean
        fields.
        """

        class Person(Document):
            admin = BooleanField()

        person = Person()
        person.admin = True
        person.validate()

        person.admin = 2
        with pytest.raises(ValidationError):
            person.validate()
        person.admin = "Yes"
        with pytest.raises(ValidationError):
            person.validate()
        person.admin = "False"
        with pytest.raises(ValidationError):
            person.validate()

    def test_weirdness_constructor(self):
        """When an attribute is set in the constructor, it gets cast to a
        bool, which causes some weird behavior. We don't necessarily want
        to maintain this behavior, but it's a known issue.
        """

        class Person(Document):
            admin = BooleanField()

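        # bool("False") and bool("0") are both True in Python (any non-empty
        # string is truthy), hence the surprising results below: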
        new_person = Person(admin="False")
        assert new_person.admin

        new_person = Person(admin="0")
        assert new_person.admin
@ -1,393 +0,0 @@
from decimal import Decimal

import pytest

from mongoengine import (
    CachedReferenceField,
    DecimalField,
    Document,
    EmbeddedDocument,
    EmbeddedDocumentField,
    InvalidDocumentError,
    ListField,
    ReferenceField,
    StringField,
    ValidationError,
)
from tests.utils import MongoDBTestCase


class TestCachedReferenceField(MongoDBTestCase):
    def test_constructor_fail_bad_document_type(self):
        with pytest.raises(
            ValidationError, match="must be a document class or a string"
        ):
            CachedReferenceField(document_type=0)

    def test_get_and_save(self):
        """
        Tests #1047: CachedReferenceField creates DBRefs on to_python,
        but can't save them on to_mongo.
        """

        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocorrence(Document):
            person = StringField()
            animal = CachedReferenceField(Animal)

        Animal.drop_collection()
        Ocorrence.drop_collection()

        Ocorrence(
            person="testte", animal=Animal(name="Leopard", tag="heavy").save()
        ).save()
        p = Ocorrence.objects.get()
        p.person = "new_testte"
        p.save()

    def test_general_things(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocorrence(Document):
            person = StringField()
            animal = CachedReferenceField(Animal, fields=["tag"])

        Animal.drop_collection()
        Ocorrence.drop_collection()

        a = Animal(name="Leopard", tag="heavy")
        a.save()

        assert Animal._cached_reference_fields == [Ocorrence.animal]
        o = Ocorrence(person="teste", animal=a)
        o.save()

        p = Ocorrence(person="Wilson")
        p.save()

        assert Ocorrence.objects(animal=None).count() == 1

        assert a.to_mongo(fields=["tag"]) == {"tag": "heavy", "_id": a.pk}

        assert o.to_mongo()["animal"]["tag"] == "heavy"

        # counts
        Ocorrence(person="teste 2").save()
        Ocorrence(person="teste 3").save()

        count = Ocorrence.objects(animal__tag="heavy").count()
        assert count == 1

        ocorrence = Ocorrence.objects(animal__tag="heavy").first()
        assert ocorrence.person == "teste"
        assert isinstance(ocorrence.animal, Animal)

    def test_with_decimal(self):
        class PersonAuto(Document):
            name = StringField()
            salary = DecimalField()

        class SocialTest(Document):
            group = StringField()
            person = CachedReferenceField(PersonAuto, fields=("salary",))

        PersonAuto.drop_collection()
        SocialTest.drop_collection()

        p = PersonAuto(name="Alberto", salary=Decimal("7000.00"))
        p.save()

        s = SocialTest(group="dev", person=p)
        s.save()

        assert SocialTest.objects._collection.find_one({"person.salary": 7000.00}) == {
            "_id": s.pk,
            "group": s.group,
            "person": {"_id": p.pk, "salary": 7000.00},
        }

    def test_cached_reference_field_reference(self):
        class Group(Document):
            name = StringField()

        class Person(Document):
            name = StringField()
            group = ReferenceField(Group)

        class SocialData(Document):
            obs = StringField()
            tags = ListField(StringField())
            person = CachedReferenceField(Person, fields=("group",))

        Group.drop_collection()
        Person.drop_collection()
        SocialData.drop_collection()

        g1 = Group(name="dev")
        g1.save()

        g2 = Group(name="designers")
        g2.save()

        p1 = Person(name="Alberto", group=g1)
        p1.save()

        p2 = Person(name="Andre", group=g1)
        p2.save()

        p3 = Person(name="Afro design", group=g2)
        p3.save()

        s1 = SocialData(obs="testing 123", person=p1, tags=["tag1", "tag2"])
        s1.save()

        s2 = SocialData(obs="testing 321", person=p3, tags=["tag3", "tag4"])
        s2.save()

        assert SocialData.objects._collection.find_one({"tags": "tag2"}) == {
            "_id": s1.pk,
            "obs": "testing 123",
            "tags": ["tag1", "tag2"],
            "person": {"_id": p1.pk, "group": g1.pk},
        }

        assert SocialData.objects(person__group=g2).count() == 1
        assert SocialData.objects(person__group=g2).first() == s2

    def test_cached_reference_field_push_with_fields(self):
        class Product(Document):
            name = StringField()

        Product.drop_collection()

        class Basket(Document):
            products = ListField(CachedReferenceField(Product, fields=["name"]))

        Basket.drop_collection()
        product1 = Product(name="abc").save()
        product2 = Product(name="def").save()
        basket = Basket(products=[product1]).save()
        assert Basket.objects._collection.find_one() == {
            "_id": basket.pk,
            "products": [{"_id": product1.pk, "name": product1.name}],
        }
        # push to list
        basket.update(push__products=product2)
        basket.reload()
        assert Basket.objects._collection.find_one() == {
            "_id": basket.pk,
            "products": [
                {"_id": product1.pk, "name": product1.name},
                {"_id": product2.pk, "name": product2.name},
            ],
        }

    def test_cached_reference_field_update_all(self):
        class Person(Document):
            TYPES = (("pf", "PF"), ("pj", "PJ"))
            name = StringField()
            tp = StringField(choices=TYPES)
            father = CachedReferenceField("self", fields=("tp",))

        Person.drop_collection()

        a1 = Person(name="Wilson Father", tp="pj")
        a1.save()

        a2 = Person(name="Wilson Junior", tp="pf", father=a1)
        a2.save()

        a2 = Person.objects.with_id(a2.id)
        assert a2.father.tp == a1.tp

        assert dict(a2.to_mongo()) == {
            "_id": a2.pk,
            "name": "Wilson Junior",
            "tp": "pf",
            "father": {"_id": a1.pk, "tp": "pj"},
        }

        assert Person.objects(father=a1)._query == {"father._id": a1.pk}
        assert Person.objects(father=a1).count() == 1

        Person.objects.update(set__tp="pf")
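        # QuerySet.update() does not trigger document save signals, so the
        # cached copies of "father" are not refreshed automatically;
        # sync_all() rewrites every cached copy from the source documents: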
        Person.father.sync_all()

        a2.reload()
        assert dict(a2.to_mongo()) == {
            "_id": a2.pk,
            "name": "Wilson Junior",
            "tp": "pf",
            "father": {"_id": a1.pk, "tp": "pf"},
        }

    def test_cached_reference_fields_on_embedded_documents(self):
        with pytest.raises(InvalidDocumentError):

            class Test(Document):
                name = StringField()

            type(
                "WrongEmbeddedDocument",
                (EmbeddedDocument,),
                {"test": CachedReferenceField(Test)},
            )

    def test_cached_reference_auto_sync(self):
        class Person(Document):
            TYPES = (("pf", "PF"), ("pj", "PJ"))
            name = StringField()
            tp = StringField(choices=TYPES)

            father = CachedReferenceField("self", fields=("tp",))

        Person.drop_collection()

        a1 = Person(name="Wilson Father", tp="pj")
        a1.save()

        a2 = Person(name="Wilson Junior", tp="pf", father=a1)
        a2.save()

        a1.tp = "pf"
        a1.save()

        a2.reload()
        assert dict(a2.to_mongo()) == {
            "_id": a2.pk,
            "name": "Wilson Junior",
            "tp": "pf",
            "father": {"_id": a1.pk, "tp": "pf"},
        }

    def test_cached_reference_auto_sync_disabled(self):
        class Persone(Document):
            TYPES = (("pf", "PF"), ("pj", "PJ"))
            name = StringField()
            tp = StringField(choices=TYPES)

            father = CachedReferenceField("self", fields=("tp",), auto_sync=False)

        Persone.drop_collection()

        a1 = Persone(name="Wilson Father", tp="pj")
        a1.save()

        a2 = Persone(name="Wilson Junior", tp="pf", father=a1)
        a2.save()

        a1.tp = "pf"
        a1.save()

        assert Persone.objects._collection.find_one({"_id": a2.pk}) == {
            "_id": a2.pk,
            "name": "Wilson Junior",
            "tp": "pf",
            "father": {"_id": a1.pk, "tp": "pj"},
        }

    def test_cached_reference_embedded_fields(self):
        class Owner(EmbeddedDocument):
            TPS = (("n", "Normal"), ("u", "Urgent"))
            name = StringField()
            tp = StringField(verbose_name="Type", db_field="t", choices=TPS)

        class Animal(Document):
            name = StringField()
            tag = StringField()

            owner = EmbeddedDocumentField(Owner)

        class Ocorrence(Document):
            person = StringField()
            animal = CachedReferenceField(Animal, fields=["tag", "owner.tp"])

        Animal.drop_collection()
        Ocorrence.drop_collection()

        a = Animal(
            name="Leopard", tag="heavy", owner=Owner(tp="u", name="Wilson Júnior")
        )
        a.save()

        o = Ocorrence(person="teste", animal=a)
        o.save()
        assert dict(a.to_mongo(fields=["tag", "owner.tp"])) == {
            "_id": a.pk,
            "tag": "heavy",
            "owner": {"t": "u"},
        }
        assert o.to_mongo()["animal"]["tag"] == "heavy"
        assert o.to_mongo()["animal"]["owner"]["t"] == "u"

        # Check to_mongo with fields
        assert "animal" not in o.to_mongo(fields=["person"])

        # counts
        Ocorrence(person="teste 2").save()
        Ocorrence(person="teste 3").save()

        count = Ocorrence.objects(animal__tag="heavy", animal__owner__tp="u").count()
        assert count == 1

        ocorrence = Ocorrence.objects(
            animal__tag="heavy", animal__owner__tp="u"
        ).first()
        assert ocorrence.person == "teste"
        assert isinstance(ocorrence.animal, Animal)

    def test_cached_reference_embedded_list_fields(self):
        class Owner(EmbeddedDocument):
            name = StringField()
            tags = ListField(StringField())

        class Animal(Document):
            name = StringField()
            tag = StringField()

            owner = EmbeddedDocumentField(Owner)

        class Ocorrence(Document):
            person = StringField()
            animal = CachedReferenceField(Animal, fields=["tag", "owner.tags"])

        Animal.drop_collection()
        Ocorrence.drop_collection()

        a = Animal(
            name="Leopard",
            tag="heavy",
            owner=Owner(tags=["cool", "funny"], name="Wilson Júnior"),
        )
        a.save()

        o = Ocorrence(person="teste 2", animal=a)
        o.save()
        assert dict(a.to_mongo(fields=["tag", "owner.tags"])) == {
            "_id": a.pk,
            "tag": "heavy",
            "owner": {"tags": ["cool", "funny"]},
        }

        assert o.to_mongo()["animal"]["tag"] == "heavy"
        assert o.to_mongo()["animal"]["owner"]["tags"] == ["cool", "funny"]

        # counts
        Ocorrence(person="teste 2").save()
        Ocorrence(person="teste 3").save()

        query = Ocorrence.objects(
            animal__tag="heavy", animal__owner__tags="cool"
        )._query
        assert query == {"animal.owner.tags": "cool", "animal.tag": "heavy"}

        ocorrence = Ocorrence.objects(
            animal__tag="heavy", animal__owner__tags="cool"
        ).first()
        assert ocorrence.person == "teste 2"
        assert isinstance(ocorrence.animal, Animal)
@ -1,208 +0,0 @@
import datetime
import itertools
import math
import re

import pytest

from mongoengine import *
from tests.utils import MongoDBTestCase


class ComplexDateTimeFieldTest(MongoDBTestCase):
    def test_complexdatetime_storage(self):
        """Tests for complex datetime fields - which can handle
        microseconds without rounding.
        """

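        # ComplexDateTimeField persists datetimes as zero-padded,
        # separator-joined strings (e.g. datetime(2014, 1, 1) is stored as
        # "2014,01,01,00,00,00,000000"), which preserves full microsecond
        # precision and is what the regex assertions below check.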
        class LogEntry(Document):
            date = ComplexDateTimeField()
            date_with_dots = ComplexDateTimeField(separator=".")

        LogEntry.drop_collection()

        # Post UTC - microseconds are rounded (down) to the nearest
        # millisecond and dropped - with default DateTimeFields
        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999)
        log = LogEntry()
        log.date = d1
        log.save()
        log.reload()
        assert log.date == d1

        # Post UTC - microseconds are rounded (down) to the nearest
        # millisecond - with default DateTimeFields
        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9999)
        log.date = d1
        log.save()
        log.reload()
        assert log.date == d1

        # Pre UTC dates with microseconds below 1000 are dropped - with
        # default DateTimeFields
        d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999)
        log.date = d1
        log.save()
        log.reload()
        assert log.date == d1

        # Pre UTC microseconds above 1000 are wonky - with default
        # DateTimeFields. log.date would have an invalid microsecond value,
        # so a comparison date can't be constructed there.
        for i in range(1001, 3113, 33):
            d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, i)
            log.date = d1
            log.save()
            log.reload()
            assert log.date == d1
            log1 = LogEntry.objects.get(date=d1)
            assert log == log1

        # Test string padding
        microsecond = map(int, (math.pow(10, x) for x in range(6)))
        mm = dd = hh = ii = ss = [1, 10]

        for values in itertools.product([2014], mm, dd, hh, ii, ss, microsecond):
            stored = LogEntry(date=datetime.datetime(*values)).to_mongo()["date"]
            assert (
                re.match(r"^\d{4},\d{2},\d{2},\d{2},\d{2},\d{2},\d{6}$", stored)
                is not None
            )

        # Test separator - the stored string must use literal dots
        stored = LogEntry(date_with_dots=datetime.datetime(2014, 1, 1)).to_mongo()[
            "date_with_dots"
        ]
        assert (
            re.match(r"^\d{4}\.\d{2}\.\d{2}\.\d{2}\.\d{2}\.\d{2}\.\d{6}$", stored)
            is not None
        )

    def test_complexdatetime_usage(self):
        """Tests for complex datetime fields - which can handle
        microseconds without rounding.
        """

        class LogEntry(Document):
            date = ComplexDateTimeField()

        LogEntry.drop_collection()

        d1 = datetime.datetime(1950, 1, 1, 0, 0, 1, 999)
        log = LogEntry()
        log.date = d1
        log.save()

        log1 = LogEntry.objects.get(date=d1)
        assert log == log1

        # create 59 extra log entries for a total of 60
        for i in range(1951, 2010):
            d = datetime.datetime(i, 1, 1, 0, 0, 1, 999)
            LogEntry(date=d).save()

        assert LogEntry.objects.count() == 60

        # Test ordering
        logs = LogEntry.objects.order_by("date")
        i = 0
        while i < 59:
            assert logs[i].date <= logs[i + 1].date
            i += 1

        logs = LogEntry.objects.order_by("-date")
        i = 0
        while i < 59:
            assert logs[i].date >= logs[i + 1].date
            i += 1

        # Test searching
        logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1))
        assert logs.count() == 30

        logs = LogEntry.objects.filter(date__lte=datetime.datetime(1980, 1, 1))
        assert logs.count() == 30

        logs = LogEntry.objects.filter(
            date__lte=datetime.datetime(2011, 1, 1),
            date__gte=datetime.datetime(2000, 1, 1),
        )
        assert logs.count() == 10

        LogEntry.drop_collection()

        # Test microsecond-level ordering/filtering
        for microsecond in (99, 999, 9999, 10000):
            LogEntry(date=datetime.datetime(2015, 1, 1, 0, 0, 0, microsecond)).save()

        logs = list(LogEntry.objects.order_by("date"))
        for next_idx, log in enumerate(logs[:-1], start=1):
            next_log = logs[next_idx]
            assert log.date < next_log.date

        logs = list(LogEntry.objects.order_by("-date"))
        for next_idx, log in enumerate(logs[:-1], start=1):
            next_log = logs[next_idx]
            assert log.date > next_log.date

        logs = LogEntry.objects.filter(
            date__lte=datetime.datetime(2015, 1, 1, 0, 0, 0, 10000)
        )
        assert logs.count() == 4

    def test_no_default_value(self):
        class Log(Document):
            timestamp = ComplexDateTimeField()

        Log.drop_collection()

        log = Log()
        assert log.timestamp is None
        log.save()

        fetched_log = Log.objects.with_id(log.id)
        assert fetched_log.timestamp is None

    def test_default_static_value(self):
        NOW = datetime.datetime.utcnow()

        class Log(Document):
            timestamp = ComplexDateTimeField(default=NOW)

        Log.drop_collection()

        log = Log()
        assert log.timestamp == NOW
        log.save()

        fetched_log = Log.objects.with_id(log.id)
        assert fetched_log.timestamp == NOW

    def test_default_callable(self):
        NOW = datetime.datetime.utcnow()

        class Log(Document):
            timestamp = ComplexDateTimeField(default=datetime.datetime.utcnow)

        Log.drop_collection()

        log = Log()
        assert log.timestamp >= NOW
        log.save()

        fetched_log = Log.objects.with_id(log.id)
        assert fetched_log.timestamp >= NOW

    def test_setting_bad_value_does_not_raise_unless_validate_is_called(self):
        # test regression of #2253

        class Log(Document):
            timestamp = ComplexDateTimeField()

        Log.drop_collection()

        log = Log(timestamp="garbage")
        with pytest.raises(ValidationError):
            log.validate()

        with pytest.raises(ValidationError):
            log.save()
@ -1,163 +0,0 @@
import datetime

import pytest

try:
    import dateutil
except ImportError:
    dateutil = None

from mongoengine import *
from tests.utils import MongoDBTestCase


class TestDateField(MongoDBTestCase):
    def test_date_from_empty_string(self):
        """
        Ensure an exception is raised when trying to
        cast an empty string to datetime.
        """

        class MyDoc(Document):
            dt = DateField()

        md = MyDoc(dt="")
        with pytest.raises(ValidationError):
            md.save()

    def test_date_from_whitespace_string(self):
        """
        Ensure an exception is raised when trying to
        cast a whitespace-only string to datetime.
        """

        class MyDoc(Document):
            dt = DateField()

        md = MyDoc(dt=" ")
        with pytest.raises(ValidationError):
            md.save()

    def test_default_values_today(self):
        """Ensure that default field values are used when creating
        a document.
        """

        class Person(Document):
            day = DateField(default=datetime.date.today)

        person = Person()
        person.validate()
        # repeated attribute access must return the same (stable) default
        assert person.day == person.day
        assert person.day == datetime.date.today()
        assert person._data["day"] == person.day

    def test_date(self):
        """Tests showing pymongo date fields

        See: http://api.mongodb.org/python/current/api/bson/son.html#dt
        """

        class LogEntry(Document):
            date = DateField()

        LogEntry.drop_collection()

        # Test can save dates
        log = LogEntry()
        log.date = datetime.date.today()
        log.save()
        log.reload()
        assert log.date == datetime.date.today()

        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999)
        d2 = datetime.datetime(1970, 1, 1, 0, 0, 1)
        log = LogEntry()
        log.date = d1
        log.save()
        log.reload()
        assert log.date == d1.date()
        assert log.date == d2.date()

        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9999)
        d2 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9000)
        log.date = d1
        log.save()
        log.reload()
        assert log.date == d1.date()
        assert log.date == d2.date()

    def test_regular_usage(self):
        """Tests for regular datetime fields"""

        class LogEntry(Document):
            date = DateField()

        LogEntry.drop_collection()

        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1)
        log = LogEntry()
        log.date = d1
        log.validate()
        log.save()

        for query in (d1, d1.isoformat(" ")):
            log1 = LogEntry.objects.get(date=query)
            assert log == log1

        if dateutil:
            log1 = LogEntry.objects.get(date=d1.isoformat("T"))
            assert log == log1

        # create 19 additional log entries for a total of 20
        for i in range(1971, 1990):
            d = datetime.datetime(i, 1, 1, 0, 0, 1)
            LogEntry(date=d).save()

        assert LogEntry.objects.count() == 20

        # Test ordering
        logs = LogEntry.objects.order_by("date")
        i = 0
        while i < 19:
            assert logs[i].date <= logs[i + 1].date
            i += 1

        logs = LogEntry.objects.order_by("-date")
        i = 0
        while i < 19:
            assert logs[i].date >= logs[i + 1].date
            i += 1

        # Test searching
        logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1))
        assert logs.count() == 10

    def test_validation(self):
        """Ensure that invalid values cannot be assigned to datetime
        fields.
        """

        class LogEntry(Document):
            time = DateField()

        log = LogEntry()
        log.time = datetime.datetime.now()
        log.validate()

        log.time = datetime.date.today()
        log.validate()

        log.time = datetime.datetime.now().isoformat(" ")
        log.validate()

        if dateutil:
            log.time = datetime.datetime.now().isoformat("T")
            log.validate()

        log.time = -1
        with pytest.raises(ValidationError):
            log.validate()
        log.time = "ABC"
        with pytest.raises(ValidationError):
            log.validate()
@ -1,232 +0,0 @@
import datetime as dt

import pytest

try:
    import dateutil
except ImportError:
    dateutil = None

from mongoengine import *
from mongoengine import connection
from tests.utils import MongoDBTestCase


class TestDateTimeField(MongoDBTestCase):
    def test_datetime_from_empty_string(self):
        """
        Ensure an exception is raised when trying to
        cast an empty string to datetime.
        """

        class MyDoc(Document):
            dt = DateTimeField()

        md = MyDoc(dt="")
        with pytest.raises(ValidationError):
            md.save()

    def test_datetime_from_whitespace_string(self):
        """
        Ensure an exception is raised when trying to
        cast a whitespace-only string to datetime.
        """

        class MyDoc(Document):
            dt = DateTimeField()

        md = MyDoc(dt=" ")
        with pytest.raises(ValidationError):
            md.save()

    def test_default_value_utcnow(self):
        """Ensure that default field values are used when creating
        a document.
        """

        class Person(Document):
            created = DateTimeField(default=dt.datetime.utcnow)

        utcnow = dt.datetime.utcnow()
        person = Person()
        person.validate()
        person_created_t0 = person.created
        assert person.created - utcnow < dt.timedelta(seconds=1)
        assert person_created_t0 == person.created  # make sure it does not change
        assert person._data["created"] == person.created

    def test_handling_microseconds(self):
        """Tests showing pymongo datetime fields handling of microseconds.
        Microseconds are rounded to the nearest millisecond and pre UTC
        handling is wonky.

        See: http://api.mongodb.org/python/current/api/bson/son.html#dt
        """

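        # (BSON datetimes carry only millisecond precision, which is why the
        # sub-millisecond part of a Python datetime is lost on save.)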
        class LogEntry(Document):
            date = DateTimeField()

        LogEntry.drop_collection()

        # Test can save dates
        log = LogEntry()
        log.date = dt.date.today()
        log.save()
        log.reload()
        assert log.date.date() == dt.date.today()

        # Post UTC - microseconds are rounded (down) to the nearest
        # millisecond and dropped
        d1 = dt.datetime(1970, 1, 1, 0, 0, 1, 999)
        d2 = dt.datetime(1970, 1, 1, 0, 0, 1)
        log = LogEntry()
        log.date = d1
        log.save()
        log.reload()
        assert log.date != d1
        assert log.date == d2

        # Post UTC - microseconds are rounded (down) to the nearest millisecond
        d1 = dt.datetime(1970, 1, 1, 0, 0, 1, 9999)
        d2 = dt.datetime(1970, 1, 1, 0, 0, 1, 9000)
        log.date = d1
        log.save()
        log.reload()
        assert log.date != d1
        assert log.date == d2

    def test_regular_usage(self):
        """Tests for regular datetime fields"""

        class LogEntry(Document):
            date = DateTimeField()

        LogEntry.drop_collection()

        d1 = dt.datetime(1970, 1, 1, 0, 0, 1)
        log = LogEntry()
        log.date = d1
        log.validate()
        log.save()

        for query in (d1, d1.isoformat(" ")):
            log1 = LogEntry.objects.get(date=query)
            assert log == log1

        if dateutil:
            log1 = LogEntry.objects.get(date=d1.isoformat("T"))
            assert log == log1

        # create 19 additional log entries for a total of 20
        for i in range(1971, 1990):
            d = dt.datetime(i, 1, 1, 0, 0, 1)
            LogEntry(date=d).save()

        assert LogEntry.objects.count() == 20

        # Test ordering
        logs = LogEntry.objects.order_by("date")
        i = 0
        while i < 19:
            assert logs[i].date <= logs[i + 1].date
            i += 1

        logs = LogEntry.objects.order_by("-date")
        i = 0
        while i < 19:
            assert logs[i].date >= logs[i + 1].date
            i += 1

        # Test searching
        logs = LogEntry.objects.filter(date__gte=dt.datetime(1980, 1, 1))
        assert logs.count() == 10

        logs = LogEntry.objects.filter(date__lte=dt.datetime(1980, 1, 1))
        assert logs.count() == 10

        logs = LogEntry.objects.filter(
            date__lte=dt.datetime(1980, 1, 1), date__gte=dt.datetime(1975, 1, 1)
        )
        assert logs.count() == 5

    def test_datetime_validation(self):
        """Ensure that invalid values cannot be assigned to datetime
        fields.
        """

        class LogEntry(Document):
            time = DateTimeField()

        log = LogEntry()
        log.time = dt.datetime.now()
        log.validate()

        log.time = dt.date.today()
        log.validate()

        log.time = dt.datetime.now().isoformat(" ")
        log.validate()

        log.time = "2019-05-16 21:42:57.897847"
        log.validate()

        if dateutil:
            log.time = dt.datetime.now().isoformat("T")
            log.validate()

        log.time = -1
        with pytest.raises(ValidationError):
            log.validate()
        log.time = "ABC"
        with pytest.raises(ValidationError):
            log.validate()
        log.time = "2019-05-16 21:GARBAGE:12"
        with pytest.raises(ValidationError):
            log.validate()
        log.time = "2019-05-16 21:42:57.GARBAGE"
        with pytest.raises(ValidationError):
            log.validate()
        log.time = "2019-05-16 21:42:57.123.456"
        with pytest.raises(ValidationError):
            log.validate()

    def test_parse_datetime_as_str(self):
        class DTDoc(Document):
            date = DateTimeField()

        date_str = "2019-03-02 22:26:01"

        # make sure that passing a parsable datetime works
        dtd = DTDoc()
        dtd.date = date_str
        assert isinstance(dtd.date, str)
        dtd.save()
        dtd.reload()

        assert isinstance(dtd.date, dt.datetime)
        assert str(dtd.date) == date_str

        dtd.date = "January 1st, 9999999999"
        with pytest.raises(ValidationError):
            dtd.validate()


class TestDateTimeTzAware(MongoDBTestCase):
    def test_datetime_tz_aware_mark_as_changed(self):
        # Reset the connections
        connection._connection_settings = {}
        connection._connections = {}
        connection._dbs = {}

        connect(db="mongoenginetest", tz_aware=True)

        class LogEntry(Document):
            time = DateTimeField()

        LogEntry.drop_collection()

        LogEntry(time=dt.datetime(2013, 1, 1, 0, 0, 0)).save()

        log = LogEntry.objects.first()
        log.time = dt.datetime(2013, 1, 1, 0, 0, 0)
        assert ["time"] == log._changed_fields
Some files were not shown because too many files have changed in this diff.