Compare commits

466 commits on master (listed from 2f4b2aadb4 through 6a4c342e45)
143 .github/workflows/github-actions.yml (vendored, new file)

@@ -0,0 +1,143 @@
name: MongoengineCI
on:
  # All PR
  pull_request:
  # master branch merge
  push:
    branches:
      - master
  # release tags
  create:
    tags:
      - 'v[0-9]+\.[0-9]+\.[0-9]+*'
env:
  MONGODB_3_6: 3.6.14
  MONGODB_4_0: 4.0.23
  MONGODB_4_2: 4.2
  MONGODB_4_4: 4.4

  PYMONGO_3_4: 3.4
  PYMONGO_3_6: 3.6
  PYMONGO_3_9: 3.9
  PYMONGO_3_11: 3.11

  MAIN_PYTHON_VERSION: 3.7

jobs:
  linting:
    # Run pre-commit (https://pre-commit.com/)
    # which runs pre-configured linter & autoformatter
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python 3.7
        uses: actions/setup-python@v2
        with:
          python-version: 3.7
      - run: bash .github/workflows/install_ci_python_dep.sh
      - run: pre-commit run -a

  test:
    # Test suite run against recent python versions
    # and against a few combination of MongoDB and pymongo
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        python-version: [3.6, 3.7, 3.8, 3.9, "3.10", pypy3]
        MONGODB: [$MONGODB_4_0]
        PYMONGO: [$PYMONGO_3_11]
        include:
          - python-version: 3.7
            MONGODB: $MONGODB_3_6
            PYMONGO: $PYMONGO_3_9
          - python-version: 3.7
            MONGODB: $MONGODB_4_2
            PYMONGO: $PYMONGO_3_6
          - python-version: 3.7
            MONGODB: $MONGODB_4_4
            PYMONGO: $PYMONGO_3_11
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: install mongo and ci dependencies
        run: |
          bash .github/workflows/install_mongo.sh ${{ matrix.MONGODB }}
          bash .github/workflows/install_ci_python_dep.sh
          bash .github/workflows/start_mongo.sh ${{ matrix.MONGODB }}
      - name: tox dry-run (to pre-install venv)
        run: tox -e $(echo py${{ matrix.python-version }}-mg${{ matrix.PYMONGO }} | tr -d . | sed -e 's/pypypy/pypy/') -- -a "-k=test_ci_placeholder"
      - name: Run test suite
        run: tox -e $(echo py${{ matrix.python-version }}-mg${{ matrix.PYMONGO }} | tr -d . | sed -e 's/pypypy/pypy/') -- -a "--cov=mongoengine"
      - name: Send coverage to Coveralls
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          COVERALLS_SERVICE_NAME: github
        if: ${{ matrix.python-version == env.MAIN_PYTHON_VERSION }}
        run: coveralls

  build_doc_dryrun:
    # ensures that readthedocs can be built continuously
    # to avoid that it breaks when new releases are being created
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: 3.7
      - name: install python dep
        run: |
          pip install -e .
          pip install -r docs/requirements.txt
      - name: build doc
        run: |
          cd docs
          make html-readthedocs

  build-n-publish-dummy:
    runs-on: ubuntu-latest
    needs: [linting, test, build_doc_dryrun]
    if: github.event_name != 'pull_request'
    steps:
      - uses: actions/checkout@master
      - name: Set up Python 3.7
        uses: actions/setup-python@v1
        with:
          python-version: 3.7
      - name: build dummy wheel for test-pypi
        run: |
          pip install wheel
          python setup.py egg_info -b ".dev`date '+%Y%m%d%H%M%S'`" build sdist bdist_wheel
#      - name: publish test-pypi
#        # Although working and recommended, test-pypi has a limit
#        # in the size of projects so it's better to avoid publishing
#        # until there is a way to garbage collect these dummy releases
#        uses: pypa/gh-action-pypi-publish@master
#        with:
#          password: ${{ secrets.test_pypi_token }}
#          repository_url: https://test.pypi.org/legacy/

  build-n-publish:
    runs-on: ubuntu-latest
    needs: [linting, test, build_doc_dryrun, build-n-publish-dummy]
    if: github.event_name == 'create' && startsWith(github.ref, 'refs/tags/v')
    steps:
      - uses: actions/checkout@master
      - name: Set up Python 3.7
        uses: actions/setup-python@v1
        with:
          python-version: 3.7
      # todo separate build from publish
      # https://stackoverflow.com/questions/59349905/which-properties-does-github-event-in-a-github-workflow-have
      - name: build dummy wheel for test-pypi
        run: |
          pip install wheel
          python setup.py sdist bdist_wheel
      - name: publish pypi
        uses: pypa/gh-action-pypi-publish@master
        with:
          password: ${{ secrets.pypi_token }}
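Both tox invocations in the workflow above derive the tox environment name from the matrix entry with a small shell pipeline (``tr -d .`` to strip dots, then ``sed -e 's/pypypy/pypy/'`` to undo the doubled prefix that ``pypy3`` entries would otherwise get). A quick Python sanity check of that mapping, illustrative only and not part of the diff:

.. code-block:: python

    # Reproduces the workflow's env-name mangling for a couple of matrix entries.
    def tox_env(python_version: str, pymongo: str) -> str:
        name = f"py{python_version}-mg{pymongo}".replace(".", "")  # tr -d .
        return name.replace("pypypy", "pypy")                      # sed 's/pypypy/pypy/'

    assert tox_env("3.7", "3.11") == "py37-mg311"
    assert tox_env("pypy3", "3.11") == "pypy3-mg311"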
5 .github/workflows/install_ci_python_dep.sh (vendored, new file)

@@ -0,0 +1,5 @@
#!/bin/bash
pip install --upgrade pip
pip install coveralls
pip install pre-commit
pip install tox
18 .github/workflows/install_mongo.sh (vendored, new file)

@@ -0,0 +1,18 @@
#!/bin/bash

MONGODB=$1

# Mongo > 4.0 follows different name convention for download links
mongo_build=mongodb-linux-x86_64-${MONGODB}

if [[ "$MONGODB" == *"4.2"* ]]; then
    mongo_build=mongodb-linux-x86_64-ubuntu1804-v${MONGODB}-latest
elif [[ "$MONGODB" == *"4.4"* ]]; then
    mongo_build=mongodb-linux-x86_64-ubuntu1804-v${MONGODB}-latest
fi

wget http://fastdl.mongodb.org/linux/$mongo_build.tgz
tar xzf $mongo_build.tgz

mongodb_dir=$(find ${PWD}/ -type d -name "mongodb-linux-x86_64*")
$mongodb_dir/bin/mongod --version
9 .github/workflows/start_mongo.sh (vendored, new file)

@@ -0,0 +1,9 @@
#!/bin/bash

MONGODB=$1

mongodb_dir=$(find ${PWD}/ -type d -name "mongodb-linux-x86_64*")

mkdir $mongodb_dir/data
$mongodb_dir/bin/mongod --dbpath $mongodb_dir/data --logpath $mongodb_dir/mongodb.log --fork
mongo --eval 'db.version();' # Make sure mongo is awake
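The last line of the script uses the legacy ``mongo`` shell to confirm the daemon came up. A rough PyMongo equivalent of that liveness check, sketched with an assumed host and timeout:

.. code-block:: python

    from pymongo import MongoClient

    # Raises ServerSelectionTimeoutError if mongod is not reachable.
    client = MongoClient("mongodb://127.0.0.1:27017", serverSelectionTimeoutMS=2000)
    print(client.server_info()["version"])  # same info as mongo --eval 'db.version();'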
11 .gitignore (vendored)

@@ -1,8 +1,15 @@
.*
!.gitignore
*~
*.py[co]
.*.sw[po]
.cache/
.coverage
.coveragerc
.env
.idea/
.pytest_cache/
.tox/
.eggs/
*.egg
docs/.build
docs/_build
@@ -13,8 +20,6 @@ env/
.settings
.project
.pydevproject
tests/test_bugfix.py
htmlcov/
venv
venv3
scratchpad
@@ -1,22 +0,0 @@
pylint:
  disable:
    # We use this a lot (e.g. via document._meta)
    - protected-access

  options:
    additional-builtins:
      # add xrange and long as valid built-ins. In Python 3, xrange is
      # translated into range and long is translated into int via 2to3 (see
      # "use_2to3" in setup.py). This should be removed when we drop Python
      # 2 support (which probably won't happen any time soon).
      - xrange
      - long

pyflakes:
  disable:
    # undefined variables are already covered by pylint (and exclude
    # xrange & long)
    - F821

ignore-paths:
  - benchmark.py
26 .pre-commit-config.yaml (new file)

@@ -0,0 +1,26 @@
fail_fast: false
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.0.1
    hooks:
      - id: check-merge-conflict
      - id: debug-statements
      - id: trailing-whitespace
      - id: end-of-file-fixer
  - repo: https://github.com/ambv/black
    rev: 21.5b2
    hooks:
      - id: black
  - repo: https://gitlab.com/pycqa/flake8
    rev: 3.9.2
    hooks:
      - id: flake8
  - repo: https://github.com/asottile/pyupgrade
    rev: v2.19.1
    hooks:
      - id: pyupgrade
        args: [--py36-plus]
  - repo: https://github.com/pycqa/isort
    rev: 5.8.0
    hooks:
      - id: isort
20 .readthedocs.yml (new file)

@@ -0,0 +1,20 @@
# .readthedocs.yml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details

# Required
version: 2

# Build documentation in the docs/ directory with Sphinx
sphinx:
  configuration: docs/conf.py

# Optionally set the version of Python and requirements required to build your docs
python:
  version: 3.7
  install:
    - requirements: docs/requirements.txt
    # docs/conf.py is importing mongoengine
    # so mongoengine needs to be installed as well
    - method: setuptools
      path: .
111 .travis.yml (deleted)

@@ -1,111 +0,0 @@
# For full coverage, we'd have to test all supported Python, MongoDB, and
# PyMongo combinations. However, that would result in an overly long build
# with a very large number of jobs, hence we only test a subset of all the
# combinations:
# * MongoDB v3.4 & the latest PyMongo v3.x is currently the "main" setup,
#   tested against Python v2.7, v3.5, v3.6, and PyPy.
# * Besides that, we test the lowest actively supported Python/MongoDB/PyMongo
#   combination: MongoDB v3.4, PyMongo v3.4, Python v2.7.
# * MongoDB v3.6 is tested against Python v3.6, and PyMongo v3.6, v3.7, v3.8.
#
# We should periodically check MongoDB Server versions supported by MongoDB
# Inc., add newly released versions to the test matrix, and remove versions
# which have reached their End of Life. See:
# 1. https://www.mongodb.com/support-policy.
# 2. https://docs.mongodb.com/ecosystem/drivers/driver-compatibility-reference/#python-driver-compatibility
#
# Reminder: Update README.rst if you change MongoDB versions we test.


language: python
python:
  - 2.7
  - 3.5
  - 3.6
  - 3.7
  - pypy
  - pypy3

dist: xenial

env:
  global:
    - MONGODB_3_4=3.4.17
    - MONGODB_3_6=3.6.12
    - PYMONGO_3_6=3.6
    - PYMONGO_3_4=3.4
  matrix:
    - MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_6}

matrix:
  # Finish the build as soon as one job fails
  fast_finish: true

  include:
    - python: 2.7
      env: MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_4}
    - python: 3.7
      env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_6}


install:
  # Install Mongo
  - wget http://fastdl.mongodb.org/linux/mongodb-linux-x86_64-${MONGODB}.tgz
  - tar xzf mongodb-linux-x86_64-${MONGODB}.tgz
  - ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --version
  # Install Python dependencies.
  - pip install --upgrade pip
  - pip install coveralls
  - pip install flake8 flake8-import-order
  - pip install tox # tox 3.11.0 has requirement virtualenv>=14.0.0
  - pip install virtualenv # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32)
  # Install the tox venv.
  - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -e test
  # Install black for Python v3.7 only.
  - if [[ $TRAVIS_PYTHON_VERSION == '3.7' ]]; then pip install black; fi

before_script:
  - mkdir ${PWD}/mongodb-linux-x86_64-${MONGODB}/data
  - ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --dbpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/data --logpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/mongodb.log --fork
  - if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then flake8 .; else echo "flake8 only runs on py27"; fi # Run flake8 for Python 2.7 only
  - if [[ $TRAVIS_PYTHON_VERSION == '3.7' ]]; then black --check .; else echo "black only runs on py37"; fi # Run black for Python 3.7 only
  - mongo --eval 'db.version();' # Make sure mongo is awake

script:
  - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- --with-coverage

# For now only submit coveralls for Python v2.7. Python v3.x currently shows
# 0% coverage. That's caused by 'use_2to3', which builds the py3-compatible
# code in a separate dir and runs tests on that.
after_success:
  - if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then coveralls --verbose; fi

notifications:
  irc: irc.freenode.org#mongoengine

# Only run builds on the master branch and GitHub releases (tagged as vX.Y.Z)
branches:
  only:
    - master
    - /^v.*$/

# Whenever a new release is created via GitHub, publish it on PyPI.
deploy:
  provider: pypi
  user: the_drow
  password:
    secure: QMyatmWBnC6ZN3XLW2+fTBDU4LQcp1m/LjR2/0uamyeUzWKdlOoh/Wx5elOgLwt/8N9ppdPeG83ose1jOz69l5G0MUMjv8n/RIcMFSpCT59tGYqn3kh55b0cIZXFT9ar+5cxlif6a5rS72IHm5li7QQyxexJIII6Uxp0kpvUmek=

  # Create a source distribution and a pure python wheel for faster installs.
  distributions: "sdist bdist_wheel"

  # Only deploy on tagged commits (aka GitHub releases) and only for the parent
  # repo's builds running Python v2.7 along with PyMongo v3.x and MongoDB v3.4.
  # We run Travis against many different Python, PyMongo, and MongoDB versions
  # and we don't want the deploy to occur multiple times).
  on:
    tags: true
    repo: MongoEngine/mongoengine
    condition: ($PYMONGO = ${PYMONGO_3_6}) && ($MONGODB = ${MONGODB_3_4})
    python: 2.7
11 AUTHORS

@@ -252,3 +252,14 @@ that much better:
* Paulo Amaral (https://github.com/pauloAmaral)
* Gaurav Dadhania (https://github.com/GVRV)
* Yurii Andrieiev (https://github.com/yandrieiev)
* Filip Kucharczyk (https://github.com/Pacu2)
* Eric Timmons (https://github.com/daewok)
* Matthew Simpson (https://github.com/mcsimps2)
* Leonardo Domingues (https://github.com/leodmgs)
* Agustin Barto (https://github.com/abarto)
* Stankiewicz Mateusz (https://github.com/mas15)
* Felix Schultheiß (https://github.com/felix-smashdocs)
* Jan Stein (https://github.com/janste63)
* Timothé Perez (https://github.com/AchilleAsh)
* oleksandr-l5 (https://github.com/oleksandr-l5)
* Ido Shraga (https://github.com/idoshr)
@@ -20,19 +20,47 @@ post to the `user group <http://groups.google.com/group/mongoengine-users>`
Supported Interpreters
----------------------

MongoEngine supports CPython 2.7 and newer. Language
features not supported by all interpreters can not be used.
The codebase is written in python 2 so you must be using python 2
when developing new features. Compatibility of the library with Python 3
relies on the 2to3 package that gets executed as part of the installation
build. You should ensure that your code is properly converted by
`2to3 <http://docs.python.org/library/2to3.html>`_.
MongoEngine supports CPython 3.5 and newer as well as Pypy3.
Language features not supported by all interpreters can not be used.

Python3 codebase
----------------------

Since 0.20, the codebase is exclusively Python 3.

Earlier versions were exclusively Python2, and were relying on 2to3 to support Python3 installs.
Travis runs the tests against the main Python 3.x versions.


Style Guide
-----------

MongoEngine uses `black <https://github.com/python/black>`_ for code
formatting.
MongoEngine's codebase is auto-formatted with `black <https://github.com/python/black>`_, imports are ordered with `isort <https://pycqa.github.io/isort/>`_
and other tools like flake8 are also used. Those tools will run as part of the CI and will fail in case the code is not formatted properly.

To install all development tools, simply run the following commands:

.. code-block:: console

    $ python -m pip install -r requirements-dev.txt


You can install `pre-commit <https://pre-commit.com/>`_ into your git hooks,
to automatically check and fix any formatting issue before creating a
git commit.

To enable ``pre-commit`` simply run:

.. code-block:: console

    $ pre-commit install

See the ``.pre-commit-config.yaml`` configuration file for more information
on how it works.

pre-commit will now run upon every commit and will reject anything that doesn't comply.

You can also run all the checks with ``pre-commit run -a``, this is what is used in the CI.

Testing
-------
@@ -54,7 +82,7 @@ General Guidelines
  should adapt to the breaking change in docs/upgrade.rst.
- Write inline documentation for new classes and methods.
- Write tests and make sure they pass (make sure you have a mongod
  running on the default port, then execute ``python setup.py nosetests``
  running on the default port, then execute ``python setup.py test``
  from the cmd line to run the test suite).
- Ensure tests pass on all supported Python, PyMongo, and MongoDB versions.
  You can test various Python and PyMongo versions locally by executing
40 README.rst

@@ -12,9 +12,8 @@ MongoEngine
.. image:: https://coveralls.io/repos/github/MongoEngine/mongoengine/badge.svg?branch=master
   :target: https://coveralls.io/github/MongoEngine/mongoengine?branch=master

.. image:: https://landscape.io/github/MongoEngine/mongoengine/master/landscape.svg?style=flat
   :target: https://landscape.io/github/MongoEngine/mongoengine/master
   :alt: Code Health
.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
   :target: https://github.com/ambv/black

About
=====
@@ -26,15 +25,15 @@ an `API reference <https://mongoengine-odm.readthedocs.io/apireference.html>`_.

Supported MongoDB Versions
==========================
MongoEngine is currently tested against MongoDB v3.4 and v3.6. Future versions
MongoEngine is currently tested against MongoDB v3.4, v3.6 and v4.0. Future versions
should be supported as well, but aren't actively tested at the moment. Make
sure to open an issue or submit a pull request if you experience any problems
with MongoDB version > 3.6.
with MongoDB version > 4.0.

Installation
============
We recommend the use of `virtualenv <https://virtualenv.pypa.io/>`_ and of
`pip <https://pip.pypa.io/>`_. You can then use ``pip install -U mongoengine``.
`pip <https://pip.pypa.io/>`_. You can then use ``python -m pip install -U mongoengine``.
You may also have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_
and thus you can use ``easy_install -U mongoengine``. Another option is
`pipenv <https://docs.pipenv.org/>`_. You can then use ``pipenv install mongoengine``
@@ -42,13 +41,14 @@ to both create the virtual environment and install the package. Otherwise, you c
download the source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and
run ``python setup.py install``.

The support for Python2 was dropped with MongoEngine 0.20.0

Dependencies
============
All of the dependencies can easily be installed via `pip <https://pip.pypa.io/>`_.
All of the dependencies can easily be installed via `python -m pip <https://pip.pypa.io/>`_.
At the very least, you'll need these two packages to use MongoEngine:

- pymongo>=3.4
- six>=1.10.0

If you utilize a ``DateTimeField``, you might also use a more flexible date parser:
@@ -58,6 +58,10 @@ If you need to use an ``ImageField`` or ``ImageGridFsProxy``:

- Pillow>=2.0.0

If you need to use signals:

- blinker>=1.3

Examples
========
Some simple examples of what MongoEngine code looks like:
@@ -91,12 +95,11 @@ Some simple examples of what MongoEngine code looks like:

    # Iterate over all posts using the BlogPost superclass
    >>> for post in BlogPost.objects:
    ...     print '===', post.title, '==='
    ...     print('===', post.title, '===')
    ...     if isinstance(post, TextPost):
    ...         print post.content
    ...         print(post.content)
    ...     elif isinstance(post, LinkPost):
    ...         print 'Link:', post.url
    ...     print
    ...         print('Link:', post.url)
    ...

    # Count all blog posts and its subtypes
@@ -116,7 +119,8 @@ Some simple examples of what MongoEngine code looks like:
Tests
=====
To run the test suite, ensure you are running a local instance of MongoDB on
the standard port and have ``nose`` installed. Then, run ``python setup.py nosetests``.
the standard port and have ``pytest`` installed. Then, run ``python setup.py test``
or simply ``pytest``.

To run the test suite on every supported Python and PyMongo version, you can
use ``tox``. You'll need to make sure you have each supported Python version
@@ -125,20 +129,18 @@ installed in your environment and then:

.. code-block:: shell

    # Install tox
    $ pip install tox
    $ python -m pip install tox
    # Run the test suites
    $ tox

If you wish to run a subset of tests, use the nosetests convention:
If you wish to run a subset of tests, use the pytest convention:

.. code-block:: shell

    # Run all the tests in a particular test file
    $ python setup.py nosetests --tests tests/fields/fields.py
    $ pytest tests/fields/test_fields.py
    # Run only particular test class in that file
    $ python setup.py nosetests --tests tests/fields/fields.py:FieldTest
    # Use the -s option if you want to print some debug statements or use pdb
    $ python setup.py nosetests --tests tests/fields/fields.py:FieldTest -s
    $ pytest tests/fields/test_fields.py::TestField

Community
=========
@@ -45,7 +45,7 @@ def test_basic():

    print(
        "Doc setattr: %.3fus"
        % (timeit(lambda: setattr(b, "name", "New name"), 10000) * 10 ** 6)
        % (timeit(lambda: setattr(b, "name", "New name"), 10000) * 10 ** 6)  # noqa B010
    )

    print("Doc to mongo: %.3fus" % (timeit(b.to_mongo, 1000) * 10 ** 6))
@@ -4,12 +4,14 @@ import timeit


def main():
    setup = """
from pymongo import MongoClient

connection = MongoClient()
connection.drop_database('mongoengine_benchmark_test')
"""

    stmt = """
from pymongo import MongoClient

connection = MongoClient()

db = connection.mongoengine_benchmark_test
@@ -29,7 +31,7 @@ myNoddys = noddy.find()
    print("-" * 100)
    print("PyMongo: Creating 10000 dictionaries.")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print("{}s".format(t.timeit(1)))
    print(f"{t.timeit(1)}s")

    stmt = """
from pymongo import MongoClient, WriteConcern
@@ -52,10 +54,11 @@ myNoddys = noddy.find()
    print("-" * 100)
    print('PyMongo: Creating 10000 dictionaries (write_concern={"w": 0}).')
    t = timeit.Timer(stmt=stmt, setup=setup)
    print("{}s".format(t.timeit(1)))
    print(f"{t.timeit(1)}s")

    setup = """
from pymongo import MongoClient

connection = MongoClient()
connection.drop_database('mongoengine_benchmark_test')
connection.close()
@@ -81,7 +84,7 @@ myNoddys = Noddy.objects()
    print("-" * 100)
    print("MongoEngine: Creating 10000 dictionaries.")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print("{}s".format(t.timeit(1)))
    print(f"{t.timeit(1)}s")

    stmt = """
for i in range(10000):
@@ -99,7 +102,7 @@ myNoddys = Noddy.objects()
    print("-" * 100)
    print("MongoEngine: Creating 10000 dictionaries (using a single field assignment).")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print("{}s".format(t.timeit(1)))
    print(f"{t.timeit(1)}s")

    stmt = """
for i in range(10000):
@@ -115,7 +118,7 @@ myNoddys = Noddy.objects()
    print("-" * 100)
    print('MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}).')
    t = timeit.Timer(stmt=stmt, setup=setup)
    print("{}s".format(t.timeit(1)))
    print(f"{t.timeit(1)}s")

    stmt = """
for i in range(10000):
@@ -133,7 +136,7 @@ myNoddys = Noddy.objects()
        'MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}, validate=False).'
    )
    t = timeit.Timer(stmt=stmt, setup=setup)
    print("{}s".format(t.timeit(1)))
    print(f"{t.timeit(1)}s")

    stmt = """
for i in range(10000):
@@ -151,7 +154,7 @@ myNoddys = Noddy.objects()
        'MongoEngine: Creating 10000 dictionaries (force_insert=True, write_concern={"w": 0}, validate=False).'
    )
    t = timeit.Timer(stmt=stmt, setup=setup)
    print("{}s".format(t.timeit(1)))
    print(f"{t.timeit(1)}s")


if __name__ == "__main__":
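The benchmark changes above only swap ``"{}s".format(...)`` for f-strings; the measurement pattern is untouched. For reference, a self-contained sketch of that ``timeit.Timer`` pattern with a toy statement (not from the repo):

.. code-block:: python

    import timeit

    setup = "xs = list(range(1000))"  # executed once, not timed
    stmt = "sum(xs)"                  # the statement being measured

    t = timeit.Timer(stmt=stmt, setup=setup)
    print(f"{t.timeit(1000)}s")       # the f-string form the diff migrates to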
@@ -33,8 +33,14 @@ clean:

html:
	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
	@echo
	@echo "Build finished. Check $(BUILDDIR)/html/index.html"

html-readthedocs:
	$(SPHINXBUILD) -T -E -b readthedocs $(ALLSPHINXOPTS) $(BUILDDIR)/html
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."


dirhtml:
	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
	@echo
@@ -75,6 +75,7 @@ Fields

.. autoclass:: mongoengine.fields.StringField
.. autoclass:: mongoengine.fields.URLField
.. autoclass:: mongoengine.fields.EmailField
.. autoclass:: mongoengine.fields.EnumField
.. autoclass:: mongoengine.fields.IntField
.. autoclass:: mongoengine.fields.LongField
.. autoclass:: mongoengine.fields.FloatField
@@ -1,4 +1,5 @@

=========
Changelog
=========
@@ -6,6 +7,77 @@ Changelog
Development
===========
- (Fill this out as you fix issues and develop your features).
- EnumField improvements: now `choices` limits the values of an enum to allow
- Fix deepcopy of EmbeddedDocument #2202
- Fix error when using precision=0 with DecimalField #2535
- Add support for regex and whole word text search query #2568

Changes in 0.23.1
===========
- Bug fix: ignore LazyReferenceFields when clearing _changed_fields #2484
- Improve connection doc #2481

Changes in 0.23.0
=================
- Bugfix: manually setting SequenceField in DynamicDocument doesn't increment the counter #2471
- Add MongoDB 4.2 and 4.4 to CI
- Add support for allowDiskUse on querysets #2468

Changes in 0.22.1
=================
- Declare that Py3.5 is not supported in package metadata #2449
- Moved CI from Travis to Github-Actions

Changes in 0.22.0
=================
- Fix LazyReferenceField dereferencing in embedded documents #2426
- Fix regarding the recent use of Cursor.__spec in .count() that was interfering with mongomock #2425
- Drop support for Python 3.5 by introducing f-strings in the codebase

Changes in 0.21.0
=================
- Bug fix in DynamicDocument which is not parsing known fields in constructor like Document do #2412
- When using pymongo >= 3.7, make use of Collection.count_documents instead of Collection.count
  and Cursor.count that got deprecated in pymongo >= 3.7.
  This should have a negative impact on performance of count see Issue #2219
- Fix a bug that made the queryset drop the read_preference after clone().
- Remove Py3.5 from CI as it reached EOL and add Python 3.9
- Fix some issues related with db_field/field conflict in constructor #2414
- BREAKING CHANGE: Fix the behavior of Doc.objects.limit(0) which should return all documents (similar to mongodb) #2311
- Bug fix in ListField when updating the first item, it was saving the whole list, instead of
  just replacing the first item (as usually done when updating 1 item of the list) #2392
- Add EnumField: ``mongoengine.fields.EnumField``
- Refactoring - Remove useless code related to Document.__only_fields and Queryset.only_fields
- Fix query transformation regarding special operators #2365
- Bug Fix: Document.save() fails when shard_key is not _id #2154

Changes in 0.20.0
=================
- ATTENTION: Drop support for Python2
- Add Mongo 4.0 to Travis
- Fix error when setting a string as a ComplexDateTimeField #2253
- Bump development Status classifier to Production/Stable #2232
- Improve Queryset.get to avoid confusing MultipleObjectsReturned message in case multiple match are found #630
- Fixed a bug causing inaccurate query results, while combining ``__raw__`` and regular filters for the same field #2264
- Add support for the `elemMatch` projection operator in .fields() (e.g BlogPost.objects.fields(elemMatch__comments="test")) #2267
- DictField validate failed without default connection (bug introduced in 0.19.0) #2239
- Remove methods that were deprecated years ago:
  - name parameter in Field constructor e.g `StringField(name="...")`, was replaced by db_field
  - Queryset.slave_okay() was deprecated since pymongo3
  - dropDups was dropped with MongoDB3
  - ``Queryset._ensure_indexes`` and ``Queryset.ensure_indexes``, the right method to use is ``Document.ensure_indexes``
- Added pre-commit for development/CI #2212
- Renamed requirements-lint.txt to requirements-dev.txt #2212
- Support for setting ReadConcern #2255

Changes in 0.19.1
=================
- Tests require Pillow < 7.0.0 as it dropped Python2 support
- DEPRECATION: The interface of ``QuerySet.aggregate`` method was changed, it no longer takes an unpacked list of
  pipeline steps (*pipeline) but simply takes the pipeline list just like ``pymongo.Collection.aggregate`` does. #2079

Changes in 0.19.0
=================
- BREAKING CHANGE: ``class_check`` and ``read_preference`` keyword arguments are no longer available when filtering a ``QuerySet``. #2112
  - Instead of ``Doc.objects(foo=bar, read_preference=...)`` use ``Doc.objects(foo=bar).read_preference(...)``.
  - Instead of ``Doc.objects(foo=bar, class_check=False)`` use ``Doc.objects(foo=bar).clear_cls_query(...)``.
@@ -15,9 +87,23 @@ Development
  - If you catch/use ``MongoEngineConnectionError`` in your code, you'll have to rename it.
- BREAKING CHANGE: Positional arguments when instantiating a document are no longer supported. #2103
  - From now on keyword arguments (e.g. ``Doc(field_name=value)``) are required.
- BREAKING CHANGE: A ``LazyReferenceField`` is now stored in the ``_data`` field of its parent as a ``DBRef``, ``Document``, or ``EmbeddedDocument`` (``ObjectId`` is no longer allowed). #2182
- DEPRECATION: ``Q.empty`` & ``QNode.empty`` are marked as deprecated and will be removed in a next version of MongoEngine. #2210
  - Added ability to check if Q or QNode are empty by parsing them to bool.
  - Instead of ``Q(name="John").empty`` use ``not Q(name="John")``.
- Fix updating/modifying/deleting/reloading a document that's sharded by a field with ``db_field`` specified. #2125
- Only set no_cursor_timeout when requested (fixes an incompatibility with MongoDB 4.2) #2148
- ``ListField`` now accepts an optional ``max_length`` parameter. #2110
- Improve error message related to InvalidDocumentError #2180
- Added BulkWriteError to replace NotUniqueError which was misleading in bulk write insert #2152
- Added ability to compare Q and Q operations #2204
- Added ability to use a db alias on query_counter #2194
- Added ability to specify collations for querysets with ``Doc.objects.collation`` #2024
- Fix updates of a list field by negative index #2094
- Switch from nosetest to pytest as test runner #2114
- The codebase is now formatted using ``black``. #2109
- Documentation improvements:
  - Documented how `pymongo.monitoring` can be used to log all queries issued by MongoEngine to the driver.

Changes in 0.18.2
=================
@@ -414,9 +500,6 @@ Changes in 0.8.3
- Document.select_related() now respects ``db_alias`` (#377)
- Reload uses shard_key if applicable (#384)
- Dynamic fields are ordered based on creation and stored in _fields_ordered (#396)

**Potential breaking change:** http://docs.mongoengine.org/en/latest/upgrade.html#to-0-8-3

- Fixed pickling dynamic documents ``_dynamic_fields`` (#387)
- Fixed ListField setslice and delslice dirty tracking (#390)
- Added Django 1.5 PY3 support (#392)
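The 0.21.0 entry above introduces ``mongoengine.fields.EnumField``, and the Development entry notes that ``choices`` now limits the allowed enum values. A hedged sketch of the field; the ``Shirt``/``Size`` names and database are invented for illustration, and a local mongod is assumed:

.. code-block:: python

    from enum import Enum

    from mongoengine import Document, connect
    from mongoengine.fields import EnumField


    class Size(Enum):
        SMALL = "S"
        LARGE = "L"


    class Shirt(Document):
        # `choices` restricts which members are accepted (per the entry above)
        size = EnumField(Size, default=Size.SMALL, choices=[Size.SMALL, Size.LARGE])


    connect("enum_demo")
    Shirt(size=Size.LARGE).save()
    print(Shirt.objects.first().size)  # Size.LARGE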
10 docs/conf.py

@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# MongoEngine documentation build configuration file, created by
# sphinx-quickstart on Sun Nov 22 18:14:13 2009.
@@ -11,7 +10,8 @@
# All configuration values have a default; values that are commented out
# serve to show the default.

import sys, os
import os
import sys

import sphinx_rtd_theme

@@ -26,7 +26,7 @@ sys.path.insert(0, os.path.abspath(".."))

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ["sphinx.ext.autodoc", "sphinx.ext.todo"]
extensions = ["sphinx.ext.autodoc", "sphinx.ext.todo", "readthedocs_ext.readthedocs"]

# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
@@ -41,8 +41,8 @@ source_suffix = ".rst"
master_doc = "index"

# General information about the project.
project = u"MongoEngine"
copyright = u"2009, MongoEngine Authors"
project = "MongoEngine"
copyright = "2009, MongoEngine Authors"  # noqa: A001

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
12 docs/faq.rst (new file)

@@ -0,0 +1,12 @@
==========================
Frequently Asked Questions
==========================

Does MongoEngine support asynchronous drivers (Motor, TxMongo)?
---------------------------------------------------------------

No, MongoEngine is exclusively based on PyMongo and isn't designed to support other drivers.
If this is a requirement for your project, check the alternatives: `uMongo`_ and `MotorEngine`_.

.. _uMongo: https://umongo.readthedocs.io/
.. _MotorEngine: https://motorengine.readthedocs.io/
@@ -5,7 +5,7 @@ Connecting to MongoDB
=====================

Connections in MongoEngine are registered globally and are identified with aliases.
If no `alias` is provided during the connection, it will use "default" as alias.
If no ``alias`` is provided during the connection, it will use "default" as alias.

To connect to a running instance of :program:`mongod`, use the :func:`~mongoengine.connect`
function. The first argument is the name of the database to connect to::
@@ -14,25 +14,66 @@ function. The first argument is the name of the database to connect to::

    connect('project1')

By default, MongoEngine assumes that the :program:`mongod` instance is running
on **localhost** on port **27017**. If MongoDB is running elsewhere, you should
provide the :attr:`host` and :attr:`port` arguments to
:func:`~mongoengine.connect`::
on **localhost** on port **27017**.

    connect('project1', host='192.168.1.35', port=12345)
If MongoDB is running elsewhere, you need to provide details on how to connect. There are two ways of
doing this: using a connection string in URI format (**this is the preferred method**) or providing
individual attributes as keyword arguments.

Connect with URI string
=======================

When using a connection string in URI format you should specify the connection details
as the :attr:`host` to :func:`~mongoengine.connect`. In a web application context for instance, the URI
is typically read from the config file::

    connect(host="mongodb://127.0.0.1:27017/my_db")

If the database requires authentication, you can specify it in the
URI. As each database can have its own users configured, you need to tell MongoDB
where to look for the user you are working with, that's what the ``?authSource=admin`` bit
of the MongoDB connection string is for::

    # Connects to 'my_db' database by authenticating
    # with given credentials against the 'admin' database (by default as authSource isn't provided)
    connect(host="mongodb://my_user:my_password@127.0.0.1:27017/my_db")

    # Equivalent to previous connection but explicitly states that
    # it should use admin as the authentication source database
    connect(host="mongodb://my_user:my_password@hostname:port/my_db?authSource=admin")

    # Connects to 'my_db' database by authenticating
    # with given credentials against that same database
    connect(host="mongodb://my_user:my_password@127.0.0.1:27017/my_db?authSource=my_db")

The URI string can also be used to configure advanced parameters like ssl, replicaSet, etc. For more
information or examples of URI strings, you can refer to the `official doc <https://docs.mongodb.com/manual/reference/connection-string/>`_::

    connect(host="mongodb://my_user:my_password@127.0.0.1:27017/my_db?authSource=admin&ssl=true&replicaSet=globaldb")

.. note:: URI containing SRV records (e.g "mongodb+srv://server.example.com/") can be used as well

Connect with keyword attributes
===============================

The second option for specifying the connection details is to provide the information as keyword
attributes to :func:`~mongoengine.connect`::

    connect('my_db', host='127.0.0.1', port=27017)

If the database requires authentication, :attr:`username`, :attr:`password`
and :attr:`authentication_source` arguments should be provided::

    connect('project1', username='webapp', password='pwd123', authentication_source='admin')
    connect('my_db', username='my_user', password='my_password', authentication_source='admin')

URI style connections are also supported -- just supply the URI as
the :attr:`host` to
:func:`~mongoengine.connect`::

    connect('project1', host='mongodb://localhost/database_name')
The set of attributes that :func:`~mongoengine.connect` recognizes includes but is not limited to:
:attr:`host`, :attr:`port`, :attr:`read_preference`, :attr:`username`, :attr:`password`, :attr:`authentication_source`, :attr:`authentication_mechanism`,
:attr:`replicaset`, :attr:`tls`, etc. Most of the parameters accepted by `pymongo.MongoClient <https://pymongo.readthedocs.io/en/stable/api/pymongo/mongo_client.html#pymongo.mongo_client.MongoClient>`_
can be used with :func:`~mongoengine.connect` and will simply be forwarded when instantiating the `pymongo.MongoClient`.

.. note:: Database, username and password from URI string overrides
    corresponding parameters in :func:`~mongoengine.connect`: ::
    corresponding parameters in :func:`~mongoengine.connect`, this should
    obviously be avoided: ::

        connect(
            db='test',
@@ -41,28 +82,19 @@ the :attr:`host` to
            host='mongodb://admin:qwerty@localhost/production'
        )

will establish connection to ``production`` database using
``admin`` username and ``qwerty`` password.
will establish connection to ``production`` database using ``admin`` username and ``qwerty`` password.

.. note:: Calling :func:`~mongoengine.connect` without argument will establish
    a connection to the "test" database by default

Replica Sets
============
Read Preferences
================

MongoEngine supports connecting to replica sets::

    from mongoengine import connect

    # Regular connect
    connect('dbname', replicaset='rs-name')

    # MongoDB URI-style connect
    connect(host='mongodb://localhost/dbname?replicaSet=rs-name')

Read preferences are supported through the connection or via individual
As stated above, Read preferences are supported through the connection but also via individual
queries by passing the read_preference ::

    from pymongo import ReadPreference

    Bar.objects().read_preference(ReadPreference.PRIMARY)
    Bar.objects(read_preference=ReadPreference.PRIMARY)
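A minimal sketch tying the section together: URI connect, an aliased keyword connect, and a per-query read preference. Hosts, database names and the ``Bar`` usage are placeholders, and a local mongod is assumed:

.. code-block:: python

    from pymongo import ReadPreference

    from mongoengine import Document, StringField, connect

    # URI style (the preferred method per the section above)
    connect(host="mongodb://127.0.0.1:27017/my_db")

    # Keyword style, registered under an explicit alias instead of "default"
    connect("my_other_db", host="127.0.0.1", port=27017, alias="secondary")


    class Bar(Document):
        name = StringField()


    # Per-query read preference, as shown at the end of the section
    Bar.objects().read_preference(ReadPreference.PRIMARY)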
@@ -27,6 +27,8 @@ objects** as class attributes to the document class::

As BSON (the binary format for storing data in mongodb) is order dependent,
documents are serialized based on their field order.

.. _dynamic-document-schemas:

Dynamic document schemas
========================
One of the benefits of MongoDB is dynamic schemas for a collection, whilst data
@@ -76,6 +78,7 @@ are as follows:

* :class:`~mongoengine.fields.EmailField`
* :class:`~mongoengine.fields.EmbeddedDocumentField`
* :class:`~mongoengine.fields.EmbeddedDocumentListField`
* :class:`~mongoengine.fields.EnumField`
* :class:`~mongoengine.fields.FileField`
* :class:`~mongoengine.fields.FloatField`
* :class:`~mongoengine.fields.GenericEmbeddedDocumentField`
@@ -230,6 +233,9 @@ document class as the first argument::

    comment2 = Comment(content='Nice article!')
    page = Page(comments=[comment1, comment2])

Embedded documents can also leverage the flexibility of :ref:`dynamic-document-schemas`
by inheriting :class:`~mongoengine.DynamicEmbeddedDocument`.

Dictionary Fields
-----------------
Often, an embedded document may be used instead of a dictionary – generally
@@ -289,12 +295,12 @@ as the constructor's argument::

    content = StringField()


.. _one-to-many-with-listfields:
.. _many-to-many-with-listfields:

One to Many with ListFields
Many to Many with ListFields
''''''''''''''''''''''''''''

If you are implementing a one to many relationship via a list of references,
If you are implementing a many to many relationship via a list of references,
then the references are stored as DBRefs and to query you need to pass an
instance of the object to the query::
@@ -335,7 +341,6 @@ supplying the :attr:`reverse_delete_rule` attributes on the
:class:`ReferenceField` definition, like this::

    class ProfilePage(Document):
        ...
        employee = ReferenceField('Employee', reverse_delete_rule=mongoengine.CASCADE)

The declaration in this example means that when an :class:`Employee` object is
@@ -352,7 +357,7 @@ Its value can take any of the following constants:

    Deletion is denied if there still exist references to the object being
    deleted.
:const:`mongoengine.NULLIFY`
    Any object's fields still referring to the object being deleted are removed
    Any object's fields still referring to the object being deleted are set to None
    (using MongoDB's "unset" operation), effectively nullifying the relationship.
:const:`mongoengine.CASCADE`
    Any object containing fields that are referring to the object being deleted
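A hedged end-to-end sketch of the ``CASCADE`` rule, reusing the section's own ``Employee``/``ProfilePage`` models; the database name and field values are invented, and a local mongod is assumed:

.. code-block:: python

    import mongoengine
    from mongoengine import Document, ReferenceField, StringField, connect

    connect("delete_rule_demo")


    class Employee(Document):
        name = StringField()


    class ProfilePage(Document):
        employee = ReferenceField(Employee, reverse_delete_rule=mongoengine.CASCADE)


    boss = Employee(name="boss").save()
    ProfilePage(employee=boss).save()

    boss.delete()  # CASCADE also deletes the ProfilePage referencing boss
    print(ProfilePage.objects.count())  # 0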
@ -426,28 +431,15 @@ either a single field name, or a list or tuple of field names::
|
||||
first_name = StringField()
|
||||
last_name = StringField(unique_with='first_name')
|
||||
|
||||
Skipping Document validation on save
|
||||
------------------------------------
|
||||
You can also skip the whole document validation process by setting
|
||||
``validate=False`` when calling the :meth:`~mongoengine.document.Document.save`
|
||||
method::
|
||||
|
||||
class Recipient(Document):
|
||||
name = StringField()
|
||||
email = EmailField()
|
||||
|
||||
recipient = Recipient(name='admin', email='root@localhost')
|
||||
recipient.save() # will raise a ValidationError while
|
||||
recipient.save(validate=False) # won't
|
||||
|
||||
Document collections
|
||||
====================
|
||||
Document classes that inherit **directly** from :class:`~mongoengine.Document`
|
||||
will have their own **collection** in the database. The name of the collection
|
||||
is by default the name of the class, converted to lowercase (so in the example
|
||||
above, the collection would be called `page`). If you need to change the name
|
||||
of the collection (e.g. to use MongoEngine with an existing database), then
|
||||
create a class dictionary attribute called :attr:`meta` on your document, and
|
||||
is by default the name of the class converted to snake_case (e.g if your Document class
|
||||
is named `CompanyUser`, the corresponding collection would be `company_user`). If you need
|
||||
to change the name of the collection (e.g. to use MongoEngine with an existing database),
|
||||
then create a class dictionary attribute called :attr:`meta` on your document, and
|
||||
set :attr:`collection` to the name of the collection that you want your
|
||||
document class to use::
|
||||
|
||||
@ -485,7 +477,7 @@ dictionary containing a full index definition.
|
||||
|
||||
A direction may be specified on fields by prefixing the field name with a
|
||||
**+** (for ascending) or a **-** sign (for descending). Note that direction
|
||||
only matters on multi-field indexes. Text indexes may be specified by prefixing
|
||||
only matters on compound indexes. Text indexes may be specified by prefixing
|
||||
the field name with a **$**. Hashed indexes may be specified by prefixing
|
||||
the field name with a **#**::
|
||||
|
||||
@ -496,14 +488,14 @@ the field name with a **#**::
|
||||
created = DateTimeField()
|
||||
meta = {
|
||||
'indexes': [
|
||||
'title',
|
||||
'title', # single-field index
|
||||
'$title', # text index
|
||||
'#title', # hashed index
|
||||
('title', '-rating'),
|
||||
('category', '_cls'),
|
||||
('title', '-rating'), # compound index
|
||||
('category', '_cls'), # compound index
|
||||
{
|
||||
'fields': ['created'],
|
||||
'expireAfterSeconds': 3600
|
||||
'expireAfterSeconds': 3600 # ttl index
|
||||
}
|
||||
]
|
||||
}
|
||||
@ -555,7 +547,6 @@ There are a few top level defaults for all indexes that can be set::
|
||||
'index_background': True,
|
||||
'index_cls': False,
|
||||
'auto_create_index': True,
|
||||
'index_drop_dups': True,
|
||||
}
|
||||
|
||||
|
||||
@ -574,11 +565,6 @@ There are a few top level defaults for all indexes that can be set::
|
||||
in systems where indexes are managed separately. Disabling this will improve
|
||||
performance.
|
||||
|
||||
:attr:`index_drop_dups` (Optional)
|
||||
Set the default value for whether an index should drop duplicates.
Since MongoDB 3.0, drop_dups is no longer supported; it raises a warning
and has no effect.
|
||||
|
||||
|
||||
Compound Indexes and Indexing sub documents
|
||||
-------------------------------------------
|
||||
@ -642,8 +628,8 @@ point. To create a geospatial index you must prefix the field with the
|
||||
],
|
||||
}
|
||||
|
||||
Time To Live indexes
|
||||
--------------------
|
||||
Time To Live (TTL) indexes
|
||||
--------------------------
|
||||
|
||||
A special index type that allows you to automatically expire data from a
|
||||
collection after a given period. See the official
|
||||
@ -744,7 +730,7 @@ Document inheritance
|
||||
|
||||
To create a specialised type of a :class:`~mongoengine.Document` you have
|
||||
defined, you may subclass it and add any extra fields or methods you may need.
|
||||
As this is new class is not a direct subclass of
|
||||
As this new class is not a direct subclass of
|
||||
:class:`~mongoengine.Document`, it will not be stored in its own collection; it
|
||||
will use the same collection as its superclass uses. This allows for more
|
||||
convenient and efficient retrieval of related documents -- all you need do is
|
||||
@ -767,6 +753,27 @@ document.::
|
||||
Setting :attr:`allow_inheritance` to True should also be done on an
:class:`~mongoengine.EmbeddedDocument` class if you need to subclass it.
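
For reference, a minimal sketch of such a hierarchy (the same `Page`/`DatedPage`
classes are assumed by the querying example below)::

    class Page(Document):
        title = StringField(max_length=200, required=True)
        meta = {'allow_inheritance': True}

    class DatedPage(Page):
        date = DateTimeField()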
|
||||
|
||||
When it comes to querying using :attr:`.objects()`, querying `Page.objects()` will query
both `Page` and `DatedPage`, whereas querying `DatedPage` will only query the `DatedPage` documents.
Behind the scenes, MongoEngine deals with inheritance by adding a :attr:`_cls` attribute that contains
the class name in every document. When a document is loaded, MongoEngine checks
its :attr:`_cls` attribute and uses that class to construct the instance::
|
||||
|
||||
Page(title='a funky title').save()
|
||||
DatedPage(title='another title', date=datetime.utcnow()).save()
|
||||
|
||||
print(Page.objects().count()) # 2
|
||||
print(DatedPage.objects().count()) # 1
|
||||
|
||||
# print documents in their native form
|
||||
# we remove 'id' to avoid polluting the output with unnecessary detail
|
||||
qs = Page.objects.exclude('id').as_pymongo()
|
||||
print(list(qs))
|
||||
# [
|
||||
# {'_cls': u'Page', 'title': 'a funky title'},
# {'_cls': u'Page.DatedPage', 'title': u'another title', 'date': datetime.datetime(2019, 12, 13, 20, 16, 59, 993000)}
|
||||
# ]
|
||||
|
||||
Working with existing data
|
||||
--------------------------
|
||||
As MongoEngine no longer defaults to needing :attr:`_cls`, you can quickly and
|
||||
|
@ -41,35 +41,6 @@ already exist, then any changes will be updated atomically. For example::
|
||||
.. seealso::
|
||||
:ref:`guide-atomic-updates`
|
||||
|
||||
Pre save data validation and cleaning
|
||||
-------------------------------------
|
||||
MongoEngine allows you to create custom cleaning rules for your documents when
|
||||
calling :meth:`~mongoengine.Document.save`. By providing a custom
|
||||
:meth:`~mongoengine.Document.clean` method you can do any pre validation / data
|
||||
cleaning.
|
||||
|
||||
This might be useful if you want to ensure a default value based on other
|
||||
document values for example::
|
||||
|
||||
class Essay(Document):
|
||||
status = StringField(choices=('Published', 'Draft'), required=True)
|
||||
pub_date = DateTimeField()
|
||||
|
||||
def clean(self):
|
||||
"""Ensures that only published essays have a `pub_date` and
|
||||
automatically sets `pub_date` if essay is published and `pub_date`
|
||||
is not set"""
|
||||
if self.status == 'Draft' and self.pub_date is not None:
|
||||
msg = 'Draft entries should not have a publication date.'
|
||||
raise ValidationError(msg)
|
||||
# Set the pub_date for published items if not set.
|
||||
if self.status == 'Published' and self.pub_date is None:
|
||||
self.pub_date = datetime.now()
|
||||
|
||||
.. note::
|
||||
Cleaning is only called if validation is turned on and when calling
|
||||
:meth:`~mongoengine.Document.save`.
|
||||
|
||||
Cascading Saves
|
||||
---------------
|
||||
If your document contains :class:`~mongoengine.fields.ReferenceField` or
|
||||
|
@ -2,16 +2,15 @@
|
||||
GridFS
|
||||
======
|
||||
|
||||
.. versionadded:: 0.4
|
||||
|
||||
Writing
|
||||
-------
|
||||
|
||||
GridFS support comes in the form of the :class:`~mongoengine.fields.FileField` field
|
||||
object. This field acts as a file-like object and provides a couple of
|
||||
different ways of inserting and retrieving data. Arbitrary metadata such as
|
||||
content type can also be stored alongside the files. In the following example,
|
||||
a document is created to store details about animals, including a photo::
|
||||
content type can also be stored alongside the files. The object returned when accessing a
|
||||
FileField is a proxy to `Pymongo's GridFS <https://api.mongodb.com/python/current/examples/gridfs.html#gridfs-example>`_.
|
||||
In the following example, a document is created to store details about animals, including a photo::
|
||||
|
||||
class Animal(Document):
|
||||
genus = StringField()
|
||||
@ -20,8 +19,8 @@ a document is created to store details about animals, including a photo::
|
||||
|
||||
marmot = Animal(genus='Marmota', family='Sciuridae')
|
||||
|
||||
marmot_photo = open('marmot.jpg', 'rb')
|
||||
marmot.photo.put(marmot_photo, content_type = 'image/jpeg')
|
||||
with open('marmot.jpg', 'rb') as fd:
|
||||
marmot.photo.put(fd, content_type = 'image/jpeg')
|
||||
marmot.save()
|
||||
|
||||
Retrieval
|
||||
@ -34,6 +33,20 @@ field. The file can also be retrieved just as easily::
|
||||
photo = marmot.photo.read()
|
||||
content_type = marmot.photo.content_type
|
||||
|
||||
.. note:: If you need to read() the content of a file multiple times, you'll need to "rewind"
|
||||
the file-like object using `seek`::
|
||||
|
||||
marmot = Animal.objects(genus='Marmota').first()
|
||||
content1 = marmot.photo.read()
|
||||
assert content1 != b""
|
||||
|
||||
content2 = marmot.photo.read() # will be empty
|
||||
assert content2 == b""
|
||||
|
||||
marmot.photo.seek(0) # rewind the file by setting the current position of the cursor in the file to 0
|
||||
content3 = marmot.photo.read()
|
||||
assert content3 == content1
|
||||
|
||||
Streaming
|
||||
---------
|
||||
|
||||
|
@ -10,7 +10,10 @@ User Guide
|
||||
defining-documents
|
||||
document-instances
|
||||
querying
|
||||
validation
|
||||
gridfs
|
||||
signals
|
||||
text-indexes
|
||||
migration
|
||||
logging-monitoring
|
||||
mongomock
|
||||
|
@ -12,7 +12,7 @@ MongoEngine is available on PyPI, so you can use :program:`pip`:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ pip install mongoengine
|
||||
$ python -m pip install mongoengine
|
||||
|
||||
Alternatively, if you don't have setuptools installed, `download it from PyPi
|
||||
<http://pypi.python.org/pypi/mongoengine/>`_ and run
|
||||
|
docs/guide/logging-monitoring.rst (new file, 80 lines)
@ -0,0 +1,80 @@
|
||||
==================
|
||||
Logging/Monitoring
|
||||
==================
|
||||
|
||||
It is possible to use `pymongo.monitoring <https://api.mongodb.com/python/current/api/pymongo/monitoring.html>`_ to monitor
|
||||
the driver events (e.g. queries, connections, etc.). This can be handy if you want to monitor the queries issued by
|
||||
MongoEngine to the driver.
|
||||
|
||||
To use `pymongo.monitoring` with MongoEngine, you need to make sure that you are registering the listeners
|
||||
**before** establishing the database connection (i.e. calling `connect`):
|
||||
|
||||
The following snippet provides basic logging of all command events:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
import logging
|
||||
from pymongo import monitoring
|
||||
from mongoengine import *
|
||||
|
||||
log = logging.getLogger()
|
||||
log.setLevel(logging.DEBUG)
|
||||
logging.basicConfig(level=logging.DEBUG)
|
||||
|
||||
|
||||
class CommandLogger(monitoring.CommandListener):
|
||||
|
||||
def started(self, event):
|
||||
log.debug("Command {0.command_name} with request id "
|
||||
"{0.request_id} started on server "
|
||||
"{0.connection_id}".format(event))
|
||||
|
||||
def succeeded(self, event):
|
||||
log.debug("Command {0.command_name} with request id "
|
||||
"{0.request_id} on server {0.connection_id} "
|
||||
"succeeded in {0.duration_micros} "
|
||||
"microseconds".format(event))
|
||||
|
||||
def failed(self, event):
|
||||
log.debug("Command {0.command_name} with request id "
|
||||
"{0.request_id} on server {0.connection_id} "
|
||||
"failed in {0.duration_micros} "
|
||||
"microseconds".format(event))
|
||||
|
||||
monitoring.register(CommandLogger())
|
||||
|
||||
|
||||
class Jedi(Document):
|
||||
name = StringField()
|
||||
|
||||
|
||||
connect()
|
||||
|
||||
|
||||
log.info('GO!')
|
||||
|
||||
log.info('Saving an item through MongoEngine...')
|
||||
Jedi(name='Obi-Wan Kenobii').save()
|
||||
|
||||
log.info('Querying through MongoEngine...')
|
||||
obiwan = Jedi.objects.first()
|
||||
|
||||
log.info('Updating through MongoEngine...')
|
||||
obiwan.name = 'Obi-Wan Kenobi'
|
||||
obiwan.save()
|
||||
|
||||
|
||||
Executing this prints the following output::
|
||||
|
||||
INFO:root:GO!
|
||||
INFO:root:Saving an item through MongoEngine...
|
||||
DEBUG:root:Command insert with request id 1681692777 started on server ('localhost', 27017)
|
||||
DEBUG:root:Command insert with request id 1681692777 on server ('localhost', 27017) succeeded in 562 microseconds
|
||||
INFO:root:Querying through MongoEngine...
|
||||
DEBUG:root:Command find with request id 1714636915 started on server ('localhost', 27017)
|
||||
DEBUG:root:Command find with request id 1714636915 on server ('localhost', 27017) succeeded in 341 microseconds
|
||||
INFO:root:Updating through MongoEngine...
|
||||
DEBUG:root:Command update with request id 1957747793 started on server ('localhost', 27017)
|
||||
DEBUG:root:Command update with request id 1957747793 on server ('localhost', 27017) succeeded in 455 microseconds
|
||||
|
||||
More details can of course be obtained by checking the `event` argument from the `CommandListener`.
|
docs/guide/migration.rst (new file, 308 lines)
@ -0,0 +1,308 @@
|
||||
===================
|
||||
Documents migration
|
||||
===================
|
||||
|
||||
The structure of your documents and their associated mongoengine schemas are likely
|
||||
to change over the lifetime of an application. This section provides guidance and
|
||||
recommendations on how to deal with migrations.
|
||||
|
||||
Due to the very flexible nature of mongodb, migrations of models aren't trivial. For people
familiar with `alembic` for `sqlalchemy`, there is unfortunately no equivalent
library that manages migrations automatically for mongoengine.
|
||||
|
||||
Example 1: Addition of a field
|
||||
==============================
|
||||
|
||||
Let's start by taking a simple example of a model change and review the different options you
have to deal with the migration.
|
||||
|
||||
Let's assume we start with the following schema and save an instance:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class User(Document):
|
||||
name = StringField()
|
||||
|
||||
User(name="John Doe").save()
|
||||
|
||||
# print the objects as they exist in mongodb
|
||||
print(User.objects().as_pymongo()) # [{u'_id': ObjectId('5d06b9c3d7c1f18db3e7c874'), u'name': u'John Doe'}]
|
||||
|
||||
In the next version of your application, let's now assume that a new field `enabled` gets added to the
existing ``User`` model with a `default=True`. Thus you simply update the ``User`` class to the following:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class User(Document):
|
||||
name = StringField(required=True)
|
||||
enabled = BooleanField(default=True)
|
||||
|
||||
Without applying any migration, we now reload an object from the database into the ``User`` class
|
||||
and check its `enabled` attribute:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
assert User.objects.count() == 1
|
||||
user = User.objects().first()
|
||||
assert user.enabled is True
|
||||
assert User.objects(enabled=True).count() == 0 # uh?
|
||||
assert User.objects(enabled=False).count() == 0 # uh?
|
||||
|
||||
# this is consistent with what we have in the database
|
||||
# in fact, 'enabled' does not exist
|
||||
print(User.objects().as_pymongo().first())    # {u'_id': ObjectId('5d06b9c3d7c1f18db3e7c874'), u'name': u'John Doe'}
|
||||
assert User.objects(enabled=None).count() == 1
|
||||
|
||||
As you can see, even if the document wasn't updated, mongoengine applies the default value seamlessly when it
|
||||
loads the pymongo dict into a ``User`` instance. At first sight it looks like you don't need to migrate the
|
||||
existing documents when adding new fields but this actually leads to inconsistencies when it comes to querying.
|
||||
|
||||
In fact, when querying, mongoengine doesn't account for the default value of the new field, so
if you don't actually migrate the existing documents, you run the risk that queries/updates
will miss relevant records.
|
||||
|
||||
When adding fields/modifying default values, you can use any of the following to do the migration
|
||||
as a standalone script:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
# Use mongoengine to set a default value for a given field
|
||||
User.objects().update(enabled=True)
|
||||
# or use pymongo
|
||||
user_coll = User._get_collection()
|
||||
user_coll.update_many({}, {'$set': {'enabled': True}})
|
||||
|
||||
|
||||
Example 2: Inheritance change
|
||||
=============================
|
||||
|
||||
Let's consider the following example:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class Human(Document):
|
||||
name = StringField()
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
class Jedi(Human):
|
||||
dark_side = BooleanField()
|
||||
light_saber_color = StringField()
|
||||
|
||||
Jedi(name="Darth Vader", dark_side=True, light_saber_color="red").save()
|
||||
Jedi(name="Obi Wan Kenobi", dark_side=False, light_saber_color="blue").save()
|
||||
|
||||
assert Human.objects.count() == 2
|
||||
assert Jedi.objects.count() == 2
|
||||
|
||||
# Let's check how these documents got stored in mongodb
|
||||
print(Jedi.objects.as_pymongo())
|
||||
# [
|
||||
# {'_id': ObjectId('5fac4aaaf61d7fb06046e0f9'), '_cls': 'Human.Jedi', 'name': 'Darth Vader', 'dark_side': True, 'light_saber_color': 'red'},
|
||||
# {'_id': ObjectId('5fac4ac4f61d7fb06046e0fa'), '_cls': 'Human.Jedi', 'name': 'Obi Wan Kenobi', 'dark_side': False, 'light_saber_color': 'blue'}
|
||||
# ]
|
||||
|
||||
As you can observe, when you use inheritance, MongoEngine stores a field named '_cls' behind the scenes to keep
track of the Document class.
|
||||
|
||||
Let's now take the scenario where you want to refactor the inheritance schema and:

- have the Jedi's with dark_side=False/True become GoodJedi's/BadSith's
- get rid of the 'dark_side' field

i.e. move to the following schemas:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
# unchanged
|
||||
class Human(Document):
|
||||
name = StringField()
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
# attribute 'dark_side' removed
|
||||
class GoodJedi(Human):
|
||||
light_saber_color = StringField()
|
||||
|
||||
# new class
|
||||
class BadSith(Human):
|
||||
light_saber_color = StringField()
|
||||
|
||||
MongoEngine doesn't know about these changes or how to map them to the existing data,
so if you don't apply any migration, you will observe strange behavior, as if the collection were suddenly
empty.
||||
|
||||
.. code-block:: python
|
||||
|
||||
# As a reminder, the documents that we inserted
|
||||
# have the _cls field = 'Human.Jedi'
|
||||
|
||||
# The following has no match
# because the query that is used behind the scenes is
# filtering on {'_cls': 'Human.GoodJedi'}
|
||||
assert GoodJedi.objects().count() == 0
|
||||
|
||||
# The following also has no match
|
||||
# because it is filtering on {'_cls': {'$in': ('Human', 'Human.GoodJedi', 'Human.BadSith')}}
|
||||
# which has no match
|
||||
assert Human.objects.count() == 0
|
||||
assert Human.objects.first() is None
|
||||
|
||||
# If we bypass MongoEngine and make use of the underlying driver (PyMongo)
|
||||
# we can see that the documents are there
|
||||
humans_coll = Human._get_collection()
|
||||
assert humans_coll.count_documents({}) == 2
|
||||
# print first document
|
||||
print(humans_coll.find_one())
|
||||
# {'_id': ObjectId('5fac4aaaf61d7fb06046e0f9'), '_cls': 'Human.Jedi', 'name': 'Darth Vader', 'dark_side': True, 'light_saber_color': 'red'}
|
||||
|
||||
As you can see, the first obvious problem is that we need to modify the '_cls' values based on the existing
values of the 'dark_side' field.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
humans_coll = Human._get_collection()
|
||||
old_class = 'Human.Jedi'
|
||||
good_jedi_class = 'Human.GoodJedi'
|
||||
bad_sith_class = 'Human.BadSith'
|
||||
humans_coll.update_many({'_cls': old_class, 'dark_side': False}, {'$set': {'_cls': good_jedi_class}})
|
||||
humans_coll.update_many({'_cls': old_class, 'dark_side': True}, {'$set': {'_cls': bad_sith_class}})
|
||||
|
||||
Let's now check if querying improved in MongoEngine:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
assert GoodJedi.objects().count() == 1 # Hoorah!
|
||||
assert BadSith.objects().count() == 1 # Hoorah!
|
||||
assert Human.objects.count() == 2 # Hoorah!
|
||||
|
||||
# let's now check that documents load correctly
|
||||
jedi = GoodJedi.objects().first()
|
||||
# raises FieldDoesNotExist: The fields "{'dark_side'}" do not exist on the document "Human.GoodJedi"
|
||||
|
||||
In fact we only took care of renaming the _cls values but we haven't removed the 'dark_side' field,
which no longer exists on the GoodJedi and BadSith models.
Let's remove the field from the collection:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
humans_coll = Human._get_collection()
|
||||
humans_coll.update_many({}, {'$unset': {'dark_side': 1}})
|
||||
|
||||
.. note:: We did this migration in two separate steps for the sake of the example, but it could have been combined
    with the migration of the _cls fields::
|
||||
|
||||
humans_coll.update_many(
|
||||
{'_cls': old_class, 'dark_side': False},
|
||||
{
|
||||
'$set': {'_cls': good_jedi_class},
|
||||
'$unset': {'dark_side': 1}
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
And verify that the documents now load correctly:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
jedi = GoodJedi.objects().first()
|
||||
assert jedi.name == "Obi Wan Kenobi"
|
||||
|
||||
sith = BadSith.objects().first()
|
||||
assert sith.name == "Darth Vader"
|
||||
|
||||
|
||||
Another way of dealing with this migration is to iterate over
the documents and update/replace them one by one. This is much slower but
it is often useful for complex migrations of Document models.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
for doc in humans_coll.find():
|
||||
if doc['_cls'] == 'Human.Jedi':
|
||||
doc['_cls'] = 'Human.BadSith' if doc['dark_side'] else 'Human.GoodJedi'
|
||||
doc.pop('dark_side')
|
||||
humans_coll.replace_one({'_id': doc['_id']}, doc)
|
||||
|
||||
.. warning:: Be aware of this `flaw <https://groups.google.com/g/mongodb-user/c/AFC1ia7MHzk>`_ if you modify documents while iterating
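
A simple workaround (our suggestion, not something taken from the linked thread) is to materialize the cursor before writing:

.. code-block:: python

    # snapshot the documents first so that the writes cannot interfere
    # with a live cursor; fine for reasonably sized collections
    for doc in list(humans_coll.find()):
        ...  # modify and replace as above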
|
||||
|
||||
Example 3: Index removal
========================
|
||||
|
||||
If you remove an index from your Document class, or remove an indexed Field from your Document class,
|
||||
you'll need to manually drop the corresponding index. MongoEngine will not do that for you.
|
||||
|
||||
The way to deal with this case is to identify the name of the index to drop with `index_information()`, and then drop
|
||||
it with `drop_index()`.
|
||||
|
||||
Let's for instance assume that you start with the following Document class
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class User(Document):
|
||||
name = StringField(index=True)
|
||||
|
||||
meta = {"indexes": ["name"]}
|
||||
|
||||
User(name="John Doe").save()
|
||||
|
||||
As soon as you start interacting with the Document collection (when `.save()` is called in this case),
|
||||
it will create the following indexes:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
print(User._get_collection().index_information())
|
||||
# {
|
||||
# '_id_': {'key': [('_id', 1)], 'v': 2},
|
||||
# 'name_1': {'background': False, 'key': [('name', 1)], 'v': 2},
|
||||
# }
|
||||
|
||||
Thus: '_id', which is the default index, and 'name_1', which is our custom index.
If you were to remove the 'name' field or its index, you would have to call:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
User._get_collection().drop_index('name_1')
|
||||
|
||||
.. note:: When adding new fields or new indexes, MongoEngine will take care of creating them
|
||||
(unless `auto_create_index` is disabled).
|
||||
|
||||
Recommendations
|
||||
===============
|
||||
|
||||
- Write migration scripts whenever you make changes to the model schemas
- Using :class:`~mongoengine.DynamicDocument` or ``meta = {"strict": False}`` may help to avoid some migrations or to let two versions of your application co-exist.
- Write post-processing checks to verify that migration scripts worked. See below
|
||||
|
||||
Post-processing checks
|
||||
======================
|
||||
|
||||
The following recipe can be used to sanity check a Document collection after you have applied a migration.
It does not make any assumption about what was migrated; it fetches 1000 objects randomly and
runs some quick checks on the documents to make sure they look OK. As it is, it will fail
on the first occurrence of an error, but this can be adapted to your needs.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def get_random_oids(collection, sample_size):
|
||||
pipeline = [{"$project": {'_id': 1}}, {"$sample": {"size": sample_size}}]
|
||||
return [s['_id'] for s in collection.aggregate(pipeline)]
|
||||
|
||||
def get_random_documents(DocCls, sample_size):
|
||||
doc_collection = DocCls._get_collection()
|
||||
random_oids = get_random_oids(doc_collection, sample_size)
|
||||
return DocCls.objects(id__in=random_oids)
|
||||
|
||||
def check_documents(DocCls, sample_size):
|
||||
for doc in get_random_documents(DocCls, sample_size):
|
||||
# general validation (types and values)
|
||||
doc.validate()
|
||||
|
||||
# load all subfields,
|
||||
# this may trigger additional queries if you have ReferenceFields
|
||||
# so it may be slow
|
||||
for field in doc._fields:
|
||||
try:
|
||||
getattr(doc, field)
|
||||
except Exception:
|
||||
LOG.warning(f"Could not load field {field} in Document {doc.id}")
|
||||
raise
|
||||
|
||||
check_documents(Human, sample_size=1000)
|
@ -21,7 +21,7 @@ or with an alias:
|
||||
conn = get_connection('testdb')
|
||||
|
||||
Example of test file:
|
||||
--------
|
||||
---------------------
|
||||
.. code-block:: python
|
||||
|
||||
import unittest
|
||||
@ -45,4 +45,4 @@ Example of test file:
|
||||
pers.save()
|
||||
|
||||
fresh_pers = Person.objects().first()
|
||||
self.assertEqual(fresh_pers.name, 'John')
|
||||
assert fresh_pers.name == 'John'
|
||||
|
@ -86,6 +86,10 @@ expressions:
|
||||
* ``istartswith`` -- string field starts with value (case insensitive)
|
||||
* ``endswith`` -- string field ends with value
|
||||
* ``iendswith`` -- string field ends with value (case insensitive)
|
||||
* ``wholeword`` -- string field contains whole word
|
||||
* ``iwholeword`` -- string field contains whole word (case insensitive)
|
||||
* ``regex`` -- string field match by regex
|
||||
* ``iregex`` -- string field match by regex (case insensitive)
|
||||
* ``match`` -- performs an $elemMatch so you can match an entire document within an array
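
For instance, a minimal sketch (assuming a `Page` document with a `title` StringField)::

    Page.objects(title__istartswith='mongo')   # case-insensitive prefix match
    Page.objects(title__iwholeword='engine')   # case-insensitive whole-word match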
|
||||
|
||||
|
||||
@ -222,12 +226,24 @@ keyword argument::
|
||||
|
||||
.. versionadded:: 0.4
|
||||
|
||||
Sorting/Ordering results
|
||||
========================
|
||||
It is possible to order the results by 1 or more keys using :meth:`~mongoengine.queryset.QuerySet.order_by`.
|
||||
The order may be specified by prefixing each key with "+" or "-". Ascending order is assumed if there's no prefix::
|
||||
|
||||
# Order by ascending date
|
||||
blogs = BlogPost.objects().order_by('date') # equivalent to .order_by('+date')
|
||||
|
||||
# Order by ascending date first, then descending title
|
||||
blogs = BlogPost.objects().order_by('+date', '-title')
|
||||
|
||||
|
||||
Limiting and skipping results
|
||||
=============================
|
||||
Just as with traditional ORMs, you may limit the number of results returned or
|
||||
skip a number of results in your query.
|
||||
:meth:`~mongoengine.queryset.QuerySet.limit` and
|
||||
:meth:`~mongoengine.queryset.QuerySet.skip` and methods are available on
|
||||
:meth:`~mongoengine.queryset.QuerySet.skip` methods are available on
|
||||
:class:`~mongoengine.queryset.QuerySet` objects, but the `array-slicing` syntax
|
||||
is preferred for achieving this::
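
    # a minimal sketch, assuming a User document class
    users = User.objects[:5]      # limit: the first 5 results
    users = User.objects[5:]      # skip the first 5 results
    users = User.objects[10:15]   # skip 10, then limit 5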
|
||||
|
||||
@ -349,9 +365,9 @@ Just as with limiting and skipping results, there is a method on a
|
||||
You could technically use ``len(User.objects)`` to get the same result, but it
|
||||
would be significantly slower than :meth:`~mongoengine.queryset.QuerySet.count`.
|
||||
When you execute a server-side count query, you let MongoDB do the heavy
|
||||
lifting and you receive a single integer over the wire. Meanwhile, len()
|
||||
lifting and you receive a single integer over the wire. Meanwhile, ``len()``
|
||||
retrieves all the results, places them in a local cache, and finally counts
|
||||
them. If we compare the performance of the two operations, len() is much slower
|
||||
them. If we compare the performance of the two operations, ``len()`` is much slower
|
||||
than :meth:`~mongoengine.queryset.QuerySet.count`.
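
For example, with the ``User`` documents mentioned above::

    num_users = User.objects.count()  # server-side count: only an integer crosses the wire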
|
||||
|
||||
Further aggregation
|
||||
@ -386,6 +402,25 @@ would be generating "tag-clouds"::
|
||||
top_tags = sorted(tag_freqs.items(), key=itemgetter(1), reverse=True)[:10]
|
||||
|
||||
|
||||
MongoDB aggregation API
|
||||
-----------------------
|
||||
If you need to run aggregation pipelines, MongoEngine provides an entry point to `Pymongo's aggregation framework <https://api.mongodb.com/python/current/examples/aggregation.html#aggregation-framework>`_
|
||||
through :meth:`~mongoengine.queryset.QuerySet.aggregate`. Check out Pymongo's documentation for the syntax and pipeline.
|
||||
An example of its use would be::
|
||||
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
|
||||
Person(name='John').save()
|
||||
Person(name='Bob').save()
|
||||
|
||||
pipeline = [
|
||||
{"$sort" : {"name" : -1}},
|
||||
{"$project": {"_id": 0, "name": {"$toUpper": "$name"}}}
|
||||
]
|
||||
data = Person.objects().aggregate(pipeline)
|
||||
assert list(data) == [{'name': 'BOB'}, {'name': 'JOHN'}]
|
||||
|
||||
Query efficiency and performance
|
||||
================================
|
||||
|
||||
@ -512,7 +547,10 @@ Documents may be updated atomically by using the
|
||||
There are several different "modifiers" that you may use with these methods (a short sketch follows the list):
|
||||
|
||||
* ``set`` -- set a particular value
|
||||
* ``set_on_insert`` -- set only if this is a new document (requires ``upsert=True``)
|
||||
* ``unset`` -- delete a particular value (since MongoDB v1.3)
|
||||
* ``max`` -- update only if value is bigger
|
||||
* ``min`` -- update only if value is smaller
|
||||
* ``inc`` -- increment a value by a given amount
|
||||
* ``dec`` -- decrement a value by a given amount
|
||||
* ``push`` -- append a value to a list
|
||||
@ -521,6 +559,7 @@ There are several different "modifiers" that you may use with these methods:
|
||||
* ``pull`` -- remove a value from a list
|
||||
* ``pull_all`` -- remove several values from a list
|
||||
* ``add_to_set`` -- add value to a list only if it's not in the list already
|
||||
* ``rename`` -- rename the key name
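
As a rough sketch (reusing the ``BlogPost`` model from the examples below; ``view_count`` is a hypothetical IntField), a modifier is joined to the field name with a double underscore::

    BlogPost.objects(id=post.id).update_one(push__tags='mongodb')  # append to the tags list
    BlogPost.objects(id=post.id).update_one(inc__view_count=1)     # hypothetical counter field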
|
||||
|
||||
.. _depending on the value: http://docs.mongodb.org/manual/reference/operator/update/pop/
|
||||
|
||||
@ -566,7 +605,8 @@ cannot use the `$` syntax in keyword arguments it has been mapped to `S`::
|
||||
['database', 'mongodb']
|
||||
|
||||
From MongoDB version 2.6, the push operator supports the $position value, which allows you
|
||||
to push values with index.
|
||||
to push values with index::
|
||||
|
||||
>>> post = BlogPost(title="Test", tags=["mongo"])
|
||||
>>> post.save()
|
||||
>>> post.update(push__tags__0=["database", "code"])
|
||||
@ -577,7 +617,7 @@ to push values with index.
|
||||
.. note::
|
||||
Currently only top level lists are handled, future versions of mongodb /
|
||||
pymongo plan to support nested positional operators. See `The $ positional
|
||||
operator <http://www.mongodb.org/display/DOCS/Updating#Updating-The%24positionaloperator>`_.
|
||||
operator <https://docs.mongodb.com/manual/tutorial/update-documents/#Updating-The%24positionaloperator>`_.
|
||||
|
||||
Server-side javascript execution
|
||||
================================
|
||||
|
docs/guide/validation.rst (new file, 122 lines)
@ -0,0 +1,122 @@
|
||||
====================
|
||||
Document Validation
|
||||
====================
|
||||
|
||||
By design, MongoEngine strictly validates the documents right before they are inserted in MongoDB
|
||||
and makes sure they are consistent with the fields defined in your models.
|
||||
|
||||
MongoEngine makes the assumption that the documents that exist in the DB are compliant with the schema.
This means that MongoEngine will not validate a document when an object is loaded from the DB into an instance
of your model, but this operation may fail under some circumstances (e.g. if there is a field in
|
||||
the document fetched from the database that is not defined in your model).
|
||||
|
||||
|
||||
Built-in validation
|
||||
===================
|
||||
|
||||
MongoEngine provides different fields that encapsulate the corresponding validation
out of the box. Validation runs when calling `.validate()` or `.save()`.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from mongoengine import Document, EmailField, IntField
|
||||
|
||||
class User(Document):
|
||||
email = EmailField()
|
||||
age = IntField(min_value=0, max_value=99)
|
||||
|
||||
user = User(email='invalid@', age=24)
|
||||
user.validate() # raises ValidationError (Invalid email address: ['email'])
|
||||
user.save() # raises ValidationError (Invalid email address: ['email'])
|
||||
|
||||
user2 = User(email='john.doe@garbage.com', age=1000)
|
||||
user2.save() # raises ValidationError (Integer value is too large: ['age'])
|
||||
|
||||
Custom validation
|
||||
=================
|
||||
|
||||
The following features can be used to customize validation:
|
||||
|
||||
* Field `validation` parameter
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def not_john_doe(name):
|
||||
if name == 'John Doe':
|
||||
raise ValidationError("John Doe is not a valid name")
|
||||
|
||||
class Person(Document):
|
||||
full_name = StringField(validation=not_john_doe)
|
||||
|
||||
Person(full_name='Billy Doe').save()
|
||||
Person(full_name='John Doe').save() # raises ValidationError (John Doe is not a valid name)
|
||||
|
||||
|
||||
* Document `clean` method
|
||||
|
||||
This method is called as part of :meth:`~mongoengine.document.Document.save` and should be used to provide
|
||||
custom model validation and/or to modify some of the field values prior to validation.
|
||||
For instance, you could use it to automatically provide a value for a field, or to do validation
|
||||
that requires access to more than a single field.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class Essay(Document):
|
||||
status = StringField(choices=('Published', 'Draft'), required=True)
|
||||
pub_date = DateTimeField()
|
||||
|
||||
def clean(self):
|
||||
# Validate that only published essays have a `pub_date`
|
||||
if self.status == 'Draft' and self.pub_date is not None:
|
||||
raise ValidationError('Draft entries should not have a publication date.')
|
||||
# Set the pub_date for published items if not set.
|
||||
if self.status == 'Published' and self.pub_date is None:
|
||||
self.pub_date = datetime.now()
|
||||
|
||||
.. note::
|
||||
Cleaning is only called if validation is turned on and when calling
|
||||
:meth:`~mongoengine.Document.save`.
|
||||
|
||||
* Adding custom Field classes
|
||||
|
||||
We recommend using the fields provided by MongoEngine as much as possible. However, it is also possible
to subclass a Field and encapsulate some validation by overriding the `validate` method.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class AgeField(IntField):
|
||||
|
||||
def validate(self, value):
|
||||
super(AgeField, self).validate(value) # let IntField.validate run first
|
||||
if value == 60:
|
||||
self.error('60 is not allowed')
|
||||
|
||||
class Person(Document):
|
||||
age = AgeField(min_value=0, max_value=99)
|
||||
|
||||
Person(age=20).save() # passes
|
||||
Person(age=1000).save() # raises ValidationError (Integer value is too large: ['age'])
|
||||
Person(age=60).save() # raises ValidationError (Person:None) (60 is not allowed: ['age'])
|
||||
|
||||
|
||||
.. note::
|
||||
|
||||
When overriding `validate`, use `self.error("your-custom-error")` instead of raising a ValidationError explicitly;
it will provide better context with the error message.
|
||||
|
||||
Skipping validation
|
||||
====================
|
||||
|
||||
Although discouraged as it allows you to violate field constraints, if for some reason you need to disable
|
||||
the validation and cleaning of a document when you call :meth:`~mongoengine.document.Document.save`, you can use `.save(validate=False)`.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class Person(Document):
|
||||
age = IntField(max_value=100)
|
||||
|
||||
Person(age=1000).save() # raises ValidationError (Integer value is too large)
|
||||
|
||||
Person(age=1000).save(validate=False)
|
||||
person = Person.objects.first()
|
||||
assert person.age == 1000
|
@ -7,7 +7,7 @@ MongoDB. To install it, simply run
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ pip install -U mongoengine
|
||||
$ python -m pip install -U mongoengine
|
||||
|
||||
:doc:`tutorial`
|
||||
A quick tutorial building a tumblelog to get you up and running with
|
||||
@ -23,9 +23,18 @@ MongoDB. To install it, simply run
|
||||
:doc:`upgrade`
|
||||
How to upgrade MongoEngine.
|
||||
|
||||
:doc:`faq`
|
||||
Frequently Asked Questions
|
||||
|
||||
:doc:`django`
|
||||
Using MongoEngine and Django
|
||||
|
||||
MongoDB and driver support
|
||||
--------------------------
|
||||
|
||||
MongoEngine is based on the PyMongo driver and tested against multiple versions of MongoDB.
|
||||
For further details, please refer to the `readme <https://github.com/MongoEngine/mongoengine#mongoengine>`_.
|
||||
|
||||
Community
|
||||
---------
|
||||
|
||||
@ -73,6 +82,7 @@ formats for offline reading.
|
||||
apireference
|
||||
changelog
|
||||
upgrade
|
||||
faq
|
||||
django
|
||||
|
||||
Indices and tables
|
||||
@ -81,4 +91,3 @@ Indices and tables
|
||||
* :ref:`genindex`
|
||||
* :ref:`modindex`
|
||||
* :ref:`search`
|
||||
|
||||
|
docs/requirements.txt (new file, 3 lines)
@ -0,0 +1,3 @@
|
||||
Sphinx==3.3.0
|
||||
sphinx-rtd-theme==0.5.0
|
||||
readthedocs-sphinx-ext==2.1.1
|
@ -18,7 +18,7 @@ location --- running it locally will be easier, but if that is not an option
|
||||
then it may be run on a remote server. If you haven't installed MongoEngine,
|
||||
simply use pip to install it like so::
|
||||
|
||||
$ pip install mongoengine
|
||||
$ python -m pip install mongoengine
|
||||
|
||||
Before we can start using MongoEngine, we need to tell it how to connect to our
|
||||
instance of :program:`mongod`. For this we use the :func:`~mongoengine.connect`
|
||||
|
@ -52,7 +52,7 @@ rename its occurrences.
|
||||
This release includes a major rehaul of MongoEngine's code quality and
|
||||
introduces a few breaking changes. It also touches many different parts of
|
||||
the package and although all the changes have been tested and scrutinized,
|
||||
you're encouraged to thorougly test the upgrade.
|
||||
you're encouraged to thoroughly test the upgrade.
|
||||
|
||||
The first breaking change involves renaming `ConnectionError` to `MongoEngineConnectionError`.
|
||||
If you import or catch this exception, you'll need to rename it in your code.
|
||||
@ -85,10 +85,10 @@ by default from now on.
|
||||
|
||||
The 0.8.7 package on pypi was corrupted. If upgrading from 0.8.7 to 0.9.0 please follow: ::
|
||||
|
||||
pip uninstall pymongo
|
||||
pip uninstall mongoengine
|
||||
pip install pymongo==2.8
|
||||
pip install mongoengine
|
||||
python -m pip uninstall pymongo
|
||||
python -m pip uninstall mongoengine
|
||||
python -m pip install pymongo==2.8
|
||||
python -m pip install mongoengine
|
||||
|
||||
0.8.7
|
||||
*****
|
||||
@ -153,7 +153,7 @@ inherited classes like so: ::
|
||||
|
||||
# 4. Remove indexes
|
||||
info = collection.index_information()
|
||||
indexes_to_drop = [key for key, value in info.iteritems()
|
||||
indexes_to_drop = [key for key, value in info.items()
|
||||
if '_types' in dict(value['key'])]
|
||||
for index in indexes_to_drop:
|
||||
collection.drop_index(index)
|
||||
|
@ -1,22 +1,23 @@
|
||||
# Import submodules so that we can expose their __all__
|
||||
from mongoengine import connection
|
||||
from mongoengine import document
|
||||
from mongoengine import errors
|
||||
from mongoengine import fields
|
||||
from mongoengine import queryset
|
||||
from mongoengine import signals
|
||||
from mongoengine import (
|
||||
connection,
|
||||
document,
|
||||
errors,
|
||||
fields,
|
||||
queryset,
|
||||
signals,
|
||||
)
|
||||
|
||||
# Import everything from each submodule so that it can be accessed via
|
||||
# mongoengine, e.g. instead of `from mongoengine.connection import connect`,
|
||||
# users can simply use `from mongoengine import connect`, or even
|
||||
# `from mongoengine import *` and then `connect('testdb')`.
|
||||
from mongoengine.connection import *
|
||||
from mongoengine.document import *
|
||||
from mongoengine.errors import *
|
||||
from mongoengine.fields import *
|
||||
from mongoengine.queryset import *
|
||||
from mongoengine.signals import *
|
||||
|
||||
from mongoengine.connection import * # noqa: F401
|
||||
from mongoengine.document import * # noqa: F401
|
||||
from mongoengine.errors import * # noqa: F401
|
||||
from mongoengine.fields import * # noqa: F401
|
||||
from mongoengine.queryset import * # noqa: F401
|
||||
from mongoengine.signals import * # noqa: F401
|
||||
|
||||
__all__ = (
|
||||
list(document.__all__)
|
||||
@ -28,7 +29,7 @@ __all__ = (
|
||||
)
|
||||
|
||||
|
||||
VERSION = (0, 18, 2)
|
||||
VERSION = (0, 23, 1)
|
||||
|
||||
|
||||
def get_version():
|
||||
|
@ -1,8 +1,6 @@
|
||||
import weakref
|
||||
|
||||
from bson import DBRef
|
||||
import six
|
||||
from six import iteritems
|
||||
|
||||
from mongoengine.common import _import_class
|
||||
from mongoengine.errors import DoesNotExist, MultipleObjectsReturned
|
||||
@ -53,7 +51,7 @@ class BaseDict(dict):
|
||||
if isinstance(instance, BaseDocument):
|
||||
self._instance = weakref.proxy(instance)
|
||||
self._name = name
|
||||
super(BaseDict, self).__init__(dict_items)
|
||||
super().__init__(dict_items)
|
||||
|
||||
def get(self, key, default=None):
|
||||
# get does not use __getitem__ by default so we must override it as well
|
||||
@ -63,18 +61,18 @@ class BaseDict(dict):
|
||||
return default
|
||||
|
||||
def __getitem__(self, key):
|
||||
value = super(BaseDict, self).__getitem__(key)
|
||||
value = super().__getitem__(key)
|
||||
|
||||
EmbeddedDocument = _import_class("EmbeddedDocument")
|
||||
if isinstance(value, EmbeddedDocument) and value._instance is None:
|
||||
value._instance = self._instance
|
||||
elif isinstance(value, dict) and not isinstance(value, BaseDict):
|
||||
value = BaseDict(value, None, "%s.%s" % (self._name, key))
|
||||
super(BaseDict, self).__setitem__(key, value)
|
||||
value = BaseDict(value, None, f"{self._name}.{key}")
|
||||
super().__setitem__(key, value)
|
||||
value._instance = self._instance
|
||||
elif isinstance(value, list) and not isinstance(value, BaseList):
|
||||
value = BaseList(value, None, "%s.%s" % (self._name, key))
|
||||
super(BaseDict, self).__setitem__(key, value)
|
||||
value = BaseList(value, None, f"{self._name}.{key}")
|
||||
super().__setitem__(key, value)
|
||||
value._instance = self._instance
|
||||
return value
|
||||
|
||||
@ -99,7 +97,7 @@ class BaseDict(dict):
|
||||
def _mark_as_changed(self, key=None):
|
||||
if hasattr(self._instance, "_mark_as_changed"):
|
||||
if key:
|
||||
self._instance._mark_as_changed("%s.%s" % (self._name, key))
|
||||
self._instance._mark_as_changed(f"{self._name}.{key}")
|
||||
else:
|
||||
self._instance._mark_as_changed(self._name)
|
||||
|
||||
@ -117,10 +115,13 @@ class BaseList(list):
|
||||
if isinstance(instance, BaseDocument):
|
||||
self._instance = weakref.proxy(instance)
|
||||
self._name = name
|
||||
super(BaseList, self).__init__(list_items)
|
||||
super().__init__(list_items)
|
||||
|
||||
def __getitem__(self, key):
|
||||
value = super(BaseList, self).__getitem__(key)
|
||||
# change a negative index to its positive equivalent because MongoDB does not support negative indexes
|
||||
if isinstance(key, int) and key < 0:
|
||||
key = len(self) + key
|
||||
value = super().__getitem__(key)
|
||||
|
||||
if isinstance(key, slice):
|
||||
# When receiving a slice operator, we don't convert the structure and bind
|
||||
@ -132,19 +133,18 @@ class BaseList(list):
|
||||
value._instance = self._instance
|
||||
elif isinstance(value, dict) and not isinstance(value, BaseDict):
|
||||
# Replace dict by BaseDict
|
||||
value = BaseDict(value, None, "%s.%s" % (self._name, key))
|
||||
super(BaseList, self).__setitem__(key, value)
|
||||
value = BaseDict(value, None, f"{self._name}.{key}")
|
||||
super().__setitem__(key, value)
|
||||
value._instance = self._instance
|
||||
elif isinstance(value, list) and not isinstance(value, BaseList):
|
||||
# Replace list by BaseList
|
||||
value = BaseList(value, None, "%s.%s" % (self._name, key))
|
||||
super(BaseList, self).__setitem__(key, value)
|
||||
value = BaseList(value, None, f"{self._name}.{key}")
|
||||
super().__setitem__(key, value)
|
||||
value._instance = self._instance
|
||||
return value
|
||||
|
||||
def __iter__(self):
|
||||
for v in super(BaseList, self).__iter__():
|
||||
yield v
|
||||
yield from super().__iter__()
|
||||
|
||||
def __getstate__(self):
|
||||
self.instance = None
|
||||
@ -162,7 +162,7 @@ class BaseList(list):
|
||||
# instead, we simply marks the whole list as changed
|
||||
changed_key = None
|
||||
|
||||
result = super(BaseList, self).__setitem__(key, value)
|
||||
result = super().__setitem__(key, value)
|
||||
self._mark_as_changed(changed_key)
|
||||
return result
|
||||
|
||||
@ -177,30 +177,17 @@ class BaseList(list):
|
||||
__iadd__ = mark_as_changed_wrapper(list.__iadd__)
|
||||
__imul__ = mark_as_changed_wrapper(list.__imul__)
|
||||
|
||||
if six.PY2:
|
||||
# Under py3 __setslice__, __delslice__ and __getslice__
|
||||
# are replaced by __setitem__, __delitem__ and __getitem__ with a slice as parameter
|
||||
# so we mimic this under python 2
|
||||
def __setslice__(self, i, j, sequence):
|
||||
return self.__setitem__(slice(i, j), sequence)
|
||||
|
||||
def __delslice__(self, i, j):
|
||||
return self.__delitem__(slice(i, j))
|
||||
|
||||
def __getslice__(self, i, j):
|
||||
return self.__getitem__(slice(i, j))
|
||||
|
||||
def _mark_as_changed(self, key=None):
|
||||
if hasattr(self._instance, "_mark_as_changed"):
|
||||
if key:
|
||||
self._instance._mark_as_changed("%s.%s" % (self._name, key % len(self)))
|
||||
if key is not None:
|
||||
self._instance._mark_as_changed(f"{self._name}.{key % len(self)}")
|
||||
else:
|
||||
self._instance._mark_as_changed(self._name)
|
||||
|
||||
|
||||
class EmbeddedDocumentList(BaseList):
|
||||
def __init__(self, list_items, instance, name):
|
||||
super(EmbeddedDocumentList, self).__init__(list_items, instance, name)
|
||||
super().__init__(list_items, instance, name)
|
||||
self._instance = instance
|
||||
|
||||
@classmethod
|
||||
@ -210,7 +197,7 @@ class EmbeddedDocumentList(BaseList):
|
||||
"""
|
||||
for key, expected_value in kwargs.items():
|
||||
doc_val = getattr(embedded_doc, key)
|
||||
if doc_val != expected_value and six.text_type(doc_val) != expected_value:
|
||||
if doc_val != expected_value and str(doc_val) != expected_value:
|
||||
return False
|
||||
return True
|
||||
|
||||
@ -226,7 +213,7 @@ class EmbeddedDocumentList(BaseList):
|
||||
Filters the list by only including embedded documents with the
|
||||
given keyword arguments.
|
||||
|
||||
This method only supports simple comparison (e.g: .filter(name='John Doe'))
|
||||
This method only supports simple comparison (e.g. .filter(name='John Doe'))
|
||||
and does not support operators like __gte, __lte, __icontains like queryset.filter does
|
||||
|
||||
:param kwargs: The keyword arguments corresponding to the fields to
|
||||
@ -300,11 +287,11 @@ class EmbeddedDocumentList(BaseList):
|
||||
|
||||
def create(self, **values):
|
||||
"""
|
||||
Creates a new embedded document and saves it to the database.
|
||||
Creates a new instance of the EmbeddedDocument and appends it to this EmbeddedDocumentList.
|
||||
|
||||
.. note::
|
||||
The embedded document changes are not automatically saved
|
||||
to the database after calling this method.
|
||||
the instance of the EmbeddedDocument is not automatically saved to the database.
|
||||
You still need to call .save() on the parent Document.
|
||||
|
||||
:param values: A dictionary of values for the embedded document.
|
||||
:return: The new embedded document instance.
|
||||
@ -365,13 +352,13 @@ class EmbeddedDocumentList(BaseList):
|
||||
return len(values)
|
||||
|
||||
|
||||
class StrictDict(object):
|
||||
class StrictDict:
|
||||
__slots__ = ()
|
||||
_special_fields = {"get", "pop", "iteritems", "items", "keys", "create"}
|
||||
_classes = {}
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
for k, v in iteritems(kwargs):
|
||||
for k, v in kwargs.items():
|
||||
setattr(self, k, v)
|
||||
|
||||
def __getitem__(self, key):
|
||||
@ -419,13 +406,13 @@ class StrictDict(object):
|
||||
return (key for key in self.__slots__ if hasattr(self, key))
|
||||
|
||||
def __len__(self):
|
||||
return len(list(iteritems(self)))
|
||||
return len(list(self.items()))
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.items() == other.items()
|
||||
return list(self.items()) == list(other.items())
|
||||
|
||||
def __ne__(self, other):
|
||||
return self.items() != other.items()
|
||||
return not (self == other)
|
||||
|
||||
@classmethod
|
||||
def create(cls, allowed_keys):
|
||||
@ -440,7 +427,7 @@ class StrictDict(object):
|
||||
|
||||
def __repr__(self):
|
||||
return "{%s}" % ", ".join(
|
||||
'"{0!s}": {1!r}'.format(k, v) for k, v in self.items()
|
||||
f'"{k!s}": {v!r}' for k, v in self.items()
|
||||
)
|
||||
|
||||
cls._classes[allowed_keys] = SpecificStrictDict
|
||||
@ -465,9 +452,7 @@ class LazyReference(DBRef):
|
||||
self.document_type = document_type
|
||||
self._cached_doc = cached_doc
|
||||
self.passthrough = passthrough
|
||||
super(LazyReference, self).__init__(
|
||||
self.document_type._get_collection_name(), pk
|
||||
)
|
||||
super().__init__(self.document_type._get_collection_name(), pk)
|
||||
|
||||
def __getitem__(self, name):
|
||||
if not self.passthrough:
|
||||
@ -485,4 +470,4 @@ class LazyReference(DBRef):
|
||||
raise AttributeError()
|
||||
|
||||
def __repr__(self):
|
||||
return "<LazyReference(%s, %r)>" % (self.document_type, self.pk)
|
||||
return f"<LazyReference({self.document_type}, {self.pk!r})>"
|
||||
|
@ -2,10 +2,8 @@ import copy
|
||||
import numbers
|
||||
from functools import partial
|
||||
|
||||
from bson import DBRef, ObjectId, SON, json_util
|
||||
import pymongo
|
||||
import six
|
||||
from six import iteritems
|
||||
from bson import SON, DBRef, ObjectId, json_util
|
||||
|
||||
from mongoengine import signals
|
||||
from mongoengine.base.common import get_document
|
||||
@ -25,14 +23,13 @@ from mongoengine.errors import (
|
||||
OperationError,
|
||||
ValidationError,
|
||||
)
|
||||
from mongoengine.python_support import Hashable
|
||||
|
||||
__all__ = ("BaseDocument", "NON_FIELD_ERRORS")
|
||||
|
||||
NON_FIELD_ERRORS = "__all__"
|
||||
|
||||
|
||||
class BaseDocument(object):
|
||||
class BaseDocument:
|
||||
# TODO simplify how `_changed_fields` is used.
|
||||
# Currently, handling of `_changed_fields` seems unnecessarily convoluted:
|
||||
# 1. `BaseDocument` defines `_changed_fields` in its `__slots__`, yet it's
|
||||
@ -62,13 +59,11 @@ class BaseDocument(object):
|
||||
"""
|
||||
Initialise a document or an embedded document.
|
||||
|
||||
:param dict values: A dictionary of keys and values for the document.
|
||||
:param values: A dictionary of keys and values for the document.
|
||||
It may contain additional reserved keywords, e.g. "__auto_convert".
|
||||
:param bool __auto_convert: If True, supplied values will be converted
|
||||
:param __auto_convert: If True, supplied values will be converted
|
||||
to Python-type values via each field's `to_python` method.
|
||||
:param set __only_fields: A set of fields that have been loaded for
|
||||
this document. Empty if all fields have been loaded.
|
||||
:param bool _created: Indicates whether this is a brand new document
|
||||
:param _created: Indicates whether this is a brand new document
|
||||
or whether it's already been persisted before. Defaults to true.
|
||||
"""
|
||||
self._initialised = False
|
||||
@ -82,8 +77,6 @@ class BaseDocument(object):
|
||||
|
||||
__auto_convert = values.pop("__auto_convert", True)
|
||||
|
||||
__only_fields = set(values.pop("__only_fields", values))
|
||||
|
||||
_created = values.pop("_created", True)
|
||||
|
||||
signals.pre_init.send(self.__class__, document=self, values=values)
|
||||
@ -92,12 +85,10 @@ class BaseDocument(object):
|
||||
# if so raise an Exception.
|
||||
if not self._dynamic and (self._meta.get("strict", True) or _created):
|
||||
_undefined_fields = set(values.keys()) - set(
|
||||
self._fields.keys() + ["id", "pk", "_cls", "_text_score"]
|
||||
list(self._fields.keys()) + ["id", "pk", "_cls", "_text_score"]
|
||||
)
|
||||
if _undefined_fields:
|
||||
msg = ('The fields "{0}" do not exist on the document "{1}"').format(
|
||||
_undefined_fields, self._class_name
|
||||
)
|
||||
msg = f'The fields "{_undefined_fields}" do not exist on the document "{self._class_name}"'
|
||||
raise FieldDoesNotExist(msg)
|
||||
|
||||
if self.STRICT and not self._dynamic:
|
||||
@ -107,37 +98,32 @@ class BaseDocument(object):
|
||||
|
||||
self._dynamic_fields = SON()
|
||||
|
||||
# Assign default values to the instance.
|
||||
# We set default values only for fields loaded from DB. See
|
||||
# https://github.com/mongoengine/mongoengine/issues/399 for more info.
|
||||
for key, field in iteritems(self._fields):
|
||||
if self._db_field_map.get(key, key) in __only_fields:
|
||||
# Assign default values for fields
|
||||
# not set in the constructor
|
||||
for field_name in self._fields:
|
||||
if field_name in values:
|
||||
continue
|
||||
value = getattr(self, key, None)
|
||||
setattr(self, key, value)
|
||||
value = getattr(self, field_name, None)
|
||||
setattr(self, field_name, value)
|
||||
|
||||
if "_cls" not in values:
|
||||
self._cls = self._class_name
|
||||
|
||||
# Set passed values after initialisation
|
||||
if self._dynamic:
|
||||
# Set actual values
|
||||
dynamic_data = {}
|
||||
for key, value in iteritems(values):
|
||||
if key in self._fields or key == "_id":
|
||||
setattr(self, key, value)
|
||||
else:
|
||||
dynamic_data[key] = value
|
||||
else:
|
||||
FileField = _import_class("FileField")
|
||||
for key, value in iteritems(values):
|
||||
key = self._reverse_db_field_map.get(key, key)
|
||||
if key in self._fields or key in ("id", "pk", "_cls"):
|
||||
if __auto_convert and value is not None:
|
||||
for key, value in values.items():
|
||||
field = self._fields.get(key)
|
||||
if field or key in ("id", "pk", "_cls"):
|
||||
if __auto_convert and value is not None:
|
||||
if field and not isinstance(field, FileField):
|
||||
value = field.to_python(value)
|
||||
setattr(self, key, value)
|
||||
else:
|
||||
if self._dynamic:
|
||||
dynamic_data[key] = value
|
||||
else:
|
||||
# For strict Document
|
||||
self._data[key] = value
|
||||
|
||||
# Set any get_<field>_display methods
|
||||
@ -145,7 +131,7 @@ class BaseDocument(object):
|
||||
|
||||
if self._dynamic:
|
||||
self._dynamic_lock = False
|
||||
for key, value in iteritems(dynamic_data):
|
||||
for key, value in dynamic_data.items():
|
||||
setattr(self, key, value)
|
||||
|
||||
# Flag initialised
|
||||
@ -163,13 +149,13 @@ class BaseDocument(object):
|
||||
default = default()
|
||||
setattr(self, field_name, default)
|
||||
else:
|
||||
super(BaseDocument, self).__delattr__(*args, **kwargs)
|
||||
super().__delattr__(*args, **kwargs)
|
||||
|
||||
def __setattr__(self, name, value):
|
||||
# Handle dynamic data only if an initialised dynamic document
|
||||
if self._dynamic and not self._dynamic_lock:
|
||||
|
||||
if not hasattr(self, name) and not name.startswith("_"):
|
||||
if name not in self._fields_ordered and not name.startswith("_"):
|
||||
DynamicField = _import_class("DynamicField")
|
||||
field = DynamicField(db_field=name, null=True)
|
||||
field.name = name
|
||||
@ -210,9 +196,9 @@ class BaseDocument(object):
|
||||
and self__created
|
||||
and name == self._meta.get("id_field")
|
||||
):
|
||||
super(BaseDocument, self).__setattr__("_created", False)
|
||||
super().__setattr__("_created", False)
|
||||
|
||||
super(BaseDocument, self).__setattr__(name, value)
|
||||
super().__setattr__(name, value)
|
||||
|
||||
def __getstate__(self):
data = {}
@@ -242,10 +228,10 @@ class BaseDocument(object):
setattr(self, k, data[k])
if "_fields_ordered" in data:
if self._dynamic:
setattr(self, "_fields_ordered", data["_fields_ordered"])
self._fields_ordered = data["_fields_ordered"]
else:
_super_fields_ordered = type(self)._fields_ordered
setattr(self, "_fields_ordered", _super_fields_ordered)
self._fields_ordered = _super_fields_ordered

dynamic_fields = data.get("_dynamic_fields") or SON()
for k in dynamic_fields.keys():
@@ -255,8 +241,7 @@ class BaseDocument(object):
return iter(self._fields_ordered)

def __getitem__(self, name):
"""Dictionary-style field access, return a field's value if present.
"""
"""Dictionary-style field access, return a field's value if present."""
try:
if name in self._fields_ordered:
return getattr(self, name)
@@ -265,8 +250,7 @@ class BaseDocument(object):
raise KeyError(name)

def __setitem__(self, name, value):
"""Dictionary-style field access, set a field's value.
"""
"""Dictionary-style field access, set a field's value."""
# Ensure that the field exists before setting its value
if not self._dynamic and name not in self._fields:
raise KeyError(name)
@@ -288,16 +272,13 @@ class BaseDocument(object):
except (UnicodeEncodeError, UnicodeDecodeError):
u = "[Bad Unicode data]"
repr_type = str if u is None else type(u)
return repr_type("<%s: %s>" % (self.__class__.__name__, u))
return repr_type(f"<{self.__class__.__name__}: {u}>")

def __str__(self):
# TODO this could be simpler?
if hasattr(self, "__unicode__"):
if six.PY3:
return self.__unicode__()
else:
return six.text_type(self).encode("utf-8")
return six.text_type("%s object" % self.__class__.__name__)
return "%s object" % self.__class__.__name__

def __eq__(self, other):
if (
@@ -319,7 +300,8 @@ class BaseDocument(object):

def clean(self):
"""
Hook for doing document level data cleaning before validation is run.
Hook for doing document level data cleaning (usually validation or assignment)
before validation is run.

Any ValidationError raised by this method will not be associated with
a particular field; it will have a special-case association with the
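A typical `clean()` override, in the spirit of the reworded docstring (the `Essay` class is illustrative, not part of this diff):

```python
from datetime import datetime
from mongoengine import Document, StringField, DateTimeField, ValidationError

class Essay(Document):
    status = StringField(choices=("Published", "Draft"), required=True)
    pub_date = DateTimeField()

    def clean(self):
        # Runs inside validate(), before per-field validation; errors raised
        # here are reported without a specific field association.
        if self.status == "Draft" and self.pub_date is not None:
            raise ValidationError("Draft entries should not have a publication date.")
        # "assignment" use case from the docstring: derive a value pre-save.
        if self.status == "Published" and self.pub_date is None:
            self.pub_date = datetime.utcnow()
```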
@@ -446,7 +428,7 @@ class BaseDocument(object):
pk = self.pk
elif self._instance and hasattr(self._instance, "pk"):
pk = self._instance.pk
message = "ValidationError (%s:%s) " % (self._class_name, pk)
message = f"ValidationError ({self._class_name}:{pk}) "
raise ValidationError(message, errors=errors)

def to_json(self, *args, **kwargs):
@@ -519,7 +501,7 @@ class BaseDocument(object):
if "." in key:
key, rest = key.split(".", 1)
key = self._db_field_map.get(key, key)
key = "%s.%s" % (key, rest)
key = f"{key}.{rest}"
else:
key = self._db_field_map.get(key, key)

@@ -542,6 +524,9 @@ class BaseDocument(object):
"""Using _get_changed_fields iterate and remove any fields that
are marked as changed.
"""
ReferenceField = _import_class("ReferenceField")
GenericReferenceField = _import_class("GenericReferenceField")

for changed in self._get_changed_fields():
parts = changed.split(".")
data = self
@@ -554,7 +539,8 @@ class BaseDocument(object):
elif isinstance(data, dict):
data = data.get(part, None)
else:
data = getattr(data, part, None)
field_name = data._reverse_db_field_map.get(part, part)
data = getattr(data, field_name, None)

if not isinstance(data, LazyReference) and hasattr(
data, "_changed_fields"
@@ -563,10 +549,40 @@ class BaseDocument(object):
continue

data._changed_fields = []
elif isinstance(data, (list, tuple, dict)):
if hasattr(data, "field") and isinstance(
data.field, (ReferenceField, GenericReferenceField)
):
continue
BaseDocument._nestable_types_clear_changed_fields(data)

self._changed_fields = []

def _nestable_types_changed_fields(self, changed_fields, base_key, data):
@staticmethod
def _nestable_types_clear_changed_fields(data):
"""Inspect nested data for changed fields

:param data: data to inspect for changes
"""
Document = _import_class("Document")

# Loop list / dict fields as they contain documents
# Determine the iterator to use
if not hasattr(data, "items"):
iterator = enumerate(data)
else:
iterator = data.items()

for _index_or_key, value in iterator:
if hasattr(value, "_get_changed_fields") and not isinstance(
value, Document
): # don't follow references
value._clear_changed_fields()
elif isinstance(value, (list, tuple, dict)):
BaseDocument._nestable_types_clear_changed_fields(value)

@staticmethod
def _nestable_types_changed_fields(changed_fields, base_key, data):
"""Inspect nested data for changed fields

:param changed_fields: Previously collected changed fields
@@ -578,10 +594,10 @@ class BaseDocument(object):
if not hasattr(data, "items"):
iterator = enumerate(data)
else:
iterator = iteritems(data)
iterator = data.items()

for index_or_key, value in iterator:
item_key = "%s%s." % (base_key, index_or_key)
item_key = f"{base_key}{index_or_key}."
# don't check anything lower if this key is already marked
# as changed.
if item_key[:-1] in changed_fields:
@@ -589,15 +605,18 @@ class BaseDocument(object):

if hasattr(value, "_get_changed_fields"):
changed = value._get_changed_fields()
changed_fields += ["%s%s" % (item_key, k) for k in changed if k]
changed_fields += [f"{item_key}{k}" for k in changed if k]
elif isinstance(value, (list, tuple, dict)):
self._nestable_types_changed_fields(changed_fields, item_key, value)
BaseDocument._nestable_types_changed_fields(
changed_fields, item_key, value
)

def _get_changed_fields(self):
"""Return a list of all fields that have explicitly been changed.
"""
"""Return a list of all fields that have explicitly been changed."""
EmbeddedDocument = _import_class("EmbeddedDocument")
LazyReferenceField = _import_class("LazyReferenceField")
ReferenceField = _import_class("ReferenceField")
GenericLazyReferenceField = _import_class("GenericLazyReferenceField")
GenericReferenceField = _import_class("GenericReferenceField")
SortedListField = _import_class("SortedListField")

@@ -620,10 +639,16 @@ class BaseDocument(object):
if isinstance(data, EmbeddedDocument):
# Find all embedded fields that have been changed
changed = data._get_changed_fields()
changed_fields += ["%s%s" % (key, k) for k in changed if k]
changed_fields += [f"{key}{k}" for k in changed if k]
elif isinstance(data, (list, tuple, dict)):
if hasattr(field, "field") and isinstance(
field.field, (ReferenceField, GenericReferenceField)
field.field,
(
LazyReferenceField,
ReferenceField,
GenericLazyReferenceField,
GenericReferenceField,
),
):
continue
elif isinstance(field, SortedListField) and field._ordering:
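The changed-field machinery above is what lets `save()` issue partial `$set` updates instead of rewriting whole documents. A rough sketch of the observable behaviour, assuming a saved document already exists (class and field names are illustrative):

```python
from mongoengine import Document, StringField, IntField

class Person(Document):
    name = StringField()
    age = IntField()

person = Person.objects.first()  # assumes a saved Person exists
person.age = 42
# Only explicitly assigned fields are reported as changed...
assert "age" in person._get_changed_fields()
person.save()
# ...and a successful save clears the tracking list again.
assert person._changed_fields == []
```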
@@ -670,7 +695,7 @@ class BaseDocument(object):
del set_data["_id"]

# Determine if any changed items were actually unset.
for path, value in set_data.items():
for path, value in list(set_data.items()):
if value or isinstance(
value, (numbers.Number, bool)
): # Account for 0 and True that are truthy
@@ -726,13 +751,13 @@ class BaseDocument(object):
return cls._meta.get("collection", None)

@classmethod
def _from_son(cls, son, _auto_dereference=True, only_fields=None, created=False):
"""Create an instance of a Document (subclass) from a PyMongo SON."""
if not only_fields:
only_fields = []

def _from_son(cls, son, _auto_dereference=True, created=False):
"""Create an instance of a Document (subclass) from a PyMongo SON (dict)"""
if son and not isinstance(son, dict):
raise ValueError("The source SON object needs to be of type 'dict'")
raise ValueError(
"The source SON object needs to be of type 'dict' but a '%s' was found"
% type(son)
)

# Get the class name from the document, falling back to the given
# class if unavailable
@@ -740,8 +765,10 @@ class BaseDocument(object):

# Convert SON to a data dict, making sure each key is a string and
# corresponds to the right db field.
# This is needed as _from_son is currently called both from BaseDocument.__init__
# and from EmbeddedDocumentField.to_python
data = {}
for key, value in iteritems(son):
for key, value in son.items():
key = str(key)
key = cls._db_field_map.get(key, key)
data[key] = value
@@ -756,7 +783,7 @@ class BaseDocument(object):
if not _auto_dereference:
fields = copy.deepcopy(fields)

for field_name, field in iteritems(fields):
for field_name, field in fields.items():
field._auto_dereference = _auto_dereference
if field.db_field in data:
value = data[field.db_field]
@@ -770,8 +797,8 @@ class BaseDocument(object):
errors_dict[field_name] = e

if errors_dict:
errors = "\n".join(["%s - %s" % (k, v) for k, v in errors_dict.items()])
msg = "Invalid data to create a `%s` instance.\n%s" % (
errors = "\n".join([f"Field '{k}' - {v}" for k, v in errors_dict.items()])
msg = "Invalid data to create a `{}` instance.\n{}".format(
cls._class_name,
errors,
)
@@ -779,11 +806,9 @@ class BaseDocument(object):

# In STRICT documents, remove any keys that aren't in cls._fields
if cls.STRICT:
data = {k: v for k, v in iteritems(data) if k in cls._fields}
data = {k: v for k, v in data.items() if k in cls._fields}

obj = cls(
__auto_convert=False, _created=created, __only_fields=only_fields, **data
)
obj = cls(__auto_convert=False, _created=created, **data)
obj._changed_fields = []
if not _auto_dereference:
obj._fields = fields
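`_from_son` is the internal deserialisation path used when raw documents come back from PyMongo. A small sketch of what the stricter type check rejects and accepts (the `City` class is illustrative):

```python
from mongoengine import Document, StringField

class City(Document):
    name = StringField(db_field="n")

# Raw data keyed by db fields, as PyMongo would return it:
city = City._from_son({"n": "Berlin"})
assert city.name == "Berlin"

# Anything that is not a dict now raises a descriptive ValueError:
try:
    City._from_son(["not", "a", "dict"])
except ValueError as e:
    print(e)  # message mentions the offending type
```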
@@ -826,7 +851,7 @@ class BaseDocument(object):
@classmethod
def _build_index_spec(cls, spec):
"""Build a PyMongo index spec from a MongoEngine index spec."""
if isinstance(spec, six.string_types):
if isinstance(spec, str):
spec = {"fields": [spec]}
elif isinstance(spec, (list, tuple)):
spec = {"fields": list(spec)}
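The normalisation above means an index can be declared in several equivalent shorthands; a sketch of the accepted `meta` forms (field names illustrative):

```python
from mongoengine import Document, StringField, IntField

class Article(Document):
    title = StringField()
    rating = IntField()

    meta = {
        "indexes": [
            "title",                                 # plain string
            ("title", "-rating"),                    # tuple, "-" for descending
            {"fields": ["rating"], "sparse": True},  # full dict spec
        ]
    }
```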
@@ -923,7 +948,7 @@ class BaseDocument(object):

# Add any unique_with fields to the back of the index spec
if field.unique_with:
if isinstance(field.unique_with, six.string_types):
if isinstance(field.unique_with, str):
field.unique_with = [field.unique_with]

# Convert unique_with field names to real field names
@@ -943,9 +968,7 @@ class BaseDocument(object):
unique_fields += unique_with

# Add the new index to the list
fields = [
("%s%s" % (namespace, f), pymongo.ASCENDING) for f in unique_fields
]
fields = [(f"{namespace}{f}", pymongo.ASCENDING) for f in unique_fields]
index = {"fields": fields, "unique": True, "sparse": sparse}
unique_indexes.append(index)

@@ -981,9 +1004,7 @@ class BaseDocument(object):
"PolygonField",
)

geo_field_types = tuple(
[_import_class(field) for field in geo_field_type_names]
)
geo_field_types = tuple(_import_class(field) for field in geo_field_type_names)

for field in cls._fields.values():
if not isinstance(field, geo_field_types):
@@ -1001,7 +1022,7 @@ class BaseDocument(object):
elif field._geo_index:
field_name = field.db_field
if parent_field:
field_name = "%s.%s" % (parent_field, field_name)
field_name = f"{parent_field}.{field_name}"
geo_indices.append({"fields": [(field_name, field._geo_index)]})

return geo_indices
@@ -1139,8 +1160,7 @@ class BaseDocument(object):

@classmethod
def _translate_field_name(cls, field, sep="."):
"""Translate a field attribute name to a database field name.
"""
"""Translate a field attribute name to a database field name."""
parts = field.split(sep)
parts = [f.db_field for f in cls._lookup_field(parts)]
return ".".join(parts)
@@ -1170,9 +1190,6 @@ class BaseDocument(object):
else [value]
)
return sep.join(
[
six.text_type(dict(field.choices).get(val, val))
for val in values or []
]
[str(dict(field.choices).get(val, val)) for val in values or []]
)
return value

@@ -1,28 +1,27 @@
import operator
import warnings
import weakref

from bson import DBRef, ObjectId, SON
import pymongo
import six
from six import iteritems
from bson import SON, DBRef, ObjectId

from mongoengine.base.common import UPDATE_OPERATORS
from mongoengine.base.datastructures import BaseDict, BaseList, EmbeddedDocumentList
from mongoengine.base.datastructures import (
BaseDict,
BaseList,
EmbeddedDocumentList,
)
from mongoengine.common import _import_class
from mongoengine.errors import DeprecatedError, ValidationError

__all__ = ("BaseField", "ComplexBaseField", "ObjectIdField", "GeoJsonBaseField")


class BaseField(object):
class BaseField:
"""A base class for fields in a MongoDB document. Instances of this class
may be added to subclasses of `Document` to define a document's schema.

.. versionchanged:: 0.5 - added verbose and help text
"""

name = None
name = None  # set in TopLevelDocumentMetaclass
_geo_index = False
_auto_gen = False  # Call `generate` to generate a value
_auto_dereference = True
@@ -36,7 +35,6 @@ class BaseField(object):
def __init__(
self,
db_field=None,
name=None,
required=False,
default=None,
unique=False,
@@ -46,12 +44,11 @@ class BaseField(object):
choices=None,
null=False,
sparse=False,
**kwargs
**kwargs,
):
"""
:param db_field: The database field to store this field in
(defaults to the name of the field)
:param name: Deprecated - use db_field
:param required: If the field is required. Whether it has to have a
value or not. Defaults to False.
:param default: (optional) The default value for this field if no value
@@ -75,11 +72,8 @@ class BaseField(object):
existing attributes. Common metadata includes `verbose_name` and
`help_text`.
"""
self.db_field = (db_field or name) if not primary_key else "_id"
self.db_field = db_field if not primary_key else "_id"

if name:
msg = 'Field\'s "name" attribute deprecated in favour of "db_field"'
warnings.warn(msg, DeprecationWarning)
self.required = required or primary_key
self.default = default
self.unique = bool(unique or unique_with)
@@ -92,13 +86,11 @@ class BaseField(object):
self._owner_document = None

# Make sure db_field is a string (if it's explicitly defined).
if self.db_field is not None and not isinstance(
self.db_field, six.string_types
):
if self.db_field is not None and not isinstance(self.db_field, str):
raise TypeError("db_field should be a string.")

# Make sure db_field doesn't contain any forbidden characters.
if isinstance(self.db_field, six.string_types) and (
if isinstance(self.db_field, str) and (
"." in self.db_field
or "\0" in self.db_field
or self.db_field.startswith("$")
@@ -129,8 +121,7 @@ class BaseField(object):
BaseField.creation_counter += 1

def __get__(self, instance, owner):
"""Descriptor for retrieving a value from a field in a document.
"""
"""Descriptor for retrieving a value from a field in a document."""
if instance is None:
# Document class being used rather than a document object
return self
@@ -221,14 +212,12 @@ class BaseField(object):
# Choices which are other types of Documents
if isinstance(value, (Document, EmbeddedDocument)):
if not any(isinstance(value, c) for c in choice_list):
self.error(
"Value must be an instance of %s" % (six.text_type(choice_list))
)
self.error("Value must be an instance of %s" % (choice_list))
# Choices which are types other than Documents
else:
values = value if isinstance(value, (list, tuple)) else [value]
if len(set(values) - set(choice_list)):
self.error("Value must be one of %s" % six.text_type(choice_list))
self.error("Value must be one of %s" % str(choice_list))

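The choices check above validates scalar values and lists against a field's `choices`; a small sketch of the behaviour (the `Shirt` class is illustrative):

```python
from mongoengine import Document, StringField, ValidationError

SIZES = ("S", "M", "L")

class Shirt(Document):
    size = StringField(choices=SIZES)

Shirt(size="M").validate()  # passes

try:
    Shirt(size="XXL").validate()
except ValidationError as e:
    print(e)  # "Value must be one of ('S', 'M', 'L')"
```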
def _validate(self, value, **kwargs):
# Check the Choices Constraint
@@ -276,11 +265,22 @@ class ComplexBaseField(BaseField):
Allows for nesting of embedded documents inside complex types.
Handles the lazy dereferencing of a queryset by lazily dereferencing all
items in a list / dict rather than one at a time.

.. versionadded:: 0.5
"""

field = None
def __init__(self, field=None, **kwargs):
self.field = field
super().__init__(**kwargs)

@staticmethod
def _lazy_load_refs(instance, name, ref_values, *, max_depth):
_dereference = _import_class("DeReference")()
documents = _dereference(
ref_values,
max_depth=max_depth,
instance=instance,
name=name,
)
return documents

def __get__(self, instance, owner):
"""Descriptor to automatically dereference references."""
@@ -299,24 +299,20 @@ class ComplexBaseField(BaseField):
or isinstance(self.field, (GenericReferenceField, ReferenceField))
)

_dereference = _import_class("DeReference")()

if (
instance._initialised
and dereference
and instance._data.get(self.name)
and not getattr(instance._data[self.name], "_dereferenced", False)
):
instance._data[self.name] = _dereference(
instance._data.get(self.name),
max_depth=1,
instance=instance,
name=self.name,
ref_values = instance._data.get(self.name)
instance._data[self.name] = self._lazy_load_refs(
ref_values=ref_values, instance=instance, name=self.name, max_depth=1
)
if hasattr(instance._data[self.name], "_dereferenced"):
instance._data[self.name]._dereferenced = True

value = super(ComplexBaseField, self).__get__(instance, owner)
value = super().__get__(instance, owner)

# Convert lists / values so we can watch for any changes on them
if isinstance(value, (list, tuple)):
@@ -337,7 +333,9 @@ class ComplexBaseField(BaseField):
and isinstance(value, (BaseList, BaseDict))
and not value._dereferenced
):
value = _dereference(value, max_depth=1, instance=instance, name=self.name)
value = self._lazy_load_refs(
ref_values=value, instance=instance, name=self.name, max_depth=1
)
value._dereferenced = True
instance._data[self.name] = value

@@ -345,7 +343,7 @@ class ComplexBaseField(BaseField):

def to_python(self, value):
"""Convert a MongoDB-compatible type to a Python type."""
if isinstance(value, six.string_types):
if isinstance(value, str):
return value

if hasattr(value, "to_python"):
@@ -399,7 +397,7 @@ class ComplexBaseField(BaseField):
EmbeddedDocument = _import_class("EmbeddedDocument")
GenericReferenceField = _import_class("GenericReferenceField")

if isinstance(value, six.string_types):
if isinstance(value, str):
return value

if hasattr(value, "to_mongo"):
@@ -423,11 +421,11 @@ class ComplexBaseField(BaseField):
if self.field:
value_dict = {
key: self.field._to_mongo_safe_call(item, use_db_field, fields)
for key, item in iteritems(value)
for key, item in value.items()
}
else:
value_dict = {}
for k, v in iteritems(value):
for k, v in value.items():
if isinstance(v, Document):
# We need the id from the saved object to create the DBRef
if v.pk is None:
@@ -466,8 +464,8 @@ class ComplexBaseField(BaseField):
"""If field is provided ensure the value is valid."""
errors = {}
if self.field:
if hasattr(value, "iteritems") or hasattr(value, "items"):
sequence = iteritems(value)
if hasattr(value, "items"):
sequence = value.items()
else:
sequence = enumerate(value)
for k, v in sequence:
@@ -480,7 +478,7 @@ class ComplexBaseField(BaseField):

if errors:
field_class = self.field.__class__.__name__
self.error("Invalid %s item (%s)" % (field_class, value), errors=errors)
self.error(f"Invalid {field_class} item ({value})", errors=errors)
# Don't allow empty values if required
if self.required and not value:
self.error("Field is required and cannot be empty")
@@ -513,10 +511,9 @@ class ObjectIdField(BaseField):
def to_mongo(self, value):
if not isinstance(value, ObjectId):
try:
return ObjectId(six.text_type(value))
return ObjectId(str(value))
except Exception as e:
# e.message attribute has been deprecated since Python 2.6
self.error(six.text_type(e))
self.error(str(e))
return value

def prepare_query_value(self, op, value):
@@ -524,16 +521,13 @@ class ObjectIdField(BaseField):

def validate(self, value):
try:
ObjectId(six.text_type(value))
ObjectId(str(value))
except Exception:
self.error("Invalid Object ID")
self.error("Invalid ObjectID")


class GeoJsonBaseField(BaseField):
"""A geo json field storing a geojson style object.

.. versionadded:: 0.8
"""
"""A geo json field storing a geojson style object."""

_geo_index = pymongo.GEOSPHERE
_type = "GeoBase"
@@ -546,14 +540,14 @@ class GeoJsonBaseField(BaseField):
self._name = "%sField" % self._type
if not auto_index:
self._geo_index = False
super(GeoJsonBaseField, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)

def validate(self, value):
"""Validate the GeoJson object based on its type."""
if isinstance(value, dict):
if set(value.keys()) == {"type", "coordinates"}:
if value["type"] != self._type:
self.error('%s type must be "%s"' % (self._name, self._type))
self.error(f'{self._name} type must be "{self._type}"')
return self.validate(value["coordinates"])
else:
self.error(

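For reference, concrete subclasses such as `PointField` validate dicts of exactly this `{"type", "coordinates"}` shape; a sketch (the `Place` class is illustrative):

```python
from mongoengine import Document, PointField

class Place(Document):
    location = PointField()  # stores a GeoJSON Point, indexed with 2dsphere

place = Place(location={"type": "Point", "coordinates": [-0.1278, 51.5074]})
place.validate()  # a wrong "type" or malformed coordinates would raise
```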
@@ -1,11 +1,12 @@
import itertools
import warnings

import six
from six import iteritems, itervalues

from mongoengine.base.common import _document_registry
from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField
from mongoengine.base.fields import (
BaseField,
ComplexBaseField,
ObjectIdField,
)
from mongoengine.common import _import_class
from mongoengine.errors import InvalidDocumentError
from mongoengine.queryset import (
@@ -15,7 +16,6 @@ from mongoengine.queryset import (
QuerySetManager,
)


__all__ = ("DocumentMetaclass", "TopLevelDocumentMetaclass")


@@ -25,7 +25,7 @@ class DocumentMetaclass(type):
# TODO lower complexity of this method
def __new__(mcs, name, bases, attrs):
flattened_bases = mcs._get_bases(bases)
super_new = super(DocumentMetaclass, mcs).__new__
super_new = super().__new__

# If a base class just call super
metaclass = attrs.get("my_metaclass")
@@ -69,7 +69,7 @@ class DocumentMetaclass(type):
# Standard object mixin - merge in any Fields
if not hasattr(base, "_meta"):
base_fields = {}
for attr_name, attr_value in iteritems(base.__dict__):
for attr_name, attr_value in base.__dict__.items():
if not isinstance(attr_value, BaseField):
continue
attr_value.name = attr_name
@@ -81,7 +81,7 @@ class DocumentMetaclass(type):

# Discover any document fields
field_names = {}
for attr_name, attr_value in iteritems(attrs):
for attr_name, attr_value in attrs.items():
if not isinstance(attr_value, BaseField):
continue
attr_value.name = attr_name
@@ -111,9 +111,7 @@ class DocumentMetaclass(type):

attrs["_fields_ordered"] = tuple(
i[1]
for i in sorted(
(v.creation_counter, v.name) for v in itervalues(doc_fields)
)
for i in sorted((v.creation_counter, v.name) for v in doc_fields.values())
)

#
@@ -173,24 +171,8 @@ class DocumentMetaclass(type):
# Add class to the _document_registry
_document_registry[new_class._class_name] = new_class

# In Python 2, User-defined methods objects have special read-only
# attributes 'im_func' and 'im_self' which contain the function obj
# and class instance object respectively. With Python 3 these special
# attributes have been replaced by __func__ and __self__. The Blinker
# module continues to use im_func and im_self, so the code below
# copies __func__ into im_func and __self__ into im_self for
# classmethod objects in Document derived classes.
if six.PY3:
for val in new_class.__dict__.values():
if isinstance(val, classmethod):
f = val.__get__(new_class)
if hasattr(f, "__func__") and not hasattr(f, "im_func"):
f.__dict__.update({"im_func": getattr(f, "__func__")})
if hasattr(f, "__self__") and not hasattr(f, "im_self"):
f.__dict__.update({"im_self": getattr(f, "__self__")})

# Handle delete rules
for field in itervalues(new_class._fields):
for field in new_class._fields.values():
f = field
if f.owner_document is None:
f.owner_document = new_class
@@ -252,8 +234,7 @@ class DocumentMetaclass(type):
if base is object:
continue
yield base
for child_base in mcs.__get_bases(base.__bases__):
yield child_base
yield from mcs.__get_bases(base.__bases__)

@classmethod
def _import_classes(mcs):
@@ -271,7 +252,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):

def __new__(mcs, name, bases, attrs):
flattened_bases = mcs._get_bases(bases)
super_new = super(TopLevelDocumentMetaclass, mcs).__new__
super_new = super().__new__

# Set default _meta data if base class, otherwise get user defined meta
if attrs.get("my_metaclass") == TopLevelDocumentMetaclass:
@@ -284,7 +265,6 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
"indexes": [],  # indexes to be ensured at runtime
"id_field": None,
"index_background": False,
"index_drop_dups": False,
"index_opts": None,
"delete_rules": None,
# allow_inheritance can be True, False, and None. True means
@@ -360,7 +340,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
# allow_inheritance to False. If the base Document allows inheritance,
# none of its subclasses can override allow_inheritance to False.
simple_class = all(
[b._meta.get("abstract") for b in flattened_bases if hasattr(b, "_meta")]
b._meta.get("abstract") for b in flattened_bases if hasattr(b, "_meta")
)
if (
not simple_class
@@ -399,7 +379,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
new_class.objects = QuerySetManager()

# Validate the fields and set primary key if needed
for field_name, field in iteritems(new_class._fields):
for field_name, field in new_class._fields.items():
if field.primary_key:
# Ensure only one primary key is set
current_pk = new_class._meta.get("id_field")
@@ -462,8 +442,8 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):

id_basename, id_db_basename, i = ("auto_id", "_auto_id", 0)
for i in itertools.count():
id_name = "{0}_{1}".format(id_basename, i)
id_db_name = "{0}_{1}".format(id_db_basename, i)
id_name = f"{id_basename}_{i}"
id_db_name = f"{id_db_basename}_{i}"
if id_name not in existing_fields and id_db_name not in existing_db_fields:
return id_name, id_db_name

@@ -476,7 +456,7 @@ class MetaDict(dict):
_merge_options = ("indexes",)

def merge(self, new_options):
for k, v in iteritems(new_options):
for k, v in new_options.items():
if k in self._merge_options:
self[k] = self.get(k, []) + v
else:

@@ -1,7 +1,7 @@
import re


class LazyRegexCompiler(object):
class LazyRegexCompiler:
"""Descriptor to allow lazy compilation of regex"""

def __init__(self, pattern, flags=0):
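The hunk above only shows the constructor; a sketch of how such a lazy-compilation descriptor typically works, reconstructed from the docstring and not guaranteed to match the file verbatim:

```python
import re

class LazyRegexCompiler:
    """Descriptor that defers re.compile() until the pattern is first used."""

    def __init__(self, pattern, flags=0):
        self._pattern = pattern
        self._flags = flags
        self._compiled_regex = None

    def __get__(self, instance, owner):
        # Compile once, on first attribute access, then cache.
        if self._compiled_regex is None:
            self._compiled_regex = re.compile(self._pattern, self._flags)
        return self._compiled_regex

class EmailValidator:
    # Illustrative pattern; importing the module no longer pays compile cost.
    EMAIL_REGEX = LazyRegexCompiler(r"^[^@]+@[^@]+$")
```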
@@ -1,6 +1,5 @@
from pymongo import MongoClient, ReadPreference, uri_parser
from pymongo.database import _check_name
import six

__all__ = [
"DEFAULT_CONNECTION_NAME",
@@ -39,8 +38,8 @@ def _check_db_name(name):
"""Check if a database name is valid.
This functionality is copied from pymongo Database class constructor.
"""
if not isinstance(name, six.string_types):
raise TypeError("name must be an instance of %s" % six.string_types)
if not isinstance(name, str):
raise TypeError("name must be an instance of %s" % str)
elif name != "$external":
_check_name(name)

@@ -55,28 +54,26 @@ def _get_connection_settings(
password=None,
authentication_source=None,
authentication_mechanism=None,
**kwargs
**kwargs,
):
"""Get the connection settings as a dict

: param db: the name of the database to use, for compatibility with connect
: param name: the name of the specific database to use
: param host: the host name of the: program: `mongod` instance to connect to
: param port: the port that the: program: `mongod` instance is running on
: param read_preference: The read preference for the collection
: param username: username to authenticate with
: param password: password to authenticate with
: param authentication_source: database to authenticate against
: param authentication_mechanism: database authentication mechanisms.
:param db: the name of the database to use, for compatibility with connect
:param name: the name of the specific database to use
:param host: the host name of the: program: `mongod` instance to connect to
:param port: the port that the: program: `mongod` instance is running on
:param read_preference: The read preference for the collection
:param username: username to authenticate with
:param password: password to authenticate with
:param authentication_source: database to authenticate against
:param authentication_mechanism: database authentication mechanisms.
By default, use SCRAM-SHA-1 with MongoDB 3.0 and later,
MONGODB-CR (MongoDB Challenge Response protocol) for older servers.
: param is_mock: explicitly use mongomock for this connection
:param is_mock: explicitly use mongomock for this connection
(can also be done by using `mongomock: // ` as db host prefix)
: param kwargs: ad-hoc parameters to be passed into the pymongo driver,
:param kwargs: ad-hoc parameters to be passed into the pymongo driver,
for example maxpoolsize, tz_aware, etc. See the documentation
for pymongo's `MongoClient` for a full list.

.. versionchanged:: 0.10.6 - added mongomock support
"""
conn_settings = {
"name": name or db or DEFAULT_DATABASE_NAME,
@@ -93,7 +90,7 @@ def _get_connection_settings(
conn_host = conn_settings["host"]

# Host can be a list or a string, so if string, force to a list.
if isinstance(conn_host, six.string_types):
if isinstance(conn_host, str):
conn_host = [conn_host]

resolved_hosts = []
@@ -148,7 +145,7 @@ def _get_connection_settings(
# TODO simplify the code below once we drop support for
# PyMongo v3.4.
read_pf_mode = uri_options["readpreference"]
if isinstance(read_pf_mode, six.string_types):
if isinstance(read_pf_mode, str):
read_pf_mode = read_pf_mode.lower()
for preference in read_preferences:
if (
@@ -180,30 +177,27 @@ def register_connection(
password=None,
authentication_source=None,
authentication_mechanism=None,
**kwargs
**kwargs,
):
"""Register the connection settings.

: param alias: the name that will be used to refer to this connection
throughout MongoEngine
: param db: the name of the database to use, for compatibility with connect
: param name: the name of the specific database to use
: param host: the host name of the: program: `mongod` instance to connect to
: param port: the port that the: program: `mongod` instance is running on
: param read_preference: The read preference for the collection
: param username: username to authenticate with
: param password: password to authenticate with
: param authentication_source: database to authenticate against
: param authentication_mechanism: database authentication mechanisms.
:param alias: the name that will be used to refer to this connection throughout MongoEngine
:param db: the name of the database to use, for compatibility with connect
:param name: the name of the specific database to use
:param host: the host name of the: program: `mongod` instance to connect to
:param port: the port that the: program: `mongod` instance is running on
:param read_preference: The read preference for the collection
:param username: username to authenticate with
:param password: password to authenticate with
:param authentication_source: database to authenticate against
:param authentication_mechanism: database authentication mechanisms.
By default, use SCRAM-SHA-1 with MongoDB 3.0 and later,
MONGODB-CR (MongoDB Challenge Response protocol) for older servers.
: param is_mock: explicitly use mongomock for this connection
:param is_mock: explicitly use mongomock for this connection
(can also be done by using `mongomock: // ` as db host prefix)
: param kwargs: ad-hoc parameters to be passed into the pymongo driver,
:param kwargs: ad-hoc parameters to be passed into the pymongo driver,
for example maxpoolsize, tz_aware, etc. See the documentation
for pymongo's `MongoClient` for a full list.

.. versionchanged:: 0.10.6 - added mongomock support
"""
conn_settings = _get_connection_settings(
db=db,
@@ -215,15 +209,15 @@ def register_connection(
password=password,
authentication_source=authentication_source,
authentication_mechanism=authentication_mechanism,
**kwargs
**kwargs,
)
_connection_settings[alias] = conn_settings


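As a usage sketch of the API documented above (aliases and database names are illustrative):

```python
from mongoengine import Document, StringField, register_connection

# Register two named connections up front; nothing connects yet.
register_connection(alias="default", db="main_db", host="localhost", port=27017)
register_connection(alias="reporting", db="reports_db")

class Report(Document):
    title = StringField()
    meta = {"db_alias": "reporting"}  # route this model to the second alias
```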
def disconnect(alias=DEFAULT_CONNECTION_NAME):
"""Close the connection with a given alias."""
from mongoengine.base.common import _get_documents_by_db
from mongoengine import Document
from mongoengine.base.common import _get_documents_by_db

if alias in _connections:
get_connection(alias=alias).close()
@@ -318,7 +312,7 @@ def _create_connection(alias, connection_class, **connection_settings):
try:
return connection_class(**connection_settings)
except Exception as e:
raise ConnectionFailure("Cannot connect to database %s :\n%s" % (alias, e))
raise ConnectionFailure(f"Cannot connect to database {alias} :\n{e}")


def _find_existing_connection(connection_settings):
@@ -387,8 +381,6 @@ def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs):

See the docstring for `register_connection` for more details about all
supported kwargs.

.. versionchanged:: 0.6 - added multiple database support.
"""
if alias in _connections:
prev_conn_setting = _connection_settings[alias]
@@ -396,8 +388,8 @@ def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs):

if new_conn_settings != prev_conn_setting:
err_msg = (
u"A different connection with alias `{}` was already "
u"registered. Use disconnect() first"
"A different connection with alias `{}` was already "
"registered. Use disconnect() first"
).format(alias)
raise ConnectionFailure(err_msg)
else:

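The guard above makes re-using an alias with different settings an explicit error; a sketch of the resulting workflow (database names illustrative):

```python
from mongoengine import connect, disconnect
from mongoengine.connection import ConnectionFailure

connect("first_db", alias="default")

try:
    # Same alias, different settings: refused instead of silently swapped.
    connect("second_db", alias="default")
except ConnectionFailure:
    disconnect(alias="default")
    connect("second_db", alias="default")  # now fine
```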
@@ -1,7 +1,7 @@
from contextlib import contextmanager

from pymongo.read_concern import ReadConcern
from pymongo.write_concern import WriteConcern
from six import iteritems

from mongoengine.common import _import_class
from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
@@ -14,10 +14,11 @@ __all__ = (
"no_sub_classes",
"query_counter",
"set_write_concern",
"set_read_write_concern",
)


class switch_db(object):
class switch_db:
"""switch_db alias context manager.

Example ::
@@ -58,7 +59,7 @@ class switch_db(object):
self.cls._collection = self.collection


class switch_collection(object):
class switch_collection:
"""switch_collection alias context manager.

Example ::
@@ -100,7 +101,7 @@ class switch_collection(object):
self.cls._get_collection_name = self.ori_get_collection_name


class no_dereference(object):
class no_dereference:
"""no_dereference context manager.

Turns off all dereferencing in Documents for the duration of the context
@@ -123,7 +124,7 @@ class no_dereference(object):

self.deref_fields = [
k
for k, v in iteritems(self.cls._fields)
for k, v in self.cls._fields.items()
if isinstance(v, (ReferenceField, GenericReferenceField, ComplexBaseField))
]

@@ -140,7 +141,7 @@ class no_dereference(object):
return self.cls


class no_sub_classes(object):
class no_sub_classes:
"""no_sub_classes context manager.

Only returns instances of this class and no sub (inherited) classes::
@@ -168,24 +169,37 @@ class no_sub_classes(object):
self.cls._subclasses = self.cls_initial_subclasses


class query_counter(object):
class query_counter:
"""Query_counter context manager to get the number of queries.
This works by updating the `profiling_level` of the database so that all queries get logged,
resetting the db.system.profile collection at the beginnig of the context and counting the new entries.
resetting the db.system.profile collection at the beginning of the context and counting the new entries.

This was designed for debugging purposes. In fact it is a global counter, so queries issued by other threads/processes
can interfere with it.

Usage:

.. code-block:: python

class User(Document):
name = StringField()

with query_counter() as q:
user = User(name='Bob')
assert q == 0 # no query fired yet
user.save()
assert q == 1 # 1 query was fired, an 'insert'
user_bis = User.objects().first()
assert q == 2 # a 2nd query was fired, a 'find_one'

Be aware that:
- Iterating over large amount of documents (>101) makes pymongo issue `getmore` queries to fetch the next batch of
documents (https://docs.mongodb.com/manual/tutorial/iterate-a-cursor/#cursor-batches)

- Iterating over large amount of documents (>101) makes pymongo issue `getmore` queries to fetch the next batch of documents (https://docs.mongodb.com/manual/tutorial/iterate-a-cursor/#cursor-batches)
- Some queries are ignored by default by the counter (killcursors, db.system.indexes)
"""

def __init__(self):
"""Construct the query_counter
"""
self.db = get_db()
def __init__(self, alias=DEFAULT_CONNECTION_NAME):
self.db = get_db(alias=alias)
self.initial_profiling_level = None
self._ctx_query_counter = 0  # number of queries issued by the context
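The constructor change above lets the counter target a non-default connection; a sketch (alias and database names illustrative):

```python
from mongoengine import connect
from mongoengine.context_managers import query_counter

connect("analytics_db", alias="analytics")

# Profile queries on the "analytics" connection instead of the default one.
with query_counter(alias="analytics") as q:
    assert q == 0  # counting starts fresh inside the context
```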
@@ -235,7 +249,7 @@ class query_counter(object):

def __repr__(self):
"""repr query_counter as the number of queries."""
return u"%s" % self._get_count()
return "%s" % self._get_count()

def _get_count(self):
"""Get the number of queries by counting the current number of entries in db.system.profile
@@ -247,8 +261,8 @@ class query_counter(object):
- self._ctx_query_counter
)
self._ctx_query_counter += (
1
) # Account for the query we just issued to gather the information
1  # Account for the query we just issued to gather the information
)
return count


@@ -257,3 +271,21 @@ def set_write_concern(collection, write_concerns):
combined_concerns = dict(collection.write_concern.document.items())
combined_concerns.update(write_concerns)
yield collection.with_options(write_concern=WriteConcern(**combined_concerns))


@contextmanager
def set_read_write_concern(collection, write_concerns, read_concerns):
combined_write_concerns = dict(collection.write_concern.document.items())

if write_concerns is not None:
combined_write_concerns.update(write_concerns)

combined_read_concerns = dict(collection.read_concern.document.items())

if read_concerns is not None:
combined_read_concerns.update(read_concerns)

yield collection.with_options(
write_concern=WriteConcern(**combined_write_concerns),
read_concern=ReadConcern(**combined_read_concerns),
)

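A sketch of how the new context manager is meant to be used; it is primarily an internal helper, so direct use like this is an assumption (the `LogEntry` class is illustrative):

```python
from mongoengine import Document, StringField
from mongoengine.context_managers import set_read_write_concern

class LogEntry(Document):
    message = StringField()

collection = LogEntry._get_collection()
with set_read_write_concern(
    collection,
    write_concerns={"w": "majority"},
    read_concerns={"level": "majority"},
) as overridden:
    # `overridden` is a collection clone with the merged concerns applied.
    overridden.insert_one({"message": "audited write"})
```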
@@ -1,6 +1,4 @@
from bson import DBRef, SON
import six
from six import iteritems
from bson import SON, DBRef

from mongoengine.base import (
BaseDict,
@@ -12,11 +10,16 @@ from mongoengine.base import (
from mongoengine.base.datastructures import LazyReference
from mongoengine.connection import get_db
from mongoengine.document import Document, EmbeddedDocument
from mongoengine.fields import DictField, ListField, MapField, ReferenceField
from mongoengine.fields import (
DictField,
ListField,
MapField,
ReferenceField,
)
from mongoengine.queryset import QuerySet


class DeReference(object):
class DeReference:
def __call__(self, items, max_depth=1, instance=None, name=None):
"""
Cheaply dereferences the items to a set depth.
@@ -30,7 +33,7 @@ class DeReference(object):
:class:`~mongoengine.base.ComplexBaseField`
:param get: A boolean determining if being called by __get__
"""
if items is None or isinstance(items, six.string_types):
if items is None or isinstance(items, str):
return items

# cheapest way to convert a queryset to a list
@@ -53,10 +56,10 @@ class DeReference(object):
doc_type = doc_type.document_type
is_list = not hasattr(items, "items")

if is_list and all([i.__class__ == doc_type for i in items]):
if is_list and all(i.__class__ == doc_type for i in items):
return items
elif not is_list and all(
[i.__class__ == doc_type for i in items.values()]
i.__class__ == doc_type for i in items.values()
):
return items
elif not field.dbref:
@@ -79,7 +82,7 @@ class DeReference(object):

def _get_items_from_dict(items):
new_items = {}
for k, v in iteritems(items):
for k, v in items.items():
value = v
if isinstance(v, list):
value = _get_items_from_list(v)
@@ -120,7 +123,7 @@ class DeReference(object):
depth += 1
for item in iterator:
if isinstance(item, (Document, EmbeddedDocument)):
for field_name, field in iteritems(item._fields):
for field_name, field in item._fields.items():
v = item._data.get(field_name, None)
if isinstance(v, LazyReference):
# LazyReference inherits DBRef but should not be dereferenced here !
@@ -136,7 +139,7 @@ class DeReference(object):
getattr(field, "field", None), "document_type", None
)
references = self._find_references(v, depth)
for key, refs in iteritems(references):
for key, refs in references.items():
if isinstance(
field_cls, (Document, TopLevelDocumentMetaclass)
):
@@ -153,16 +156,15 @@ class DeReference(object):
)
elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth:
references = self._find_references(item, depth - 1)
for key, refs in iteritems(references):
for key, refs in references.items():
reference_map.setdefault(key, set()).update(refs)

return reference_map

def _fetch_objects(self, doc_type=None):
"""Fetch all references and convert to their document objects
"""
"""Fetch all references and convert to their document objects"""
object_map = {}
for collection, dbrefs in iteritems(self.reference_map):
for collection, dbrefs in self.reference_map.items():

# we use getattr instead of hasattr because hasattr swallows any exception under python2
# so it could hide nasty things without raising exceptions (cfr bug #1688))
@@ -174,7 +176,7 @@ class DeReference(object):
dbref for dbref in dbrefs if (col_name, dbref) not in object_map
]
references = collection.objects.in_bulk(refs)
for key, doc in iteritems(references):
for key, doc in references.items():
object_map[(col_name, key)] = doc
else:  # Generic reference: use the refs data to convert to document
if isinstance(doc_type, (ListField, DictField, MapField)):
@@ -250,7 +252,7 @@ class DeReference(object):
data = []
else:
is_list = False
iterator = iteritems(items)
iterator = items.items()
data = {}

depth += 1
@@ -274,14 +276,12 @@ class DeReference(object):
(v["_ref"].collection, v["_ref"].id), v
)
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
item_name = six.text_type("{0}.{1}.{2}").format(
name, k, field_name
)
item_name = f"{name}.{k}.{field_name}"
data[k]._data[field_name] = self._attach_objects(
v, depth, instance=instance, name=item_name
)
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
item_name = "%s.%s" % (name, k) if name else name
item_name = f"{name}.{k}" if name else name
data[k] = self._attach_objects(
v, depth - 1, instance=instance, name=item_name
)

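`DeReference` is what runs behind attribute access on reference containers and behind `QuerySet.select_related()`. A behavioural sketch, assuming some saved data exists (classes illustrative):

```python
from mongoengine import Document, ListField, ReferenceField, StringField

class Author(Document):
    name = StringField()

class Book(Document):
    authors = ListField(ReferenceField(Author))

book = Book.objects.first()  # assumes a saved Book exists
# Accessing the list triggers a single bulk dereference (depth 1)
# rather than one query per DBRef:
print([a.name for a in book.authors])
```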
@@ -1,11 +1,8 @@
import re
import warnings

from bson.dbref import DBRef
import pymongo
from bson.dbref import DBRef
from pymongo.read_preferences import ReadPreference
import six
from six import iteritems

from mongoengine import signals
from mongoengine.base import (
@@ -19,14 +16,23 @@ from mongoengine.base import (
)
from mongoengine.common import _import_class
from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
from mongoengine.context_managers import set_write_concern, switch_collection, switch_db
from mongoengine.context_managers import (
set_write_concern,
switch_collection,
switch_db,
)
from mongoengine.errors import (
InvalidDocumentError,
InvalidQueryError,
SaveConditionError,
)
from mongoengine.pymongo_support import list_collection_names
from mongoengine.queryset import NotUniqueError, OperationError, QuerySet, transform
from mongoengine.queryset import (
NotUniqueError,
OperationError,
QuerySet,
transform,
)

__all__ = (
"Document",
@@ -44,7 +50,7 @@ def includes_cls(fields):
"""Helper function used for ensuring and comparing indexes."""
first_field = None
if len(fields):
if isinstance(fields[0], six.string_types):
if isinstance(fields[0], str):
first_field = fields[0]
elif isinstance(fields[0], (list, tuple)) and len(fields[0]):
first_field = fields[0][0]
@@ -55,8 +61,8 @@ class InvalidCollectionError(Exception):
pass


class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)):
"""A :class:`~mongoengine.Document` that isn't stored in its own
class EmbeddedDocument(BaseDocument, metaclass=DocumentMetaclass):
r"""A :class:`~mongoengine.Document` that isn't stored in its own
collection. :class:`~mongoengine.EmbeddedDocument`\ s should be used as
fields on :class:`~mongoengine.Document`\ s through the
:class:`~mongoengine.EmbeddedDocumentField` field type.
@@ -71,7 +77,6 @@ class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)):

__slots__ = ("_instance",)

# The __metaclass__ attribute is removed by 2to3 when running with Python3
# my_metaclass is defined so that metaclass can be queried in Python 2 & 3
my_metaclass = DocumentMetaclass

@@ -82,7 +87,7 @@ class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)):
__hash__ = None

def __init__(self, *args, **kwargs):
super(EmbeddedDocument, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
self._instance = None
self._changed_fields = []

@@ -94,8 +99,17 @@ class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)):
def __ne__(self, other):
return not self.__eq__(other)

def __getstate__(self):
data = super().__getstate__()
data["_instance"] = None
return data

def __setstate__(self, state):
super().__setstate__(state)
self._instance = state["_instance"]
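The new `__getstate__`/`__setstate__` pair makes embedded documents safer to pickle by dropping the backreference to their parent; a sketch of the round trip (classes illustrative):

```python
import pickle
from mongoengine import Document, EmbeddedDocument, EmbeddedDocumentField, StringField

class Address(EmbeddedDocument):
    city = StringField()

class Customer(Document):
    address = EmbeddedDocumentField(Address)

customer = Customer(address=Address(city="Oslo"))
# _instance (the owning Customer) is reset to None during pickling,
# so the embedded document serialises cleanly.
clone = pickle.loads(pickle.dumps(customer.address))
assert clone.city == "Oslo"
```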
def to_mongo(self, *args, **kwargs):
|
||||
data = super(EmbeddedDocument, self).to_mongo(*args, **kwargs)
|
||||
data = super().to_mongo(*args, **kwargs)
|
||||
|
||||
# remove _id from the SON if it's in it and it's None
|
||||
if "_id" in data and data["_id"] is None:
|
||||
@ -104,7 +118,7 @@ class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)):
|
||||
return data
|
||||
|
||||
|
||||
class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
|
||||
class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
|
||||
"""The base class used for defining the structure and properties of
|
||||
collections of documents stored in MongoDB. Inherit from this class, and
|
||||
add fields as class attributes to define a document's structure.
|
||||
@ -113,7 +127,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
|
||||
|
||||
By default, the MongoDB collection used to store documents created using a
|
||||
:class:`~mongoengine.Document` subclass will be the name of the subclass
|
||||
converted to lowercase. A different collection may be specified by
|
||||
converted to snake_case. A different collection may be specified by
|
||||
providing :attr:`collection` to the :attr:`meta` dictionary in the class
|
||||
definition.
|
||||
|
||||
@ -121,7 +135,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
|
||||
create a specialised version of the document that will be stored in the
|
||||
same collection. To facilitate this behaviour a `_cls`
|
||||
field is added to documents (hidden though the MongoEngine interface).
|
||||
To enable this behaviourset :attr:`allow_inheritance` to ``True`` in the
|
||||
To enable this behaviour set :attr:`allow_inheritance` to ``True`` in the
|
||||
:attr:`meta` dictionary.
|
||||
|
||||
A :class:`~mongoengine.Document` may use a **Capped Collection** by
|
||||
@ -156,7 +170,6 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
|
||||
in the :attr:`meta` dictionary.
|
||||
"""
|
||||
|
||||
# The __metaclass__ attribute is removed by 2to3 when running with Python3
|
||||
# my_metaclass is defined so that metaclass can be queried in Python 2 & 3
|
||||
my_metaclass = TopLevelDocumentMetaclass
|
||||
|
||||
@ -260,7 +273,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
|
||||
return db.create_collection(collection_name, **opts)
|
||||
|
||||
def to_mongo(self, *args, **kwargs):
|
||||
data = super(Document, self).to_mongo(*args, **kwargs)
|
||||
data = super().to_mongo(*args, **kwargs)
|
||||
|
||||
# If '_id' is None, try and set it from self._data. If that
|
||||
# doesn't exist either, remove '_id' from the SON completely.
|
||||
@ -328,11 +341,11 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
|
||||
_refs=None,
|
||||
save_condition=None,
|
||||
signal_kwargs=None,
|
||||
**kwargs
|
||||
**kwargs,
|
||||
):
|
||||
"""Save the :class:`~mongoengine.Document` to the database. If the
|
||||
document already exists, it will be updated, otherwise it will be
|
||||
created.
|
||||
created. Returns the saved object instance.
|
||||
|
||||
:param force_insert: only try to create a new document, don't allow
|
||||
updates of existing documents.
|
||||
@ -371,15 +384,6 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
|
||||
meta['cascade'] = True. Also you can pass different kwargs to
|
||||
the cascade save using cascade_kwargs which overwrites the
|
||||
existing kwargs with custom values.
|
||||
.. versionchanged:: 0.8.5
|
||||
Optional save_condition that only overwrites existing documents
|
||||
if the condition is satisfied in the current db record.
|
||||
.. versionchanged:: 0.10
|
||||
:class:`OperationError` exception raised if save_condition fails.
|
||||
.. versionchanged:: 0.10.1
|
||||
:class: save_condition failure now raises a `SaveConditionError`
|
||||
.. versionchanged:: 0.10.7
|
||||
Add signal_kwargs argument
|
||||
"""
|
||||
signal_kwargs = signal_kwargs or {}
@@ -431,16 +435,16 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
                self.cascade_save(**kwargs)

        except pymongo.errors.DuplicateKeyError as err:
            message = u"Tried to save duplicate unique keys (%s)"
            raise NotUniqueError(message % six.text_type(err))
            message = "Tried to save duplicate unique keys (%s)"
            raise NotUniqueError(message % err)
        except pymongo.errors.OperationFailure as err:
            message = "Could not save document (%s)"
            if re.match("^E1100[01] duplicate key", six.text_type(err)):
            if re.match("^E1100[01] duplicate key", str(err)):
                # E11000 - duplicate key error index
                # E11001 - duplicate key on update
                message = u"Tried to save duplicate unique keys (%s)"
                raise NotUniqueError(message % six.text_type(err))
            raise OperationError(message % six.text_type(err))
                message = "Tried to save duplicate unique keys (%s)"
                raise NotUniqueError(message % err)
            raise OperationError(message % err)

        # Make sure we store the PK on this document now that it's saved
        id_field = self._meta["id_field"]
@@ -468,9 +472,9 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
            # insert_one will raise a DuplicateKeyError when the document
            # already exists, whereas save does not; therefore we need to
            # catch the error and call replace_one instead.
            if "_id" in doc:
                raw_object = wc_collection.find_one_and_replace(
                    {"_id": doc["_id"]}, doc
                )
                select_dict = {"_id": doc["_id"]}
                select_dict = self._integrate_shard_key(doc, select_dict)
                raw_object = wc_collection.find_one_and_replace(select_dict, doc)
                if raw_object:
                    return doc["_id"]

@@ -493,6 +497,23 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):

        return update_doc

    def _integrate_shard_key(self, doc, select_dict):
        """Integrate the collection's shard key into the `select_dict` that
        will be used for the query. The shard key values are taken from `doc`
        and the updated select_dict is returned.
        """

        # Need to add shard key to query, or you get an error
        shard_key = self._meta.get("shard_key", tuple())
        for k in shard_key:
            path = self._lookup_field(k.split("."))
            actual_key = [p.db_field for p in path]
            val = doc
            for ak in actual_key:
                val = val[ak]
            select_dict[".".join(actual_key)] = val

        return select_dict
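As a quick illustration of what the new `_integrate_shard_key` helper produces, consider a hypothetical sharded model (the names are invented for the example):

# Hypothetical illustration of the shard-key handling above (not part of the diff):
from mongoengine import Document, StringField

class Order(Document):
    region = StringField()
    meta = {"shard_key": ("region",)}  # collection sharded on 'region'

# For doc = {"_id": ..., "region": "eu"}, the helper extends the filter
# {"_id": doc["_id"]} to {"_id": doc["_id"], "region": "eu"}, so that
# find_one_and_replace targets the right shard instead of erroring on mongos.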
    def _save_update(self, doc, save_condition, write_concern):
        """Update an existing document.

@@ -508,15 +529,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):

        select_dict["_id"] = object_id

        # Need to add shard key to query, or you get an error
        shard_key = self._meta.get("shard_key", tuple())
        for k in shard_key:
            path = self._lookup_field(k.split("."))
            actual_key = [p.db_field for p in path]
            val = doc
            for ak in actual_key:
                val = val[ak]
            select_dict[".".join(actual_key)] = val
        select_dict = self._integrate_shard_key(doc, select_dict)

        update_doc = self._get_update_doc()
        if update_doc:
@@ -559,7 +572,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
            if not getattr(ref, "_changed_fields", True):
                continue

            ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data))
            ref_id = f"{ref.__class__.__name__},{str(ref._data)}"
            if ref and ref_id not in _refs:
                _refs.append(ref_id)
                kwargs["_refs"] = _refs
@@ -570,7 +583,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
    def _qs(self):
        """Return the default queryset corresponding to this document."""
        if not hasattr(self, "__objects"):
            self.__objects = QuerySet(self, self._get_collection())
            self.__objects = QuerySet(self.__class__, self._get_collection())
        return self.__objects

    @property
@@ -625,16 +638,13 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
            For example, ``save(..., w: 2, fsync: True)`` will
            wait until at least two servers have recorded the write and
            will force an fsync on the primary server.

        .. versionchanged:: 0.10.7
            Add signal_kwargs argument
        """
        signal_kwargs = signal_kwargs or {}
        signals.pre_delete.send(self.__class__, document=self, **signal_kwargs)

        # Delete FileFields separately
        FileField = _import_class("FileField")
        for name, field in iteritems(self._fields):
        for name, field in self._fields.items():
            if isinstance(field, FileField):
                getattr(self, name).delete()

@@ -643,7 +653,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
                write_concern=write_concern, _from_doc_delete=True
            )
        except pymongo.errors.OperationFailure as err:
            message = u"Could not delete document (%s)" % err.message
            message = "Could not delete document (%s)" % err.args
            raise OperationError(message)
        signals.post_delete.send(self.__class__, document=self, **signal_kwargs)

@@ -709,8 +719,6 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
    def select_related(self, max_depth=1):
        """Handles dereferencing of :class:`~bson.dbref.DBRef` objects to
        a maximum depth in order to cut down the number of queries to mongodb.

        .. versionadded:: 0.5
        """
        DeReference = _import_class("DeReference")
        DeReference()([self], max_depth + 1)
@@ -721,10 +729,6 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):

        :param fields: (optional) args list of fields to reload
        :param max_depth: (optional) depth of dereferencing to follow

        .. versionadded:: 0.1.2
        .. versionchanged:: 0.6 Now chainable
        .. versionchanged:: 0.9 Can provide specific fields to reload
        """
        max_depth = 1
        if fields and isinstance(fields[0], int):
@@ -826,9 +830,6 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):

        Raises :class:`OperationError` if the document has no collection set
        (e.g. if it is `abstract`)

        .. versionchanged:: 0.10.7
            :class:`OperationError` exception raised if no collection available
        """
        coll_name = cls._get_collection_name()
        if not coll_name:
@@ -851,17 +852,13 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
        index_spec = cls._build_index_spec(keys)
        index_spec = index_spec.copy()
        fields = index_spec.pop("fields")
        drop_dups = kwargs.get("drop_dups", False)
        if drop_dups:
            msg = "drop_dups is deprecated and is removed when using PyMongo 3+."
            warnings.warn(msg, DeprecationWarning)
        index_spec["background"] = background
        index_spec.update(kwargs)

        return cls._get_collection().create_index(fields, **index_spec)

    @classmethod
    def ensure_index(cls, key_or_list, drop_dups=False, background=False, **kwargs):
    def ensure_index(cls, key_or_list, background=False, **kwargs):
        """Ensure that the given indexes are in place. Deprecated in favour
        of create_index.

@@ -869,12 +866,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
            construct a multi-field index); keys may be prefixed with a **+**
            or a **-** to determine the index ordering
        :param background: Allows index creation in the background
        :param drop_dups: Was removed/ignored with MongoDB >2.7.5. The value
            will be removed if PyMongo3+ is used
        """
        if drop_dups:
            msg = "drop_dups is deprecated and is removed when using PyMongo 3+."
            warnings.warn(msg, DeprecationWarning)
        return cls.create_index(key_or_list, background=background, **kwargs)
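The hunks above complete the drop_dups deprecation: create_index owns the warning, and ensure_index is reduced to a thin forwarding wrapper. A short sketch with a hypothetical model:

# Hypothetical model illustrating the index API above (not part of the diff):
from mongoengine import Document, StringField

class Book(Document):
    title = StringField()

Book.create_index(["+title"])  # preferred: create the ascending index directly
Book.ensure_index(["+title"])  # deprecated alias, now simply forwards to create_index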
    @classmethod
@@ -883,16 +875,16 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):

        Global defaults can be set in the meta - see :doc:`guide/defining-documents`

        By default, this will get called automatically upon first interaction with the
        Document collection (query, save, etc) so unless you disabled `auto_create_index`, you
        shouldn't have to call this manually.

        .. note:: You can disable automatic index creation by setting
            `auto_create_index` to False in the document's meta data
        """
        background = cls._meta.get("index_background", False)
        drop_dups = cls._meta.get("index_drop_dups", False)
        index_opts = cls._meta.get("index_opts") or {}
        index_cls = cls._meta.get("index_cls", True)
        if drop_dups:
            msg = "drop_dups is deprecated and is removed when using PyMongo 3+."
            warnings.warn(msg, DeprecationWarning)

        collection = cls._get_collection()
        # 746: when connection is via mongos, the read preference is not necessarily an indication that
@@ -936,8 +928,10 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):

    @classmethod
    def list_indexes(cls):
        """ Lists all of the indexes that should be created for given
        collection. It includes all the indexes from super- and sub-classes.
        """Lists all indexes that should be created for the Document collection.
        It includes all the indexes from super- and sub-classes.

        Note that it will only return the indexes' fields, not the indexes' options
        """
        if cls._meta.get("abstract"):
            return []
@@ -992,16 +986,16 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
            indexes.append(index)

        # finish up by appending { '_id': 1 } and { '_cls': 1 }, if needed
        if [(u"_id", 1)] not in indexes:
            indexes.append([(u"_id", 1)])
        if [("_id", 1)] not in indexes:
            indexes.append([("_id", 1)])
        if cls._meta.get("index_cls", True) and cls._meta.get("allow_inheritance"):
            indexes.append([(u"_cls", 1)])
            indexes.append([("_cls", 1)])

        return indexes

    @classmethod
    def compare_indexes(cls):
        """ Compares the indexes defined in MongoEngine with the ones
        """Compares the indexes defined in MongoEngine with the ones
        existing in the database. Returns any missing/extra indexes.
        """

@@ -1019,19 +1013,19 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
        extra = [index for index in existing if index not in required]

        # if { _cls: 1 } is missing, make sure it's *really* necessary
        if [(u"_cls", 1)] in missing:
        if [("_cls", 1)] in missing:
            cls_obsolete = False
            for index in existing:
                if includes_cls(index) and index not in extra:
                    cls_obsolete = True
                    break
            if cls_obsolete:
                missing.remove([(u"_cls", 1)])
                missing.remove([("_cls", 1)])

        return {"missing": missing, "extra": extra}


class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)):
class DynamicDocument(Document, metaclass=TopLevelDocumentMetaclass):
    """A Dynamic Document class allowing flexible, expandable and uncontrolled
    schemas. As a :class:`~mongoengine.Document` subclass, acts in the same
    way as an ordinary document but has expanded style properties. Any data
@@ -1045,7 +1039,6 @@ class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)):
    There is one caveat on Dynamic Documents: undeclared fields cannot start with `_`
    """

    # The __metaclass__ attribute is removed by 2to3 when running with Python3
    # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
    my_metaclass = TopLevelDocumentMetaclass

@@ -1060,16 +1053,15 @@ class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)):
            setattr(self, field_name, None)
            self._dynamic_fields[field_name].null = False
        else:
            super(DynamicDocument, self).__delattr__(*args, **kwargs)
            super().__delattr__(*args, **kwargs)


class DynamicEmbeddedDocument(six.with_metaclass(DocumentMetaclass, EmbeddedDocument)):
class DynamicEmbeddedDocument(EmbeddedDocument, metaclass=DocumentMetaclass):
    """A Dynamic Embedded Document class allowing flexible, expandable and
    uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more
    information about dynamic documents.
    """

    # The __metaclass__ attribute is removed by 2to3 when running with Python3
    # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
    my_metaclass = DocumentMetaclass

@@ -1089,7 +1081,7 @@ class DynamicEmbeddedDocument(six.with_metaclass(DocumentMetaclass, EmbeddedDocu
            setattr(self, field_name, None)


class MapReduceDocument(object):
class MapReduceDocument:
    """A document returned from a map/reduce query.

    :param collection: An instance of :class:`~pymongo.Collection`
@@ -1098,8 +1090,6 @@ class MapReduceDocument(object):
        an ``ObjectId`` found in the given ``collection``,
        the object can be accessed via the ``object`` property.
    :param value: The result(s) for this key.

    .. versionadded:: 0.3
    """

    def __init__(self, document, collection, key, value):
@@ -1,8 +1,5 @@
from collections import defaultdict

import six
from six import iteritems

__all__ = (
    "NotRegistered",
    "InvalidDocumentError",
@@ -20,11 +17,15 @@ __all__ = (
)


class NotRegistered(Exception):
class MongoEngineException(Exception):
    pass


class InvalidDocumentError(Exception):
class NotRegistered(MongoEngineException):
    pass


class InvalidDocumentError(MongoEngineException):
    pass


@@ -32,19 +33,19 @@ class LookUpError(AttributeError):
    pass


class DoesNotExist(Exception):
class DoesNotExist(MongoEngineException):
    pass


class MultipleObjectsReturned(Exception):
class MultipleObjectsReturned(MongoEngineException):
    pass


class InvalidQueryError(Exception):
class InvalidQueryError(MongoEngineException):
    pass


class OperationError(Exception):
class OperationError(MongoEngineException):
    pass


@@ -60,7 +61,7 @@ class SaveConditionError(OperationError):
    pass


class FieldDoesNotExist(Exception):
class FieldDoesNotExist(MongoEngineException):
    """Raised when trying to set a field
    not declared in a :class:`~mongoengine.Document`
    or an :class:`~mongoengine.EmbeddedDocument`.
@@ -87,24 +88,24 @@ class ValidationError(AssertionError):
    _message = None

    def __init__(self, message="", **kwargs):
        super(ValidationError, self).__init__(message)
        super().__init__(message)
        self.errors = kwargs.get("errors", {})
        self.field_name = kwargs.get("field_name")
        self.message = message

    def __str__(self):
        return six.text_type(self.message)
        return str(self.message)

    def __repr__(self):
        return "%s(%s,)" % (self.__class__.__name__, self.message)
        return f"{self.__class__.__name__}({self.message},)"

    def __getattribute__(self, name):
        message = super(ValidationError, self).__getattribute__(name)
        message = super().__getattribute__(name)
        if name == "message":
            if self.field_name:
                message = "%s" % message
            if self.errors:
                message = "%s(%s)" % (message, self._format_errors())
                message = f"{message}({self._format_errors()})"
        return message

    def _get_message(self):
@@ -126,12 +127,12 @@ class ValidationError(AssertionError):
        def build_dict(source):
            errors_dict = {}
            if isinstance(source, dict):
                for field_name, error in iteritems(source):
                for field_name, error in source.items():
                    errors_dict[field_name] = build_dict(error)
            elif isinstance(source, ValidationError) and source.errors:
                return build_dict(source.errors)
            else:
                return six.text_type(source)
                return str(source)

            return errors_dict

@@ -147,18 +148,18 @@ class ValidationError(AssertionError):
            if isinstance(value, list):
                value = " ".join([generate_key(k) for k in value])
            elif isinstance(value, dict):
                value = " ".join([generate_key(v, k) for k, v in iteritems(value)])
                value = " ".join([generate_key(v, k) for k, v in value.items()])

            results = "%s.%s" % (prefix, value) if prefix else value
            results = f"{prefix}.{value}" if prefix else value
            return results

        error_dict = defaultdict(list)
        for k, v in iteritems(self.to_dict()):
        for k, v in self.to_dict().items():
            error_dict[generate_key(v)].append(k)
        return " ".join(["%s: %s" % (k, v) for k, v in iteritems(error_dict)])
        return " ".join([f"{k}: {v}" for k, v in error_dict.items()])


class DeprecatedError(Exception):
class DeprecatedError(MongoEngineException):
    """Raised when a user uses a feature that has been deprecated"""

    pass
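The practical effect of the new MongoEngineException base class is that callers get a single catch-all for the library's own errors (note that ValidationError and LookUpError keep their builtin AssertionError/AttributeError bases). A hedged sketch:

# Sketch of what the new hierarchy enables (doc is a hypothetical document instance):
from mongoengine.errors import MongoEngineException, NotUniqueError

try:
    doc.save()
except NotUniqueError:
    pass  # still catchable individually, as before
except MongoEngineException:
    pass  # new: one base class for most library-defined errors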
File diff suppressed because it is too large
@@ -3,15 +3,16 @@ Helper functions, constants, and types to aid with MongoDB version support
"""
from mongoengine.connection import get_connection


# Constant that can be used to compare the version retrieved with
# get_mongodb_version()
MONGODB_34 = (3, 4)
MONGODB_36 = (3, 6)
MONGODB_42 = (4, 2)
MONGODB_44 = (4, 4)


def get_mongodb_version():
    """Return the version of the connected mongoDB (first 2 digits)
    """Return the version of the default connected mongoDB (first 2 digits)

    :return: tuple(int, int)
    """
@@ -2,6 +2,7 @@
Helper functions, constants, and types to aid with PyMongo v2.7 - v3.x support.
"""
import pymongo
from pymongo.errors import OperationFailure

_PYMONGO_37 = (3, 7)

@@ -10,13 +11,40 @@ PYMONGO_VERSION = tuple(pymongo.version_tuple[:2])
IS_PYMONGO_GTE_37 = PYMONGO_VERSION >= _PYMONGO_37


def count_documents(collection, filter):
def count_documents(
    collection, filter, skip=None, limit=None, hint=None, collation=None
):
    """Pymongo>3.7 deprecates count in favour of count_documents"""
    if limit == 0:
        return 0  # Pymongo raises an OperationFailure if called with limit=0

    kwargs = {}
    if skip is not None:
        kwargs["skip"] = skip
    if limit is not None:
        kwargs["limit"] = limit
    if hint not in (-1, None):
        kwargs["hint"] = hint
    if collation is not None:
        kwargs["collation"] = collation

    # count_documents appeared in pymongo 3.7
    if IS_PYMONGO_GTE_37:
        return collection.count_documents(filter)
    else:
        count = collection.find(filter).count()
        return count
    try:
        return collection.count_documents(filter=filter, **kwargs)
    except OperationFailure:
        # OperationFailure - accounts for some operators that used to work
        # with .count but are no longer working with count_documents
        # (e.g. $geoNear, $near, and $nearSphere)
        # fallback to deprecated Cursor.count
        # Keeping this should be reevaluated the day pymongo removes .count entirely
        pass

    cursor = collection.find(filter)
    for option, option_value in kwargs.items():
        cursor_method = getattr(cursor, option)
        cursor = cursor_method(option_value)
    with_limit_and_skip = "skip" in kwargs or "limit" in kwargs
    return cursor.count(with_limit_and_skip=with_limit_and_skip)
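In short, the shim now prefers the modern API and only falls back for the operators count_documents rejects. A usage sketch (collection stands for any pymongo Collection):

# Usage sketch for the compatibility shim above (not part of the diff):
from mongoengine.pymongo_support import count_documents

n = count_documents(collection, {"status": "active"}, skip=10, limit=5)
# On modern servers this maps to collection.count_documents(filter=..., skip=10, limit=5);
# if that raises OperationFailure (e.g. a $near filter), the shim rebuilds a find()
# cursor, re-applies skip/limit, and falls back to the deprecated Cursor.count().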

def list_collection_names(db, include_system_collections=False):

@@ -1,23 +0,0 @@
"""
Helper functions, constants, and types to aid with Python v2.7 - v3.x support
"""
import six

# six.BytesIO resolves to StringIO.StringIO in Py2 and io.BytesIO in Py3.
StringIO = six.BytesIO

# Additionally for Py2, try to use the faster cStringIO, if available
if not six.PY3:
    try:
        import cStringIO
    except ImportError:
        pass
    else:
        StringIO = cStringIO.StringIO


if six.PY3:
    from collections.abc import Hashable
else:
    # raises DeprecationWarnings in Python >=3.7
    from collections import Hashable
@@ -1,24 +1,26 @@
from __future__ import absolute_import

import copy
import itertools
import re
import warnings
from collections.abc import Mapping

from bson import SON, json_util
from bson.code import Code
import pymongo
import pymongo.errors
from bson import SON, json_util
from bson.code import Code
from pymongo.collection import ReturnDocument
from pymongo.common import validate_read_preference
import six
from six import iteritems
from pymongo.read_concern import ReadConcern

from mongoengine import signals
from mongoengine.base import get_document
from mongoengine.common import _import_class
from mongoengine.connection import get_db
from mongoengine.context_managers import set_write_concern, switch_db
from mongoengine.context_managers import (
    set_read_write_concern,
    set_write_concern,
    switch_db,
)
from mongoengine.errors import (
    BulkWriteError,
    InvalidQueryError,
@@ -26,11 +28,11 @@ from mongoengine.errors import (
    NotUniqueError,
    OperationError,
)
from mongoengine.pymongo_support import count_documents
from mongoengine.queryset import transform
from mongoengine.queryset.field_list import QueryFieldList
from mongoengine.queryset.visitor import Q, QNode


__all__ = ("BaseQuerySet", "DO_NOTHING", "NULLIFY", "CASCADE", "DENY", "PULL")

# Delete rules
@@ -41,7 +43,7 @@ DENY = 3
PULL = 4


class BaseQuerySet(object):
class BaseQuerySet:
    """A set of results returned from a query. Wraps a MongoDB cursor,
    providing :class:`~mongoengine.Document` objects as the results.
    """
@@ -60,8 +62,9 @@ class BaseQuerySet(object):
        self._ordering = None
        self._snapshot = False
        self._timeout = True
        self._slave_okay = False
        self._allow_disk_use = False
        self._read_preference = None
        self._read_concern = None
        self._iter = False
        self._scalar = []
        self._none = False
@@ -80,12 +83,20 @@ class BaseQuerySet(object):
        self._cursor_obj = None
        self._limit = None
        self._skip = None

        self._hint = -1  # Using -1 as None is a valid value for hint
        self._collation = None
        self._batch_size = None
        self.only_fields = []
        self._max_time_ms = None
        self._comment = None

        # Hack - people expect cursor[5:5] to return an empty result set,
        # but it's hard to do that right because the server uses limit(0)
        # to mean 'no limit'. So we set _empty in that case and check for
        # it when iterating. We also unset it anytime we change _limit.
        # Inspired by how it is done in pymongo.Cursor
        self._empty = False

    def __call__(self, q_obj=None, **query):
        """Filter the selected documents by calling the
        :class:`~mongoengine.queryset.QuerySet` with a query.
@@ -158,6 +169,7 @@ class BaseQuerySet(object):
        [<User: User object>, <User: User object>]
        """
        queryset = self.clone()
        queryset._empty = False

        # Handle a slice
        if isinstance(key, slice):
@@ -165,6 +177,8 @@ class BaseQuerySet(object):
            queryset._skip, queryset._limit = key.start, key.stop
            if key.start and key.stop:
                queryset._limit = key.stop - key.start
            if queryset._limit == 0:
                queryset._empty = True

            # Allow further QuerySet modifications to be performed
            return queryset
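A hedged sketch of the empty-slice semantics the _empty flag implements (Users is a hypothetical Document):

# Sketch of the new cursor[5:5] behaviour (not part of the diff):
qs = Users.objects[5:5]  # start == stop, so _limit == 0 and _empty is set
list(qs)                 # -> [] without asking the server for 'no limit'
qs.count()               # -> 0, short-circuited by the _empty flag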
@@ -176,7 +190,6 @@ class BaseQuerySet(object):
                queryset._document._from_son(
                    queryset._cursor[key],
                    _auto_dereference=self._auto_dereference,
                    only_fields=self.only_fields,
                )
            )

@@ -186,7 +199,6 @@ class BaseQuerySet(object):
            return queryset._document._from_son(
                queryset._cursor[key],
                _auto_dereference=self._auto_dereference,
                only_fields=self.only_fields,
            )

        raise TypeError("Provide a slice or an integer index")
@@ -203,8 +215,6 @@ class BaseQuerySet(object):
        """Avoid opening all records in an if stmt in Py3."""
        return self._has_data()

    __nonzero__ = __bool__  # For Py2 support

    # Core functions

    def all(self):
@@ -247,34 +257,30 @@ class BaseQuerySet(object):
        `DocumentName.MultipleObjectsReturned` exception if multiple results
        and :class:`~mongoengine.queryset.DoesNotExist` or
        `DocumentName.DoesNotExist` if no results are found.

        .. versionadded:: 0.3
        """
        queryset = self.clone()
        queryset = queryset.order_by().limit(2)
        queryset = queryset.filter(*q_objs, **query)

        try:
            result = six.next(queryset)
            result = next(queryset)
        except StopIteration:
            msg = "%s matching query does not exist." % queryset._document._class_name
            raise queryset._document.DoesNotExist(msg)

        try:
            six.next(queryset)
            # Check if there is another match
            next(queryset)
        except StopIteration:
            return result

        # If we were able to retrieve the 2nd doc, rewind the cursor and
        # raise the MultipleObjectsReturned exception.
        queryset.rewind()
        message = u"%d items returned, instead of 1" % queryset.count()
        raise queryset._document.MultipleObjectsReturned(message)
        # If we were able to retrieve the 2nd doc, raise the MultipleObjectsReturned exception.
        raise queryset._document.MultipleObjectsReturned(
            "2 or more items returned, instead of 1"
        )
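The behaviour of get() after this hunk, sketched with a hypothetical User document:

# Sketch of get() semantics (not part of the diff):
user = User.objects.get(email="a@example.com")  # exactly one match -> instance
# zero matches -> raises User.DoesNotExist
# two or more  -> raises User.MultipleObjectsReturned("2 or more items
#                 returned, instead of 1"), without the old rewind-and-count
#                 round trip to the server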

    def create(self, **kwargs):
        """Create new object. Returns the saved object instance.

        .. versionadded:: 0.4
        """
        """Create new object. Returns the saved object instance."""
        return self._document(**kwargs).save(force_insert=True)

    def first(self):
@@ -301,15 +307,11 @@ class BaseQuerySet(object):
            ``insert(..., {w: 2, fsync: True})`` will wait until at least
            two servers have recorded the write and will force an fsync on
            each server being written to.
        :parm signal_kwargs: (optional) kwargs dictionary to be passed to
        :param signal_kwargs: (optional) kwargs dictionary to be passed to
            the signal calls.

        By default returns document instances, set ``load_bulk`` to False to
        return just ``ObjectIds``

        .. versionadded:: 0.5
        .. versionchanged:: 0.10.7
            Add signal_kwargs argument
        """
        Document = _import_class("Document")

@@ -352,20 +354,20 @@ class BaseQuerySet(object):
            )
        except pymongo.errors.DuplicateKeyError as err:
            message = "Could not save document (%s)"
            raise NotUniqueError(message % six.text_type(err))
            raise NotUniqueError(message % err)
        except pymongo.errors.BulkWriteError as err:
            # inserting documents that already have an _id field will
            # incur a huge performance penalty or raise an error
            message = u"Bulk write error: (%s)"
            raise BulkWriteError(message % six.text_type(err.details))
            message = "Bulk write error: (%s)"
            raise BulkWriteError(message % err.details)
        except pymongo.errors.OperationFailure as err:
            message = "Could not save document (%s)"
            if re.match("^E1100[01] duplicate key", six.text_type(err)):
            if re.match("^E1100[01] duplicate key", str(err)):
                # E11000 - duplicate key error index
                # E11001 - duplicate key on update
                message = u"Tried to save duplicate unique keys (%s)"
                raise NotUniqueError(message % six.text_type(err))
            raise OperationError(message % six.text_type(err))
                message = "Tried to save duplicate unique keys (%s)"
                raise NotUniqueError(message % err)
            raise OperationError(message % err)

        # Apply inserted_ids to documents
        for doc, doc_id in zip(docs, ids):
@@ -391,9 +393,36 @@ class BaseQuerySet(object):
        :meth:`skip` that has been applied to this cursor into account when
        getting the count
        """
        if self._limit == 0 and with_limit_and_skip is False or self._none:
        # mimic the fact that setting .limit(0) in pymongo sets no limit
        # https://docs.mongodb.com/manual/reference/method/cursor.limit/#zero-value
        if (
            self._limit == 0
            and with_limit_and_skip is False
            or self._none
            or self._empty
        ):
            return 0
        count = self._cursor.count(with_limit_and_skip=with_limit_and_skip)

        kwargs = (
            {"limit": self._limit, "skip": self._skip} if with_limit_and_skip else {}
        )

        if self._limit == 0:
            # mimic the fact that historically .limit(0) sets no limit
            kwargs.pop("limit", None)

        if self._hint not in (-1, None):
            kwargs["hint"] = self._hint

        if self._collation:
            kwargs["collation"] = self._collation

        count = count_documents(
            collection=self._cursor.collection,
            filter=self._query,
            **kwargs,
        )

        self._cursor_obj = None
        return count
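Roughly, the rewritten count() now assembles the kwargs itself and routes them through the count_documents shim; a hedged sketch (qs stands for any queryset):

# Sketch of what the rewritten count() forwards (not part of the diff):
n = qs.skip(10).limit(5).count(with_limit_and_skip=True)
# -> count_documents(collection=..., filter=qs._query, skip=10, limit=5)
# A previously applied .hint(...) or .collation(...) is forwarded as well,
# which the old Cursor.count() path used to pick up implicitly.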

@@ -489,7 +518,13 @@ class BaseQuerySet(object):
        return result.deleted_count

    def update(
        self, upsert=False, multi=True, write_concern=None, full_result=False, **update
        self,
        upsert=False,
        multi=True,
        write_concern=None,
        read_concern=None,
        full_result=False,
        **update,
    ):
        """Perform an atomic update on the fields matched by the query.

@@ -501,13 +536,12 @@ class BaseQuerySet(object):
            ``save(..., write_concern={w: 2, fsync: True}, ...)`` will
            wait until at least two servers have recorded the write and
            will force an fsync on the primary server.
        :param read_concern: Override the read concern for the operation
        :param full_result: Return the associated ``pymongo.UpdateResult`` rather than just the number
            of updated items
        :param update: Django-style update keyword arguments

        :returns the number of updated documents (unless ``full_result`` is True)

        .. versionadded:: 0.2
        """
        if not update and not upsert:
            raise OperationError("No update parameters, would remove data")
@@ -527,7 +561,9 @@ class BaseQuerySet(object):
        else:
            update["$set"] = {"_cls": queryset._document._class_name}
        try:
            with set_write_concern(queryset._collection, write_concern) as collection:
            with set_read_write_concern(
                queryset._collection, write_concern, read_concern
            ) as collection:
                update_func = collection.update_one
                if multi:
                    update_func = collection.update_many
@@ -537,14 +573,14 @@ class BaseQuerySet(object):
            elif result.raw_result:
                return result.raw_result["n"]
        except pymongo.errors.DuplicateKeyError as err:
            raise NotUniqueError(u"Update failed (%s)" % six.text_type(err))
            raise NotUniqueError("Update failed (%s)" % err)
        except pymongo.errors.OperationFailure as err:
            if six.text_type(err) == u"multi not coded yet":
                message = u"update() method requires MongoDB 1.1.3+"
            if str(err) == "multi not coded yet":
                message = "update() method requires MongoDB 1.1.3+"
                raise OperationError(message)
            raise OperationError(u"Update failed (%s)" % six.text_type(err))
            raise OperationError("Update failed (%s)" % err)

    def upsert_one(self, write_concern=None, **update):
    def upsert_one(self, write_concern=None, read_concern=None, **update):
        """Overwrite or add the first document matched by the query.

        :param write_concern: Extra keyword arguments are passed down which
@@ -553,19 +589,19 @@ class BaseQuerySet(object):
            ``save(..., write_concern={w: 2, fsync: True}, ...)`` will
            wait until at least two servers have recorded the write and
            will force an fsync on the primary server.
        :param read_concern: Override the read concern for the operation
        :param update: Django-style update keyword arguments

        :returns the new or overwritten document

        .. versionadded:: 0.10.2
        """

        atomic_update = self.update(
            multi=False,
            upsert=True,
            write_concern=write_concern,
            read_concern=read_concern,
            full_result=True,
            **update
            **update,
        )

        if atomic_update.raw_result["updatedExisting"]:
@@ -590,14 +626,13 @@ class BaseQuerySet(object):
        :param update: Django-style update keyword arguments
            full_result
        :returns the number of updated documents (unless ``full_result`` is True)
        .. versionadded:: 0.2
        """
        return self.update(
            upsert=upsert,
            multi=False,
            write_concern=write_concern,
            full_result=full_result,
            **update
            **update,
        )

    def modify(
@@ -622,8 +657,6 @@ class BaseQuerySet(object):
        :param new: return updated rather than original document
            (default ``False``)
        :param update: Django-style update keyword arguments

        .. versionadded:: 0.9
        """

        if remove and new:
@@ -657,21 +690,19 @@ class BaseQuerySet(object):
                upsert=upsert,
                sort=sort,
                return_document=return_doc,
                **self._cursor_args
                **self._cursor_args,
            )
        except pymongo.errors.DuplicateKeyError as err:
            raise NotUniqueError(u"Update failed (%s)" % err)
            raise NotUniqueError("Update failed (%s)" % err)
        except pymongo.errors.OperationFailure as err:
            raise OperationError(u"Update failed (%s)" % err)
            raise OperationError("Update failed (%s)" % err)

        if full_response:
            if result["value"] is not None:
                result["value"] = self._document._from_son(
                    result["value"], only_fields=self.only_fields
                )
                result["value"] = self._document._from_son(result["value"])
        else:
            if result is not None:
                result = self._document._from_son(result, only_fields=self.only_fields)
                result = self._document._from_son(result)

        return result

@@ -681,8 +712,6 @@ class BaseQuerySet(object):
        `None` if no document exists with that id.

        :param object_id: the value for the id of the document to look up

        .. versionchanged:: 0.6 Raises InvalidQueryError if filter has been set
        """
        queryset = self.clone()
        if not queryset._query_obj.empty:
@@ -693,20 +722,16 @@ class BaseQuerySet(object):
    def in_bulk(self, object_ids):
        """Retrieve a set of documents by their ids.

        :param object_ids: a list or tuple of ``ObjectId``\ s
        :rtype: dict of ObjectIds as keys and collection-specific
        :param object_ids: a list or tuple of ObjectId's
        :rtype: dict of ObjectId's as keys and collection-specific
            Document subclasses as values.

        .. versionadded:: 0.3
        """
        doc_map = {}

        docs = self._collection.find({"_id": {"$in": object_ids}}, **self._cursor_args)
        if self._scalar:
            for doc in docs:
                doc_map[doc["_id"]] = self._get_scalar(
                    self._document._from_son(doc, only_fields=self.only_fields)
                )
                doc_map[doc["_id"]] = self._get_scalar(self._document._from_son(doc))
        elif self._as_pymongo:
            for doc in docs:
                doc_map[doc["_id"]] = doc
@@ -714,14 +739,15 @@ class BaseQuerySet(object):
            for doc in docs:
                doc_map[doc["_id"]] = self._document._from_son(
                    doc,
                    only_fields=self.only_fields,
                    _auto_dereference=self._auto_dereference,
                )

        return doc_map
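in_bulk resolves many ids with a single $in query and returns a dict keyed by ObjectId; a usage sketch with a hypothetical Book document:

# Usage sketch for in_bulk (not part of the diff):
ids = [book1.id, book2.id]
books_by_id = Book.objects.in_bulk(ids)  # one find() with {"_id": {"$in": ids}}
first = books_by_id[ids[0]]              # dict of ObjectId -> Book instance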

    def none(self):
        """Helper that just returns a list"""
        """Returns a queryset that never returns any objects and no query will be executed when accessing the results
        inspired by django none() https://docs.djangoproject.com/en/dev/ref/models/querysets/#none
        """
        queryset = self.clone()
        queryset._none = True
        return queryset
@@ -741,8 +767,6 @@ class BaseQuerySet(object):
        evaluated against if you are using more than one database.

        :param alias: The database alias

        .. versionadded:: 0.9
        """

        with switch_db(self._document, alias) as cls:
@@ -774,17 +798,19 @@ class BaseQuerySet(object):
            "_ordering",
            "_snapshot",
            "_timeout",
            "_slave_okay",
            "_allow_disk_use",
            "_read_preference",
            "_read_concern",
            "_iter",
            "_scalar",
            "_as_pymongo",
            "_limit",
            "_skip",
            "_empty",
            "_hint",
            "_collation",
            "_auto_dereference",
            "_search_text",
            "only_fields",
            "_max_time_ms",
            "_comment",
            "_batch_size",
@@ -803,8 +829,6 @@ class BaseQuerySet(object):
        """Handles dereferencing of :class:`~bson.dbref.DBRef` objects or
        :class:`~bson.object_id.ObjectId` a maximum depth in order to cut down
        the number of queries to mongodb.

        .. versionadded:: 0.5
        """
        # Make select related work the same for querysets
        max_depth += 1
@@ -820,6 +844,7 @@ class BaseQuerySet(object):
        """
        queryset = self.clone()
        queryset._limit = n
        queryset._empty = False  # cancels the effect of empty

        # If a cursor object has already been created, apply the limit to it.
        if queryset._cursor_obj:
@@ -852,8 +877,6 @@ class BaseQuerySet(object):

        Hinting will not do anything if the corresponding index does not exist.
        The last hint applied to this cursor takes precedence over all others.

        .. versionadded:: 0.5
        """
        queryset = self.clone()
        queryset._hint = index
@@ -864,6 +887,32 @@ class BaseQuerySet(object):

        return queryset

    def collation(self, collation=None):
        """
        Collation allows users to specify language-specific rules for string
        comparison, such as rules for lettercase and accent marks.
        :param collation: `~pymongo.collation.Collation` or dict with the
            following fields:
            {
                locale: str,
                caseLevel: bool,
                caseFirst: str,
                strength: int,
                numericOrdering: bool,
                alternate: str,
                maxVariable: str,
                backwards: str
            }
        Collation should be added to indexes like in the test example
        """
        queryset = self.clone()
        queryset._collation = collation

        if queryset._cursor_obj:
            queryset._cursor_obj.collation(collation)

        return queryset
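A usage sketch for the new collation() method (the model is hypothetical; as the docstring notes, a matching collation should exist on the index being used):

# Usage sketch for collation() (not part of the diff):
from pymongo.collation import Collation

# Case-insensitive ordering by title:
qs = BlogPost.objects.order_by("title").collation(Collation(locale="en", strength=2))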

    def batch_size(self, size):
        """Limit the number of documents returned in a single batch (each
        batch requires a round trip to the server).
@@ -889,10 +938,6 @@ class BaseQuerySet(object):

        .. note:: This is a command and won't take ordering or limit into
           account.

        .. versionadded:: 0.4
        .. versionchanged:: 0.5 - Fixed handling references
        .. versionchanged:: 0.6 - Improved db_field reference handling
        """
        queryset = self.clone()

@@ -956,12 +1001,8 @@ class BaseQuerySet(object):
        field filters.

        :param fields: fields to include

        .. versionadded:: 0.3
        .. versionchanged:: 0.5 - Added subfield support
        """
        fields = {f: QueryFieldList.ONLY for f in fields}
        self.only_fields = fields.keys()
        return self.fields(True, **fields)

    def exclude(self, *fields):
@@ -978,8 +1019,6 @@ class BaseQuerySet(object):
        field filters.

        :param fields: fields to exclude

        .. versionadded:: 0.5
        """
        fields = {f: QueryFieldList.EXCLUDE for f in fields}
        return self.fields(**fields)
@@ -998,18 +1037,18 @@ class BaseQuerySet(object):

            posts = BlogPost.objects(...).fields(comments=0)

        To retrieve a subrange of array elements:
        To retrieve a subrange or sublist of array elements,
        support exists for both the `slice` and `elemMatch` projection operators:

            posts = BlogPost.objects(...).fields(slice__comments=5)
            posts = BlogPost.objects(...).fields(elemMatch__comments="test")

        :param kwargs: A set of keyword arguments identifying what to
            include, exclude, or slice.

        .. versionadded:: 0.5
        """

        # Check for an operator and transform to mongo-style if there is
        operators = ["slice"]
        operators = ["slice", "elemMatch"]
        cleaned_fields = []
        for key, value in kwargs.items():
            parts = key.split("__")
@@ -1047,8 +1086,6 @@ class BaseQuerySet(object):
        .exclude(). ::

            post = BlogPost.objects.exclude('comments').all_fields()

        .. versionadded:: 0.5
        """
        queryset = self.clone()
        queryset._loaded_fields = QueryFieldList(
@@ -1112,7 +1149,7 @@ class BaseQuerySet(object):

    def explain(self):
        """Return an explain plan record for the
        :class:`~mongoengine.queryset.QuerySet`\ 's cursor.
        :class:`~mongoengine.queryset.QuerySet` cursor.
        """
        return self._cursor.explain()

@@ -1121,9 +1158,6 @@ class BaseQuerySet(object):
        """Enable or disable snapshot mode when querying.

        :param enabled: whether or not snapshot mode is enabled

        .. versionchanged:: 0.5 - made chainable
        .. deprecated:: Ignored with PyMongo 3+
        """
        msg = "snapshot is deprecated as it has no impact when using PyMongo 3+."
        warnings.warn(msg, DeprecationWarning)
@@ -1131,31 +1165,25 @@ class BaseQuerySet(object):
        queryset._snapshot = enabled
        return queryset

    def allow_disk_use(self, enabled):
        """Enable or disable the use of temporary files on disk while processing a blocking sort operation.
        (To store data exceeding the 100 megabyte system memory limit)

        :param enabled: whether or not temporary files on disk are used
        """
        queryset = self.clone()
        queryset._allow_disk_use = enabled
        return queryset

    def timeout(self, enabled):
        """Enable or disable the default mongod timeout when querying. (no_cursor_timeout option)

        :param enabled: whether or not the timeout is used

        .. versionchanged:: 0.5 - made chainable
        """
        queryset = self.clone()
        queryset._timeout = enabled
        return queryset

    # DEPRECATED. Has no more impact on PyMongo 3+
    def slave_okay(self, enabled):
        """Enable or disable the slave_okay when querying.

        :param enabled: whether or not the slave_okay is enabled

        .. deprecated:: Ignored with PyMongo 3+
        """
        msg = "slave_okay is deprecated as it has no impact when using PyMongo 3+."
        warnings.warn(msg, DeprecationWarning)
        queryset = self.clone()
        queryset._slave_okay = enabled
        return queryset

    def read_preference(self, read_preference):
        """Change the read_preference when querying.

@@ -1165,9 +1193,23 @@ class BaseQuerySet(object):
        validate_read_preference("read_preference", read_preference)
        queryset = self.clone()
        queryset._read_preference = read_preference
        queryset._cursor_obj = (
            None
        )  # we need to re-create the cursor object whenever we apply read_preference
        queryset._cursor_obj = None  # we need to re-create the cursor object whenever we apply read_preference
        return queryset

    def read_concern(self, read_concern):
        """Change the read_concern when querying.

        :param read_concern: override ReplicaSetConnection-level
            preference.
        """
        if read_concern is not None and not isinstance(read_concern, Mapping):
            raise TypeError(f"{read_concern!r} is not a valid read concern.")

        queryset = self.clone()
        queryset._read_concern = (
            ReadConcern(**read_concern) if read_concern is not None else None
        )
        queryset._cursor_obj = None  # we need to re-create the cursor object whenever we apply read_concern
        return queryset
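A usage sketch for the new read_concern() method (the model is hypothetical):

# Usage sketch for read_concern() (not part of the diff):
qs = Order.objects(status="paid").read_concern({"level": "majority"})
# The dict is validated as a Mapping and wrapped in pymongo's ReadConcern;
# the cached cursor is dropped so the next read uses the new concern.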

    def scalar(self, *fields):
@@ -1224,21 +1266,28 @@ class BaseQuerySet(object):
    def from_json(self, json_data):
        """Converts json data to unsaved objects"""
        son_data = json_util.loads(json_data)
        return [
            self._document._from_son(data, only_fields=self.only_fields)
            for data in son_data
        ]
        return [self._document._from_son(data) for data in son_data]

    def aggregate(self, pipeline, *suppl_pipeline, **kwargs):
        """Perform an aggregate function based on your queryset params

    def aggregate(self, *pipeline, **kwargs):
        """
        Perform an aggregate function based on your queryset params
        :param pipeline: list of aggregation commands,\
            see: http://docs.mongodb.org/manual/core/aggregation-pipeline/

        .. versionadded:: 0.9
        :param suppl_pipeline: unpacked list of pipeline (added to support deprecation of the old interface)
            parameter will be removed shortly
        :param kwargs: (optional) kwargs dictionary to be passed to pymongo's aggregate call
            See https://api.mongodb.com/python/current/api/pymongo/collection.html#pymongo.collection.Collection.aggregate
        """
        initial_pipeline = []
        using_deprecated_interface = isinstance(pipeline, dict) or bool(suppl_pipeline)
        user_pipeline = [pipeline] if isinstance(pipeline, dict) else list(pipeline)

        if using_deprecated_interface:
            msg = "Calling .aggregate() with an unpacked list (*pipeline) is deprecated, it will soon change and will expect a list (similar to pymongo.Collection.aggregate interface), see documentation"
            warnings.warn(msg, DeprecationWarning)

            user_pipeline += suppl_pipeline

        initial_pipeline = []
        if self._query:
            initial_pipeline.append({"$match": self._query})

@@ -1255,14 +1304,15 @@ class BaseQuerySet(object):
        if self._skip is not None:
            initial_pipeline.append({"$skip": self._skip})

        pipeline = initial_pipeline + list(pipeline)
        final_pipeline = initial_pipeline + user_pipeline

        if self._read_preference is not None:
            return self._collection.with_options(
                read_preference=self._read_preference
            ).aggregate(pipeline, cursor={}, **kwargs)
        collection = self._collection
        if self._read_preference is not None or self._read_concern is not None:
            collection = self._collection.with_options(
                read_preference=self._read_preference, read_concern=self._read_concern
            )

        return self._collection.aggregate(pipeline, cursor={}, **kwargs)
        return collection.aggregate(final_pipeline, cursor={}, **kwargs)
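A sketch of the interface change above (Sale is a hypothetical Document):

# Sketch of the aggregate() interface change (not part of the diff):
pipeline = [{"$group": {"_id": "$region", "total": {"$sum": "$amount"}}}]

Sale.objects.aggregate(pipeline)   # new: pass the list itself (pymongo-style)
Sale.objects.aggregate(*pipeline)  # old: unpacked stages, now a DeprecationWarning
# Queryset state (filter, order_by, limit, skip) is still prepended as
# $match/$sort/$limit/$skip stages ahead of the user pipeline.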

    # JS functionality
    def map_reduce(
@@ -1298,32 +1348,23 @@ class BaseQuerySet(object):
        Map/Reduce changed in server version **>= 1.7.4**. The PyMongo
        :meth:`~pymongo.collection.Collection.map_reduce` helper requires
        PyMongo version **>= 1.11**.

        .. versionchanged:: 0.5
           - removed ``keep_temp`` keyword argument, which was only relevant
             for MongoDB server versions older than 1.7.4

        .. versionadded:: 0.3
        """
        queryset = self.clone()

        MapReduceDocument = _import_class("MapReduceDocument")

        if not hasattr(self._collection, "map_reduce"):
            raise NotImplementedError("Requires MongoDB >= 1.7.1")

        map_f_scope = {}
        if isinstance(map_f, Code):
            map_f_scope = map_f.scope
            map_f = six.text_type(map_f)
        map_f = Code(queryset._sub_js_fields(map_f), map_f_scope)
            map_f = str(map_f)
        map_f = Code(queryset._sub_js_fields(map_f), map_f_scope or None)

        reduce_f_scope = {}
        if isinstance(reduce_f, Code):
            reduce_f_scope = reduce_f.scope
            reduce_f = six.text_type(reduce_f)
            reduce_f = str(reduce_f)
        reduce_f_code = queryset._sub_js_fields(reduce_f)
        reduce_f = Code(reduce_f_code, reduce_f_scope)
        reduce_f = Code(reduce_f_code, reduce_f_scope or None)

        mr_args = {"query": queryset._query}

@@ -1331,9 +1372,9 @@ class BaseQuerySet(object):
            finalize_f_scope = {}
            if isinstance(finalize_f, Code):
                finalize_f_scope = finalize_f.scope
                finalize_f = six.text_type(finalize_f)
                finalize_f = str(finalize_f)
            finalize_f_code = queryset._sub_js_fields(finalize_f)
            finalize_f = Code(finalize_f_code, finalize_f_scope)
            finalize_f = Code(finalize_f_code, finalize_f_scope or None)
            mr_args["finalize"] = finalize_f

        if scope:
@@ -1347,7 +1388,7 @@ class BaseQuerySet(object):
        else:
            map_reduce_function = "map_reduce"

        if isinstance(output, six.string_types):
        if isinstance(output, str):
            mr_args["out"] = output

        elif isinstance(output, dict):
@@ -1440,8 +1481,6 @@ class BaseQuerySet(object):
        .. note:: When using this mode of query, the database will call your
                  function, or evaluate your predicate clause, for each object
                  in the collection.

        .. versionadded:: 0.5
        """
        queryset = self.clone()
        where_clause = queryset._sub_js_fields(where_clause)
@@ -1518,9 +1557,6 @@ class BaseQuerySet(object):
        :param field: the field to use
        :param normalize: normalize the results so they add to 1.0
        :param map_reduce: Use map_reduce over exec_js

        .. versionchanged:: 0.5 defaults to map_reduce and can handle embedded
           document lookups
        """
        if map_reduce:
            return self._item_frequencies_map_reduce(field, normalize=normalize)
@@ -1529,12 +1565,11 @@ class BaseQuerySet(object):
    # Iterator helpers

    def __next__(self):
        """Wrap the result in a :class:`~mongoengine.Document` object.
        """
        if self._limit == 0 or self._none:
        """Wrap the result in a :class:`~mongoengine.Document` object."""
        if self._none or self._empty:
            raise StopIteration

        raw_doc = six.next(self._cursor)
        raw_doc = next(self._cursor)

        if self._as_pymongo:
            return raw_doc
@@ -1542,7 +1577,6 @@ class BaseQuerySet(object):
        doc = self._document._from_son(
            raw_doc,
            _auto_dereference=self._auto_dereference,
            only_fields=self.only_fields,
        )

        if self._scalar:
@@ -1550,13 +1584,8 @@ class BaseQuerySet(object):

        return doc

    next = __next__  # For Python2 support

    def rewind(self):
        """Rewind the cursor to its unevaluated state.

        .. versionadded:: 0.3
        """
        """Rewind the cursor to its unevaluated state."""
        self._iter = False
        self._cursor.rewind()

@@ -1577,7 +1606,13 @@ class BaseQuerySet(object):
        if self._snapshot:
            msg = "The snapshot option is not anymore available with PyMongo 3+"
            warnings.warn(msg, DeprecationWarning)
        cursor_args = {"no_cursor_timeout": not self._timeout}

        cursor_args = {}
        if not self._timeout:
            cursor_args["no_cursor_timeout"] = True

        if self._allow_disk_use:
            cursor_args["allow_disk_use"] = True

        if self._loaded_fields:
            cursor_args[fields_name] = self._loaded_fields.as_dict()
@@ -1602,12 +1637,13 @@ class BaseQuerySet(object):
        # XXX In PyMongo 3+, we define the read preference on a collection
        # level, not a cursor level. Thus, we need to get a cloned collection
        # object using `with_options` first.
        if self._read_preference is not None:
        if self._read_preference is not None or self._read_concern is not None:
            self._cursor_obj = self._collection.with_options(
                read_preference=self._read_preference
                read_preference=self._read_preference, read_concern=self._read_concern
            ).find(self._query, **self._cursor_args)
        else:
            self._cursor_obj = self._collection.find(self._query, **self._cursor_args)

        # Apply "where" clauses to cursor
        if self._where_clause:
            where_clause = self._sub_js_fields(self._where_clause)
@@ -1637,6 +1673,9 @@ class BaseQuerySet(object):
        if self._hint != -1:
            self._cursor_obj.hint(self._hint)

        if self._collation is not None:
            self._cursor_obj.collation(self._collation)

        if self._batch_size is not None:
            self._cursor_obj.batch_size(self._batch_size)

@@ -1772,13 +1811,13 @@ class BaseQuerySet(object):
        }
        """
        total, data, types = self.exec_js(freq_func, field)
        values = {types.get(k): int(v) for k, v in iteritems(data)}
        values = {types.get(k): int(v) for k, v in data.items()}

        if normalize:
            values = {k: float(v) / total for k, v in values.items()}

        frequencies = {}
        for k, v in iteritems(values):
        for k, v in values.items():
            if isinstance(k, float):
                if int(k) == k:
                    k = int(k)
@@ -1798,7 +1837,7 @@ class BaseQuerySet(object):
            field_parts = field.split(".")
            try:
                field = ".".join(
                    f if isinstance(f, six.string_types) else f.db_field
                    f if isinstance(f, str) else f.db_field
                    for f in self._document._lookup_field(field_parts)
                )
                db_field_paths.append(field)
@@ -1810,7 +1849,7 @@ class BaseQuerySet(object):
            for subdoc in subclasses:
                try:
                    subfield = ".".join(
                        f if isinstance(f, six.string_types) else f.db_field
                        f if isinstance(f, str) else f.db_field
                        for f in subdoc._lookup_field(field_parts)
                    )
                    db_field_paths.append(subfield)
@@ -1884,7 +1923,7 @@ class BaseQuerySet(object):
            field_name = match.group(1).split(".")
            fields = self._document._lookup_field(field_name)
            # Substitute the correct name for the field into the javascript
            return u'["%s"]' % fields[-1].db_field
            return '["%s"]' % fields[-1].db_field

        def field_path_sub(match):
            # Extract just the field name, and look up the field objects
@@ -1914,23 +1953,3 @@ class BaseQuerySet(object):
            setattr(queryset, "_" + method_name, val)

        return queryset

    # Deprecated
    def ensure_index(self, **kwargs):
        """Deprecated use :func:`Document.ensure_index`"""
        msg = (
            "Doc.objects()._ensure_index() is deprecated. "
            "Use Doc.ensure_index() instead."
        )
        warnings.warn(msg, DeprecationWarning)
        self._document.__class__.ensure_index(**kwargs)
        return self

    def _ensure_indexes(self):
        """Deprecated use :func:`~Document.ensure_indexes`"""
        msg = (
            "Doc.objects()._ensure_indexes() is deprecated. "
            "Use Doc.ensure_indexes() instead."
        )
        warnings.warn(msg, DeprecationWarning)
        self._document.__class__.ensure_indexes()
@ -1,7 +1,7 @@
|
||||
__all__ = ("QueryFieldList",)
|
||||
|
||||
|
||||
class QueryFieldList(object):
|
||||
class QueryFieldList:
|
||||
"""Object that handles combinations of .only() and .exclude() calls"""
|
||||
|
||||
ONLY = 1
|
||||
@ -69,8 +69,6 @@ class QueryFieldList(object):
|
||||
def __bool__(self):
|
||||
return bool(self.fields)
|
||||
|
||||
__nonzero__ = __bool__ # For Py2 support
|
||||
|
||||
def as_dict(self):
|
||||
field_list = {field: self.value for field in self.fields}
|
||||
if self.slice:
|
||||
@ -80,7 +78,7 @@ class QueryFieldList(object):
|
||||
return field_list
|
||||
|
||||
def reset(self):
|
||||
self.fields = set([])
|
||||
self.fields = set()
|
||||
self.slice = {}
|
||||
self.value = self.ONLY
|
||||
|
||||
|
@ -1,10 +1,11 @@
|
||||
from functools import partial
|
||||
|
||||
from mongoengine.queryset.queryset import QuerySet
|
||||
|
||||
__all__ = ("queryset_manager", "QuerySetManager")
|
||||
|
||||
|
||||
class QuerySetManager(object):
|
||||
class QuerySetManager:
|
||||
"""
|
||||
The default QuerySet Manager.
|
||||
|
||||
|
@ -1,13 +1,11 @@
|
||||
import six
|
||||
|
||||
from mongoengine.errors import OperationError
|
||||
from mongoengine.queryset.base import (
|
||||
BaseQuerySet,
|
||||
CASCADE,
|
||||
DENY,
|
||||
DO_NOTHING,
|
||||
NULLIFY,
|
||||
PULL,
|
||||
BaseQuerySet,
|
||||
)
|
||||
|
||||
__all__ = (
|
||||
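The `import six` removal above recurs in every file of this compare; the substitutions are mechanical, as the runnable illustration below shows:

```python
data = {"a": "1", "b": "2"}

# six.iteritems(d)                 -> d.items()
values = {k: int(v) for k, v in data.items()}

# isinstance(x, six.string_types)  -> isinstance(x, str)
assert all(isinstance(k, str) for k in data)

# six.moves.range / six.next       -> range / next
it = iter(values)
for _ in range(len(values)):
    next(it)

# set([])                          -> set()
assert set([]) == set()
```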
@ -127,8 +125,8 @@ class QuerySet(BaseQuerySet):
|
||||
# Pull in ITER_CHUNK_SIZE docs from the database and store them in
|
||||
# the result cache.
|
||||
try:
|
||||
for _ in six.moves.range(ITER_CHUNK_SIZE):
|
||||
self._result_cache.append(six.next(self))
|
||||
for _ in range(ITER_CHUNK_SIZE):
|
||||
self._result_cache.append(next(self))
|
||||
except StopIteration:
|
||||
# Getting this exception means there are no more docs in the
|
||||
# db cursor. Set _has_more to False so that we can use that
|
||||
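A stripped-down sketch of the chunked caching shown in this hunk, with the `StopIteration` handling made explicit (class name and chunk size are illustrative):

```python
ITER_CHUNK_SIZE = 100  # illustrative value

class CachingIterable:
    def __init__(self, source):
        self._source = iter(source)
        self._result_cache = []
        self._has_more = True

    def _populate_next_chunk(self):
        # Pull in ITER_CHUNK_SIZE items and store them in the cache.
        try:
            for _ in range(ITER_CHUNK_SIZE):
                self._result_cache.append(next(self._source))
        except StopIteration:
            # The source is exhausted; remember that so later lookups
            # can be answered from the cache alone.
            self._has_more = False

cache = CachingIterable(range(150))
cache._populate_next_chunk()
assert len(cache._result_cache) == 100 and cache._has_more
cache._populate_next_chunk()
assert len(cache._result_cache) == 150 and not cache._has_more
```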
@ -143,18 +141,16 @@ class QuerySet(BaseQuerySet):
|
||||
getting the count
|
||||
"""
|
||||
if with_limit_and_skip is False:
|
||||
return super(QuerySet, self).count(with_limit_and_skip)
|
||||
return super().count(with_limit_and_skip)
|
||||
|
||||
if self._len is None:
|
||||
self._len = super(QuerySet, self).count(with_limit_and_skip)
|
||||
# cache the length
|
||||
self._len = super().count(with_limit_and_skip)
|
||||
|
||||
return self._len
|
||||
|
||||
def no_cache(self):
|
||||
"""Convert to a non-caching queryset
|
||||
|
||||
.. versionadded:: 0.8.3 Convert to non caching queryset
|
||||
"""
|
||||
"""Convert to a non-caching queryset"""
|
||||
if self._result_cache is not None:
|
||||
raise OperationError("QuerySet already cached")
|
||||
|
||||
@ -165,24 +161,18 @@ class QuerySetNoCache(BaseQuerySet):
|
||||
"""A non caching QuerySet"""
|
||||
|
||||
def cache(self):
|
||||
"""Convert to a caching queryset
|
||||
|
||||
.. versionadded:: 0.8.3 Convert to caching queryset
|
||||
"""
|
||||
"""Convert to a caching queryset"""
|
||||
return self._clone_into(QuerySet(self._document, self._collection))
|
||||
|
||||
def __repr__(self):
|
||||
"""Provides the string representation of the QuerySet
|
||||
|
||||
.. versionchanged:: 0.6.13 Now doesnt modify the cursor
|
||||
"""
|
||||
"""Provides the string representation of the QuerySet"""
|
||||
if self._iter:
|
||||
return ".. queryset mid-iteration .."
|
||||
|
||||
data = []
|
||||
for _ in six.moves.range(REPR_OUTPUT_SIZE + 1):
|
||||
for _ in range(REPR_OUTPUT_SIZE + 1):
|
||||
try:
|
||||
data.append(six.next(self))
|
||||
data.append(next(self))
|
||||
except StopIteration:
|
||||
break
|
||||
|
||||
|
@ -1,16 +1,14 @@
|
||||
from collections import defaultdict
|
||||
|
||||
from bson import ObjectId, SON
|
||||
from bson.dbref import DBRef
|
||||
import pymongo
|
||||
import six
|
||||
from six import iteritems
|
||||
from bson import SON, ObjectId
|
||||
from bson.dbref import DBRef
|
||||
|
||||
from mongoengine.base import UPDATE_OPERATORS
|
||||
from mongoengine.common import _import_class
|
||||
from mongoengine.errors import InvalidQueryError
|
||||
|
||||
__all__ = ("query", "update")
|
||||
__all__ = ("query", "update", "STRING_OPERATORS")
|
||||
|
||||
COMPARISON_OPERATORS = (
|
||||
"ne",
|
||||
@ -53,6 +51,10 @@ STRING_OPERATORS = (
|
||||
"iendswith",
|
||||
"exact",
|
||||
"iexact",
|
||||
"regex",
|
||||
"iregex",
|
||||
"wholeword",
|
||||
"iwholeword",
|
||||
)
|
||||
CUSTOM_OPERATORS = ("match",)
|
||||
MATCH_OPERATORS = (
|
||||
@ -101,7 +103,7 @@ def query(_doc_cls=None, **kwargs):
|
||||
cleaned_fields = []
|
||||
for field in fields:
|
||||
append_field = True
|
||||
if isinstance(field, six.string_types):
|
||||
if isinstance(field, str):
|
||||
parts.append(field)
|
||||
append_field = False
|
||||
# is last and CachedReferenceField
|
||||
@ -169,9 +171,9 @@ def query(_doc_cls=None, **kwargs):
|
||||
|
||||
key = ".".join(parts)
|
||||
|
||||
if op is None or key not in mongo_query:
|
||||
if key not in mongo_query:
|
||||
mongo_query[key] = value
|
||||
elif key in mongo_query:
|
||||
else:
|
||||
if isinstance(mongo_query[key], dict) and isinstance(value, dict):
|
||||
mongo_query[key].update(value)
|
||||
# $max/minDistance needs to come last - convert to SON
|
||||
@ -180,7 +182,7 @@ def query(_doc_cls=None, **kwargs):
|
||||
"$near" in value_dict or "$nearSphere" in value_dict
|
||||
):
|
||||
value_son = SON()
|
||||
for k, v in iteritems(value_dict):
|
||||
for k, v in value_dict.items():
|
||||
if k == "$maxDistance" or k == "$minDistance":
|
||||
continue
|
||||
value_son[k] = v
|
||||
@ -281,7 +283,7 @@ def update(_doc_cls=None, **update):
|
||||
appended_sub_field = False
|
||||
for field in fields:
|
||||
append_field = True
|
||||
if isinstance(field, six.string_types):
|
||||
if isinstance(field, str):
|
||||
# Convert the S operator to $
|
||||
if field == "S":
|
||||
field = "$"
|
||||
@ -435,7 +437,9 @@ def _geo_operator(field, op, value):
|
||||
value = {"$near": _infer_geometry(value)}
|
||||
else:
|
||||
raise NotImplementedError(
|
||||
'Geo method "%s" has not been implemented for a %s ' % (op, field._name)
|
||||
'Geo method "{}" has not been implemented for a {} '.format(
|
||||
op, field._name
|
||||
)
|
||||
)
|
||||
return value
|
||||
|
||||
|
@ -1,4 +1,5 @@
|
||||
import copy
|
||||
import warnings
|
||||
|
||||
from mongoengine.errors import InvalidQueryError
|
||||
from mongoengine.queryset import transform
|
||||
@ -6,18 +7,20 @@ from mongoengine.queryset import transform
|
||||
__all__ = ("Q", "QNode")
|
||||
|
||||
|
||||
class QNodeVisitor(object):
|
||||
"""Base visitor class for visiting Q-object nodes in a query tree.
|
||||
"""
|
||||
def warn_empty_is_deprecated():
|
||||
msg = "'empty' property is deprecated in favour of using 'not bool(filter)'"
|
||||
warnings.warn(msg, DeprecationWarning, stacklevel=2)
|
||||
|
||||
|
||||
class QNodeVisitor:
|
||||
"""Base visitor class for visiting Q-object nodes in a query tree."""
|
||||
|
||||
def visit_combination(self, combination):
|
||||
"""Called by QCombination objects.
|
||||
"""
|
||||
"""Called by QCombination objects."""
|
||||
return combination
|
||||
|
||||
def visit_query(self, query):
|
||||
"""Called by (New)Q objects.
|
||||
"""
|
||||
"""Called by (New)Q objects."""
|
||||
return query
|
||||
|
||||
|
||||
@ -43,8 +46,7 @@ class SimplificationVisitor(QNodeVisitor):
|
||||
return combination
|
||||
|
||||
def _query_conjunction(self, queries):
|
||||
"""Merges query dicts - effectively &ing them together.
|
||||
"""
|
||||
"""Merges query dicts - effectively &ing them together."""
|
||||
query_ops = set()
|
||||
combined_query = {}
|
||||
for query in queries:
|
||||
@ -78,7 +80,7 @@ class QueryCompilerVisitor(QNodeVisitor):
|
||||
return transform.query(self.document, **query.query)
|
||||
|
||||
|
||||
class QNode(object):
|
||||
class QNode:
|
||||
"""Base class for nodes in query trees."""
|
||||
|
||||
AND = 0
|
||||
@ -96,16 +98,19 @@ class QNode(object):
|
||||
"""Combine this node with another node into a QCombination
|
||||
object.
|
||||
"""
|
||||
if getattr(other, "empty", True):
|
||||
# If the other Q() is empty, ignore it and just use `self`.
|
||||
if not bool(other):
|
||||
return self
|
||||
|
||||
if self.empty:
|
||||
# Or if this Q is empty, ignore it and just use `other`.
|
||||
if not bool(self):
|
||||
return other
|
||||
|
||||
return QCombination(operation, [self, other])
|
||||
|
||||
@property
|
||||
def empty(self):
|
||||
warn_empty_is_deprecated()
|
||||
return False
|
||||
|
||||
def __or__(self, other):
|
||||
@ -135,6 +140,9 @@ class QCombination(QNode):
|
||||
op = " & " if self.operation is self.AND else " | "
|
||||
return "(%s)" % op.join([repr(node) for node in self.children])
|
||||
|
||||
def __bool__(self):
|
||||
return bool(self.children)
|
||||
|
||||
def accept(self, visitor):
|
||||
for i in range(len(self.children)):
|
||||
if isinstance(self.children[i], QNode):
|
||||
@ -144,8 +152,16 @@ class QCombination(QNode):
|
||||
|
||||
@property
|
||||
def empty(self):
|
||||
warn_empty_is_deprecated()
|
||||
return not bool(self.children)
|
||||
|
||||
def __eq__(self, other):
|
||||
return (
|
||||
self.__class__ == other.__class__
|
||||
and self.operation == other.operation
|
||||
and self.children == other.children
|
||||
)
|
||||
|
||||
|
||||
class Q(QNode):
|
||||
"""A simple query object, used in a query tree to build up more complex
|
||||
@ -158,9 +174,16 @@ class Q(QNode):
|
||||
def __repr__(self):
|
||||
return "Q(**%s)" % repr(self.query)
|
||||
|
||||
def __bool__(self):
|
||||
return bool(self.query)
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.__class__ == other.__class__ and self.query == other.query
|
||||
|
||||
def accept(self, visitor):
|
||||
return visitor.visit_query(self)
|
||||
|
||||
@property
|
||||
def empty(self):
|
||||
warn_empty_is_deprecated()
|
||||
return not bool(self.query)
|
||||
|
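With `empty` deprecated, truthiness is the supported emptiness check; a quick illustration of what `_combine` now does (runnable, no database connection needed):

```python
from mongoengine import Q

q = Q()
assert not q  # replaces the deprecated `q.empty`

combined = Q() | Q(name="Brian")
# _combine drops the falsy operand, so the result *is* the
# non-empty Q rather than a one-child QCombination:
assert combined == Q(name="Brian")
assert bool(combined)
```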
@ -15,11 +15,11 @@ try:
|
||||
signals_available = True
|
||||
except ImportError:
|
||||
|
||||
class Namespace(object):
|
||||
class Namespace:
|
||||
def signal(self, name, doc=None):
|
||||
return _FakeSignal(name, doc)
|
||||
|
||||
class _FakeSignal(object):
|
||||
class _FakeSignal:
|
||||
"""If blinker is unavailable, create a fake class with the same
|
||||
interface that allows sending of signals but will fail with an
|
||||
error on anything else. Instead of doing anything on send, it
|
||||
|
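The `_FakeSignal` docstring is cut off at the hunk boundary above. The optional-dependency pattern it describes looks roughly like this sketch; the real class body and error text may differ:

```python
try:
    from blinker import Namespace
    signals_available = True
except ImportError:
    signals_available = False

    class _FakeSignal:
        """No-op on send(); raise for anything that truly needs blinker."""

        def __init__(self, name, doc=None):
            self.name = name
            self.__doc__ = doc

        def send(self, *sender, **kwargs):
            pass  # nothing can be connected, so sending is a no-op

        def _fail(self, *args, **kwargs):
            raise RuntimeError("Signalling support requires blinker.")

        connect = disconnect = has_receivers_for = _fail

    class Namespace:
        def signal(self, name, doc=None):
            return _FakeSignal(name, doc)
```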
requirements-dev.txt (new file, +7 lines)
@ -0,0 +1,7 @@
|
||||
black
|
||||
flake8
|
||||
pre-commit
|
||||
pytest
|
||||
ipdb
|
||||
ipython
|
||||
tox
|
@ -1,3 +0,0 @@
|
||||
black
|
||||
flake8
|
||||
flake8-import-order
|
@ -1,5 +0,0 @@
|
||||
nose
|
||||
pymongo>=3.4
|
||||
six==1.10.0
|
||||
Sphinx==1.5.5
|
||||
sphinx-rtd-theme==0.2.4
|
setup.cfg (23 changed lines)
@ -1,11 +1,18 @@
|
||||
[nosetests]
|
||||
verbosity=2
|
||||
detailed-errors=1
|
||||
#tests=tests
|
||||
cover-package=mongoengine
|
||||
|
||||
[flake8]
|
||||
ignore=E501,F401,F403,F405,I201,I202,W504, W605, W503
|
||||
ignore=E501,F403,F405,I201,I202,W504,W605,W503,B007
|
||||
exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests
|
||||
max-complexity=47
|
||||
application-import-names=mongoengine,tests
|
||||
|
||||
[tool:pytest]
|
||||
# Limits the discovery to tests directory
|
||||
# avoids that it runs for instance the benchmark
|
||||
testpaths = tests
|
||||
|
||||
[isort]
|
||||
known_first_party = mongoengine,tests
|
||||
default_section = THIRDPARTY
|
||||
multi_line_output = 3
|
||||
include_trailing_comma = True
|
||||
combine_as_imports = True
|
||||
line_length = 70
|
||||
ensure_newline_before_comments = 1
|
||||
|
setup.py (90 changed lines)
@ -1,10 +1,13 @@
|
||||
import os
|
||||
import sys
|
||||
|
||||
from pkg_resources import normalize_path
|
||||
from setuptools import find_packages, setup
|
||||
from setuptools.command.test import test as TestCommand
|
||||
|
||||
# Hack to silence atexit traceback in newer python versions
|
||||
try:
|
||||
import multiprocessing
|
||||
import multiprocessing # noqa: F401
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
@ -24,6 +27,62 @@ def get_version(version_tuple):
|
||||
return ".".join(map(str, version_tuple))
|
||||
|
||||
|
||||
class PyTest(TestCommand):
|
||||
"""Will force pytest to search for tests inside the build directory
|
||||
for 2to3 converted code (used by tox), instead of the current directory.
|
||||
Required as long as we need 2to3
|
||||
|
||||
Known Limitation: https://tox.readthedocs.io/en/latest/example/pytest.html#known-issues-and-limitations
|
||||
Source: https://www.hackzine.org/python-testing-with-pytest-and-2to3-plus-tox-and-travis-ci.html
|
||||
"""
|
||||
|
||||
# https://pytest.readthedocs.io/en/2.7.3/goodpractises.html#integration-with-setuptools-test-commands
|
||||
# Allows providing a pytest command argument through the test runner command `python setup.py test`
|
||||
# e.g: `python setup.py test -a "-k=test"`
|
||||
# This only works for 1 argument though
|
||||
user_options = [("pytest-args=", "a", "Arguments to pass to py.test")]
|
||||
|
||||
def initialize_options(self):
|
||||
TestCommand.initialize_options(self)
|
||||
self.pytest_args = ""
|
||||
|
||||
def finalize_options(self):
|
||||
TestCommand.finalize_options(self)
|
||||
self.test_args = ["tests"]
|
||||
self.test_suite = True
|
||||
|
||||
def run_tests(self):
|
||||
# import here, cause outside the eggs aren't loaded
|
||||
import pytest
|
||||
from pkg_resources import _namespace_packages
|
||||
|
||||
# Purge modules under test from sys.modules. The test loader will
|
||||
# re-import them from the build location. Required when 2to3 is used
|
||||
# with namespace packages.
|
||||
if sys.version_info >= (3,) and getattr(self.distribution, "use_2to3", False):
|
||||
module = self.test_args[-1].split(".")[0]
|
||||
if module in _namespace_packages:
|
||||
del_modules = []
|
||||
if module in sys.modules:
|
||||
del_modules.append(module)
|
||||
module += "."
|
||||
for name in sys.modules:
|
||||
if name.startswith(module):
|
||||
del_modules.append(name)
|
||||
map(sys.modules.__delitem__, del_modules)
|
||||
|
||||
# Run on the build directory for 2to3-built code
|
||||
# This will prevent the old 2.x code from being found
|
||||
# by py.test discovery mechanism, that apparently
|
||||
# ignores sys.path..
|
||||
ei_cmd = self.get_finalized_command("egg_info")
|
||||
self.test_args = [normalize_path(ei_cmd.egg_base)]
|
||||
|
||||
cmd_args = self.test_args + ([self.pytest_args] if self.pytest_args else [])
|
||||
errno = pytest.main(cmd_args)
|
||||
sys.exit(errno)
|
||||
|
||||
|
||||
# Dirty hack to get version number from mongoengine/__init__.py - we can't
|
||||
# import it as it depends on PyMongo and PyMongo isn't installed until this
|
||||
# file is read
|
||||
@ -33,16 +92,17 @@ version_line = list(filter(lambda l: l.startswith("VERSION"), open(init)))[0]
|
||||
VERSION = get_version(eval(version_line.split("=")[-1]))
|
||||
|
||||
CLASSIFIERS = [
|
||||
"Development Status :: 4 - Beta",
|
||||
"Development Status :: 5 - Production/Stable",
|
||||
"Intended Audience :: Developers",
|
||||
"License :: OSI Approved :: MIT License",
|
||||
"Operating System :: OS Independent",
|
||||
"Programming Language :: Python",
|
||||
"Programming Language :: Python :: 2",
|
||||
"Programming Language :: Python :: 2.7",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3.5",
|
||||
"Programming Language :: Python :: 3.6",
|
||||
"Programming Language :: Python :: 3.7",
|
||||
"Programming Language :: Python :: 3.8",
|
||||
"Programming Language :: Python :: 3.9",
|
||||
"Programming Language :: Python :: 3.10",
|
||||
"Programming Language :: Python :: Implementation :: CPython",
|
||||
"Programming Language :: Python :: Implementation :: PyPy",
|
||||
"Topic :: Database",
|
||||
@ -51,17 +111,20 @@ CLASSIFIERS = [
|
||||
|
||||
extra_opts = {
|
||||
"packages": find_packages(exclude=["tests", "tests.*"]),
|
||||
"tests_require": ["nose", "coverage==4.2", "blinker", "Pillow>=2.0.0"],
|
||||
"tests_require": [
|
||||
"pytest",
|
||||
"pytest-cov",
|
||||
"coverage",
|
||||
"blinker",
|
||||
"Pillow>=7.0.0",
|
||||
],
|
||||
}
|
||||
if sys.version_info[0] == 3:
|
||||
extra_opts["use_2to3"] = True
|
||||
if "test" in sys.argv or "nosetests" in sys.argv:
|
||||
|
||||
if "test" in sys.argv:
|
||||
extra_opts["packages"] = find_packages()
|
||||
extra_opts["package_data"] = {
|
||||
"tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]
|
||||
}
|
||||
else:
|
||||
extra_opts["tests_require"] += ["python-dateutil"]
|
||||
|
||||
setup(
|
||||
name="mongoengine",
|
||||
@ -78,7 +141,8 @@ setup(
|
||||
long_description=LONG_DESCRIPTION,
|
||||
platforms=["any"],
|
||||
classifiers=CLASSIFIERS,
|
||||
install_requires=["pymongo>=3.4", "six"],
|
||||
test_suite="nose.collector",
|
||||
python_requires=">=3.6",
|
||||
install_requires=["pymongo>=3.4, <4.0"],
|
||||
cmdclass={"test": PyTest},
|
||||
**extra_opts
|
||||
)
|
||||
|
@ -1,4 +0,0 @@
|
||||
from .all_warnings import AllWarnings
|
||||
from .document import *
|
||||
from .queryset import *
|
||||
from .fields import *
|
@ -1,40 +0,0 @@
|
||||
"""
|
||||
This test has been put into a module. This is because it tests warnings that
|
||||
only get triggered on first hit. This way we can ensure its imported into the
|
||||
top level and called first by the test suite.
|
||||
"""
|
||||
import unittest
|
||||
import warnings
|
||||
|
||||
from mongoengine import *
|
||||
|
||||
|
||||
__all__ = ("AllWarnings",)
|
||||
|
||||
|
||||
class AllWarnings(unittest.TestCase):
|
||||
def setUp(self):
|
||||
connect(db="mongoenginetest")
|
||||
self.warning_list = []
|
||||
self.showwarning_default = warnings.showwarning
|
||||
warnings.showwarning = self.append_to_warning_list
|
||||
|
||||
def append_to_warning_list(self, message, category, *args):
|
||||
self.warning_list.append({"message": message, "category": category})
|
||||
|
||||
def tearDown(self):
|
||||
# restore default handling of warnings
|
||||
warnings.showwarning = self.showwarning_default
|
||||
|
||||
def test_document_collection_syntax_warning(self):
|
||||
class NonAbstractBase(Document):
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
class InheritedDocumentFailTest(NonAbstractBase):
|
||||
meta = {"collection": "fail"}
|
||||
|
||||
warning = self.warning_list[0]
|
||||
self.assertEqual(SyntaxWarning, warning["category"])
|
||||
self.assertEqual(
|
||||
"non_abstract_base", InheritedDocumentFailTest._get_collection_name()
|
||||
)
|
tests/all_warnings/test_warnings.py (new file, +35 lines)
@ -0,0 +1,35 @@
|
||||
"""
|
||||
This test has been put into a module. This is because it tests warnings that
|
||||
only get triggered on first hit. This way we can ensure it's imported into the
|
||||
top level and called first by the test suite.
|
||||
"""
|
||||
import unittest
|
||||
import warnings
|
||||
|
||||
from mongoengine import *
|
||||
|
||||
|
||||
class TestAllWarnings(unittest.TestCase):
|
||||
def setUp(self):
|
||||
connect(db="mongoenginetest")
|
||||
self.warning_list = []
|
||||
self.showwarning_default = warnings.showwarning
|
||||
warnings.showwarning = self.append_to_warning_list
|
||||
|
||||
def append_to_warning_list(self, message, category, *args):
|
||||
self.warning_list.append({"message": message, "category": category})
|
||||
|
||||
def tearDown(self):
|
||||
# restore default handling of warnings
|
||||
warnings.showwarning = self.showwarning_default
|
||||
|
||||
def test_document_collection_syntax_warning(self):
|
||||
class NonAbstractBase(Document):
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
class InheritedDocumentFailTest(NonAbstractBase):
|
||||
meta = {"collection": "fail"}
|
||||
|
||||
warning = self.warning_list[0]
|
||||
assert SyntaxWarning == warning["category"]
|
||||
assert "non_abstract_base" == InheritedDocumentFailTest._get_collection_name()
|
@ -1,13 +0,0 @@
|
||||
import unittest
|
||||
|
||||
from .class_methods import *
|
||||
from .delta import *
|
||||
from .dynamic import *
|
||||
from .indexes import *
|
||||
from .inheritance import *
|
||||
from .instance import *
|
||||
from .json_serialisation import *
|
||||
from .validation import *
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
@ -1,16 +1,12 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import unittest
|
||||
|
||||
from mongoengine import *
|
||||
from mongoengine.pymongo_support import list_collection_names
|
||||
|
||||
from mongoengine.queryset import NULLIFY, PULL
|
||||
from mongoengine.connection import get_db
|
||||
|
||||
__all__ = ("ClassMethodsTest",)
|
||||
from mongoengine.pymongo_support import list_collection_names
|
||||
from mongoengine.queryset import NULLIFY, PULL
|
||||
|
||||
|
||||
class ClassMethodsTest(unittest.TestCase):
|
||||
class TestClassMethods(unittest.TestCase):
|
||||
def setUp(self):
|
||||
connect(db="mongoenginetest")
|
||||
self.db = get_db()
|
||||
@ -30,45 +26,38 @@ class ClassMethodsTest(unittest.TestCase):
|
||||
self.db.drop_collection(collection)
|
||||
|
||||
def test_definition(self):
|
||||
"""Ensure that document may be defined using fields.
|
||||
"""
|
||||
self.assertEqual(
|
||||
["_cls", "age", "id", "name"], sorted(self.Person._fields.keys())
|
||||
)
|
||||
self.assertEqual(
|
||||
["IntField", "ObjectIdField", "StringField", "StringField"],
|
||||
sorted([x.__class__.__name__ for x in self.Person._fields.values()]),
|
||||
"""Ensure that document may be defined using fields."""
|
||||
assert ["_cls", "age", "id", "name"] == sorted(self.Person._fields.keys())
|
||||
assert ["IntField", "ObjectIdField", "StringField", "StringField"] == sorted(
|
||||
x.__class__.__name__ for x in self.Person._fields.values()
|
||||
)
|
||||
|
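From here down, the suite swaps `unittest` helper methods for bare `assert` statements (pytest's assertion rewriting preserves the detailed failure output). The mapping used throughout:

```python
# self.assertEqual(a, b)      ->  assert a == b
# self.assertIn(x, seq)       ->  assert x in seq
# self.assertNotIn(x, seq)    ->  assert x not in seq
# self.assertIsNone(x)        ->  assert x is None
```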
||||
def test_get_db(self):
|
||||
"""Ensure that get_db returns the expected db.
|
||||
"""
|
||||
"""Ensure that get_db returns the expected db."""
|
||||
db = self.Person._get_db()
|
||||
self.assertEqual(self.db, db)
|
||||
assert self.db == db
|
||||
|
||||
def test_get_collection_name(self):
|
||||
"""Ensure that get_collection_name returns the expected collection
|
||||
name.
|
||||
"""
|
||||
collection_name = "person"
|
||||
self.assertEqual(collection_name, self.Person._get_collection_name())
|
||||
assert collection_name == self.Person._get_collection_name()
|
||||
|
||||
def test_get_collection(self):
|
||||
"""Ensure that get_collection returns the expected collection.
|
||||
"""
|
||||
"""Ensure that get_collection returns the expected collection."""
|
||||
collection_name = "person"
|
||||
collection = self.Person._get_collection()
|
||||
self.assertEqual(self.db[collection_name], collection)
|
||||
assert self.db[collection_name] == collection
|
||||
|
||||
def test_drop_collection(self):
|
||||
"""Ensure that the collection may be dropped from the database.
|
||||
"""
|
||||
"""Ensure that the collection may be dropped from the database."""
|
||||
collection_name = "person"
|
||||
self.Person(name="Test").save()
|
||||
self.assertIn(collection_name, list_collection_names(self.db))
|
||||
assert collection_name in list_collection_names(self.db)
|
||||
|
||||
self.Person.drop_collection()
|
||||
self.assertNotIn(collection_name, list_collection_names(self.db))
|
||||
assert collection_name not in list_collection_names(self.db)
|
||||
|
||||
def test_register_delete_rule(self):
|
||||
"""Ensure that register delete rule adds a delete rule to the document
|
||||
@ -78,15 +67,13 @@ class ClassMethodsTest(unittest.TestCase):
|
||||
class Job(Document):
|
||||
employee = ReferenceField(self.Person)
|
||||
|
||||
self.assertEqual(self.Person._meta.get("delete_rules"), None)
|
||||
assert self.Person._meta.get("delete_rules") is None
|
||||
|
||||
self.Person.register_delete_rule(Job, "employee", NULLIFY)
|
||||
self.assertEqual(
|
||||
self.Person._meta["delete_rules"], {(Job, "employee"): NULLIFY}
|
||||
)
|
||||
assert self.Person._meta["delete_rules"] == {(Job, "employee"): NULLIFY}
|
||||
|
||||
def test_compare_indexes(self):
|
||||
""" Ensure that the indexes are properly created and that
|
||||
"""Ensure that the indexes are properly created and that
|
||||
compare_indexes identifies the missing/extra indexes
|
||||
"""
|
||||
|
||||
@ -101,25 +88,25 @@ class ClassMethodsTest(unittest.TestCase):
|
||||
BlogPost.drop_collection()
|
||||
|
||||
BlogPost.ensure_indexes()
|
||||
self.assertEqual(BlogPost.compare_indexes(), {"missing": [], "extra": []})
|
||||
assert BlogPost.compare_indexes() == {"missing": [], "extra": []}
|
||||
|
||||
BlogPost.ensure_index(["author", "description"])
|
||||
self.assertEqual(
|
||||
BlogPost.compare_indexes(),
|
||||
{"missing": [], "extra": [[("author", 1), ("description", 1)]]},
|
||||
)
|
||||
assert BlogPost.compare_indexes() == {
|
||||
"missing": [],
|
||||
"extra": [[("author", 1), ("description", 1)]],
|
||||
}
|
||||
|
||||
BlogPost._get_collection().drop_index("author_1_description_1")
|
||||
self.assertEqual(BlogPost.compare_indexes(), {"missing": [], "extra": []})
|
||||
assert BlogPost.compare_indexes() == {"missing": [], "extra": []}
|
||||
|
||||
BlogPost._get_collection().drop_index("author_1_title_1")
|
||||
self.assertEqual(
|
||||
BlogPost.compare_indexes(),
|
||||
{"missing": [[("author", 1), ("title", 1)]], "extra": []},
|
||||
)
|
||||
assert BlogPost.compare_indexes() == {
|
||||
"missing": [[("author", 1), ("title", 1)]],
|
||||
"extra": [],
|
||||
}
|
||||
|
||||
def test_compare_indexes_inheritance(self):
|
||||
""" Ensure that the indexes are properly created and that
|
||||
"""Ensure that the indexes are properly created and that
|
||||
compare_indexes identifies the missing/extra indexes for subclassed
|
||||
documents (_cls included)
|
||||
"""
|
||||
@ -141,25 +128,25 @@ class ClassMethodsTest(unittest.TestCase):
|
||||
|
||||
BlogPost.ensure_indexes()
|
||||
BlogPostWithTags.ensure_indexes()
|
||||
self.assertEqual(BlogPost.compare_indexes(), {"missing": [], "extra": []})
|
||||
assert BlogPost.compare_indexes() == {"missing": [], "extra": []}
|
||||
|
||||
BlogPostWithTags.ensure_index(["author", "tag_list"])
|
||||
self.assertEqual(
|
||||
BlogPost.compare_indexes(),
|
||||
{"missing": [], "extra": [[("_cls", 1), ("author", 1), ("tag_list", 1)]]},
|
||||
)
|
||||
assert BlogPost.compare_indexes() == {
|
||||
"missing": [],
|
||||
"extra": [[("_cls", 1), ("author", 1), ("tag_list", 1)]],
|
||||
}
|
||||
|
||||
BlogPostWithTags._get_collection().drop_index("_cls_1_author_1_tag_list_1")
|
||||
self.assertEqual(BlogPost.compare_indexes(), {"missing": [], "extra": []})
|
||||
assert BlogPost.compare_indexes() == {"missing": [], "extra": []}
|
||||
|
||||
BlogPostWithTags._get_collection().drop_index("_cls_1_author_1_tags_1")
|
||||
self.assertEqual(
|
||||
BlogPost.compare_indexes(),
|
||||
{"missing": [[("_cls", 1), ("author", 1), ("tags", 1)]], "extra": []},
|
||||
)
|
||||
assert BlogPost.compare_indexes() == {
|
||||
"missing": [[("_cls", 1), ("author", 1), ("tags", 1)]],
|
||||
"extra": [],
|
||||
}
|
||||
|
||||
def test_compare_indexes_multiple_subclasses(self):
|
||||
""" Ensure that compare_indexes behaves correctly if called from a
|
||||
"""Ensure that compare_indexes behaves correctly if called from a
|
||||
class, which base class has multiple subclasses
|
||||
"""
|
||||
|
||||
@ -185,16 +172,12 @@ class ClassMethodsTest(unittest.TestCase):
|
||||
BlogPostWithTags.ensure_indexes()
|
||||
BlogPostWithCustomField.ensure_indexes()
|
||||
|
||||
self.assertEqual(BlogPost.compare_indexes(), {"missing": [], "extra": []})
|
||||
self.assertEqual(
|
||||
BlogPostWithTags.compare_indexes(), {"missing": [], "extra": []}
|
||||
)
|
||||
self.assertEqual(
|
||||
BlogPostWithCustomField.compare_indexes(), {"missing": [], "extra": []}
|
||||
)
|
||||
assert BlogPost.compare_indexes() == {"missing": [], "extra": []}
|
||||
assert BlogPostWithTags.compare_indexes() == {"missing": [], "extra": []}
|
||||
assert BlogPostWithCustomField.compare_indexes() == {"missing": [], "extra": []}
|
||||
|
||||
def test_compare_indexes_for_text_indexes(self):
|
||||
""" Ensure that compare_indexes behaves correctly for text indexes """
|
||||
"""Ensure that compare_indexes behaves correctly for text indexes"""
|
||||
|
||||
class Doc(Document):
|
||||
a = StringField()
|
||||
@ -213,10 +196,10 @@ class ClassMethodsTest(unittest.TestCase):
|
||||
Doc.ensure_indexes()
|
||||
actual = Doc.compare_indexes()
|
||||
expected = {"missing": [], "extra": []}
|
||||
self.assertEqual(actual, expected)
|
||||
assert actual == expected
|
||||
|
||||
def test_list_indexes_inheritance(self):
|
||||
""" ensure that all of the indexes are listed regardless of the super-
|
||||
"""ensure that all of the indexes are listed regardless of the super-
|
||||
or sub-class that we call it from
|
||||
"""
|
||||
|
||||
@ -243,19 +226,14 @@ class ClassMethodsTest(unittest.TestCase):
|
||||
BlogPostWithTags.ensure_indexes()
|
||||
BlogPostWithTagsAndExtraText.ensure_indexes()
|
||||
|
||||
self.assertEqual(BlogPost.list_indexes(), BlogPostWithTags.list_indexes())
|
||||
self.assertEqual(
|
||||
BlogPost.list_indexes(), BlogPostWithTagsAndExtraText.list_indexes()
|
||||
)
|
||||
self.assertEqual(
|
||||
BlogPost.list_indexes(),
|
||||
[
|
||||
assert BlogPost.list_indexes() == BlogPostWithTags.list_indexes()
|
||||
assert BlogPost.list_indexes() == BlogPostWithTagsAndExtraText.list_indexes()
|
||||
assert BlogPost.list_indexes() == [
|
||||
[("_cls", 1), ("author", 1), ("tags", 1)],
|
||||
[("_cls", 1), ("author", 1), ("tags", 1), ("extra_text", 1)],
|
||||
[(u"_id", 1)],
|
||||
[("_id", 1)],
|
||||
[("_cls", 1)],
|
||||
],
|
||||
)
|
||||
]
|
||||
|
||||
def test_register_delete_rule_inherited(self):
|
||||
class Vaccine(Document):
|
||||
@ -274,29 +252,26 @@ class ClassMethodsTest(unittest.TestCase):
|
||||
class Cat(Animal):
|
||||
name = StringField(required=True)
|
||||
|
||||
self.assertEqual(Vaccine._meta["delete_rules"][(Animal, "vaccine_made")], PULL)
|
||||
self.assertEqual(Vaccine._meta["delete_rules"][(Cat, "vaccine_made")], PULL)
|
||||
assert Vaccine._meta["delete_rules"][(Animal, "vaccine_made")] == PULL
|
||||
assert Vaccine._meta["delete_rules"][(Cat, "vaccine_made")] == PULL
|
||||
|
||||
def test_collection_naming(self):
|
||||
"""Ensure that a collection with a specified name may be used.
|
||||
"""
|
||||
"""Ensure that a collection with a specified name may be used."""
|
||||
|
||||
class DefaultNamingTest(Document):
|
||||
pass
|
||||
|
||||
self.assertEqual(
|
||||
"default_naming_test", DefaultNamingTest._get_collection_name()
|
||||
)
|
||||
assert "default_naming_test" == DefaultNamingTest._get_collection_name()
|
||||
|
||||
class CustomNamingTest(Document):
|
||||
meta = {"collection": "pimp_my_collection"}
|
||||
|
||||
self.assertEqual("pimp_my_collection", CustomNamingTest._get_collection_name())
|
||||
assert "pimp_my_collection" == CustomNamingTest._get_collection_name()
|
||||
|
||||
class DynamicNamingTest(Document):
|
||||
meta = {"collection": lambda c: "DYNAMO"}
|
||||
|
||||
self.assertEqual("DYNAMO", DynamicNamingTest._get_collection_name())
|
||||
assert "DYNAMO" == DynamicNamingTest._get_collection_name()
|
||||
|
||||
# Use Abstract class to handle backwards compatibility
|
||||
class BaseDocument(Document):
|
||||
@ -305,27 +280,26 @@ class ClassMethodsTest(unittest.TestCase):
|
||||
class OldNamingConvention(BaseDocument):
|
||||
pass
|
||||
|
||||
self.assertEqual(
|
||||
"oldnamingconvention", OldNamingConvention._get_collection_name()
|
||||
)
|
||||
assert "oldnamingconvention" == OldNamingConvention._get_collection_name()
|
||||
|
||||
class InheritedAbstractNamingTest(BaseDocument):
|
||||
meta = {"collection": "wibble"}
|
||||
|
||||
self.assertEqual("wibble", InheritedAbstractNamingTest._get_collection_name())
|
||||
assert "wibble" == InheritedAbstractNamingTest._get_collection_name()
|
||||
|
||||
# Mixin tests
|
||||
class BaseMixin(object):
|
||||
class BaseMixin:
|
||||
meta = {"collection": lambda c: c.__name__.lower()}
|
||||
|
||||
class OldMixinNamingConvention(Document, BaseMixin):
|
||||
pass
|
||||
|
||||
self.assertEqual(
|
||||
"oldmixinnamingconvention", OldMixinNamingConvention._get_collection_name()
|
||||
assert (
|
||||
"oldmixinnamingconvention"
|
||||
== OldMixinNamingConvention._get_collection_name()
|
||||
)
|
||||
|
||||
class BaseMixin(object):
|
||||
class BaseMixin:
|
||||
meta = {"collection": lambda c: c.__name__.lower()}
|
||||
|
||||
class BaseDocument(Document, BaseMixin):
|
||||
@ -334,11 +308,10 @@ class ClassMethodsTest(unittest.TestCase):
|
||||
class MyDocument(BaseDocument):
|
||||
pass
|
||||
|
||||
self.assertEqual("basedocument", MyDocument._get_collection_name())
|
||||
assert "basedocument" == MyDocument._get_collection_name()
|
||||
|
||||
def test_custom_collection_name_operations(self):
|
||||
"""Ensure that a collection with a specified name is used as expected.
|
||||
"""
|
||||
"""Ensure that a collection with a specified name is used as expected."""
|
||||
collection_name = "personCollTest"
|
||||
|
||||
class Person(Document):
|
||||
@ -346,20 +319,19 @@ class ClassMethodsTest(unittest.TestCase):
|
||||
meta = {"collection": collection_name}
|
||||
|
||||
Person(name="Test User").save()
|
||||
self.assertIn(collection_name, list_collection_names(self.db))
|
||||
assert collection_name in list_collection_names(self.db)
|
||||
|
||||
user_obj = self.db[collection_name].find_one()
|
||||
self.assertEqual(user_obj["name"], "Test User")
|
||||
assert user_obj["name"] == "Test User"
|
||||
|
||||
user_obj = Person.objects[0]
|
||||
self.assertEqual(user_obj.name, "Test User")
|
||||
assert user_obj.name == "Test User"
|
||||
|
||||
Person.drop_collection()
|
||||
self.assertNotIn(collection_name, list_collection_names(self.db))
|
||||
assert collection_name not in list_collection_names(self.db)
|
||||
|
||||
def test_collection_name_and_primary(self):
|
||||
"""Ensure that a collection with a specified name may be used.
|
||||
"""
|
||||
"""Ensure that a collection with a specified name may be used."""
|
||||
|
||||
class Person(Document):
|
||||
name = StringField(primary_key=True)
|
||||
@ -368,7 +340,7 @@ class ClassMethodsTest(unittest.TestCase):
|
||||
Person(name="Test User").save()
|
||||
|
||||
user_obj = Person.objects.first()
|
||||
self.assertEqual(user_obj.name, "Test User")
|
||||
assert user_obj.name == "Test User"
|
||||
|
||||
Person.drop_collection()
|
||||
|
@ -1,15 +1,15 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import unittest
|
||||
|
||||
from bson import SON
|
||||
|
||||
from mongoengine import *
|
||||
from mongoengine.pymongo_support import list_collection_names
|
||||
from tests.utils import MongoDBTestCase
|
||||
|
||||
|
||||
class DeltaTest(MongoDBTestCase):
|
||||
class TestDelta(MongoDBTestCase):
|
||||
def setUp(self):
|
||||
super(DeltaTest, self).setUp()
|
||||
super().setUp()
|
||||
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
@ -29,7 +29,8 @@ class DeltaTest(MongoDBTestCase):
|
||||
self.delta(Document)
|
||||
self.delta(DynamicDocument)
|
||||
|
||||
def delta(self, DocClass):
|
||||
@staticmethod
|
||||
def delta(DocClass):
|
||||
class Doc(DocClass):
|
||||
string_field = StringField()
|
||||
int_field = IntField()
|
||||
@ -41,40 +42,40 @@ class DeltaTest(MongoDBTestCase):
|
||||
doc.save()
|
||||
|
||||
doc = Doc.objects.first()
|
||||
self.assertEqual(doc._get_changed_fields(), [])
|
||||
self.assertEqual(doc._delta(), ({}, {}))
|
||||
assert doc._get_changed_fields() == []
|
||||
assert doc._delta() == ({}, {})
|
||||
|
||||
doc.string_field = "hello"
|
||||
self.assertEqual(doc._get_changed_fields(), ["string_field"])
|
||||
self.assertEqual(doc._delta(), ({"string_field": "hello"}, {}))
|
||||
assert doc._get_changed_fields() == ["string_field"]
|
||||
assert doc._delta() == ({"string_field": "hello"}, {})
|
||||
|
||||
doc._changed_fields = []
|
||||
doc.int_field = 1
|
||||
self.assertEqual(doc._get_changed_fields(), ["int_field"])
|
||||
self.assertEqual(doc._delta(), ({"int_field": 1}, {}))
|
||||
assert doc._get_changed_fields() == ["int_field"]
|
||||
assert doc._delta() == ({"int_field": 1}, {})
|
||||
|
||||
doc._changed_fields = []
|
||||
dict_value = {"hello": "world", "ping": "pong"}
|
||||
doc.dict_field = dict_value
|
||||
self.assertEqual(doc._get_changed_fields(), ["dict_field"])
|
||||
self.assertEqual(doc._delta(), ({"dict_field": dict_value}, {}))
|
||||
assert doc._get_changed_fields() == ["dict_field"]
|
||||
assert doc._delta() == ({"dict_field": dict_value}, {})
|
||||
|
||||
doc._changed_fields = []
|
||||
list_value = ["1", 2, {"hello": "world"}]
|
||||
doc.list_field = list_value
|
||||
self.assertEqual(doc._get_changed_fields(), ["list_field"])
|
||||
self.assertEqual(doc._delta(), ({"list_field": list_value}, {}))
|
||||
assert doc._get_changed_fields() == ["list_field"]
|
||||
assert doc._delta() == ({"list_field": list_value}, {})
|
||||
|
||||
# Test unsetting
|
||||
doc._changed_fields = []
|
||||
doc.dict_field = {}
|
||||
self.assertEqual(doc._get_changed_fields(), ["dict_field"])
|
||||
self.assertEqual(doc._delta(), ({}, {"dict_field": 1}))
|
||||
assert doc._get_changed_fields() == ["dict_field"]
|
||||
assert doc._delta() == ({}, {"dict_field": 1})
|
||||
|
||||
doc._changed_fields = []
|
||||
doc.list_field = []
|
||||
self.assertEqual(doc._get_changed_fields(), ["list_field"])
|
||||
self.assertEqual(doc._delta(), ({}, {"list_field": 1}))
|
||||
assert doc._get_changed_fields() == ["list_field"]
|
||||
assert doc._delta() == ({}, {"list_field": 1})
|
||||
|
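As these assertions show, `_delta()` returns a `(sets, unsets)` pair that maps onto `$set`/`$unset` on save. A condensed recap, assuming the `Doc` class and fixtures from the test above:

```python
doc = Doc.objects.first()

doc.string_field = "hello"
sets, unsets = doc._delta()
assert sets == {"string_field": "hello"}  # becomes $set on save
assert unsets == {}

doc._changed_fields = []
doc.dict_field = {}  # emptying a container...
assert doc._delta() == ({}, {"dict_field": 1})  # ...becomes $unset
```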
||||
def test_delta_recursive(self):
|
||||
self.delta_recursive(Document, EmbeddedDocument)
|
||||
@ -102,8 +103,8 @@ class DeltaTest(MongoDBTestCase):
|
||||
doc.save()
|
||||
|
||||
doc = Doc.objects.first()
|
||||
self.assertEqual(doc._get_changed_fields(), [])
|
||||
self.assertEqual(doc._delta(), ({}, {}))
|
||||
assert doc._get_changed_fields() == []
|
||||
assert doc._delta() == ({}, {})
|
||||
|
||||
embedded_1 = Embedded()
|
||||
embedded_1.id = "010101"
|
||||
@ -113,7 +114,7 @@ class DeltaTest(MongoDBTestCase):
|
||||
embedded_1.list_field = ["1", 2, {"hello": "world"}]
|
||||
doc.embedded_field = embedded_1
|
||||
|
||||
self.assertEqual(doc._get_changed_fields(), ["embedded_field"])
|
||||
assert doc._get_changed_fields() == ["embedded_field"]
|
||||
|
||||
embedded_delta = {
|
||||
"id": "010101",
|
||||
@ -122,27 +123,27 @@ class DeltaTest(MongoDBTestCase):
|
||||
"dict_field": {"hello": "world"},
|
||||
"list_field": ["1", 2, {"hello": "world"}],
|
||||
}
|
||||
self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {}))
|
||||
self.assertEqual(doc._delta(), ({"embedded_field": embedded_delta}, {}))
|
||||
assert doc.embedded_field._delta() == (embedded_delta, {})
|
||||
assert doc._delta() == ({"embedded_field": embedded_delta}, {})
|
||||
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
|
||||
doc.embedded_field.dict_field = {}
|
||||
self.assertEqual(doc._get_changed_fields(), ["embedded_field.dict_field"])
|
||||
self.assertEqual(doc.embedded_field._delta(), ({}, {"dict_field": 1}))
|
||||
self.assertEqual(doc._delta(), ({}, {"embedded_field.dict_field": 1}))
|
||||
assert doc._get_changed_fields() == ["embedded_field.dict_field"]
|
||||
assert doc.embedded_field._delta() == ({}, {"dict_field": 1})
|
||||
assert doc._delta() == ({}, {"embedded_field.dict_field": 1})
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
self.assertEqual(doc.embedded_field.dict_field, {})
|
||||
assert doc.embedded_field.dict_field == {}
|
||||
|
||||
doc.embedded_field.list_field = []
|
||||
self.assertEqual(doc._get_changed_fields(), ["embedded_field.list_field"])
|
||||
self.assertEqual(doc.embedded_field._delta(), ({}, {"list_field": 1}))
|
||||
self.assertEqual(doc._delta(), ({}, {"embedded_field.list_field": 1}))
|
||||
assert doc._get_changed_fields() == ["embedded_field.list_field"]
|
||||
assert doc.embedded_field._delta() == ({}, {"list_field": 1})
|
||||
assert doc._delta() == ({}, {"embedded_field.list_field": 1})
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
self.assertEqual(doc.embedded_field.list_field, [])
|
||||
assert doc.embedded_field.list_field == []
|
||||
|
||||
embedded_2 = Embedded()
|
||||
embedded_2.string_field = "hello"
|
||||
@ -151,11 +152,9 @@ class DeltaTest(MongoDBTestCase):
|
||||
embedded_2.list_field = ["1", 2, {"hello": "world"}]
|
||||
|
||||
doc.embedded_field.list_field = ["1", 2, embedded_2]
|
||||
self.assertEqual(doc._get_changed_fields(), ["embedded_field.list_field"])
|
||||
assert doc._get_changed_fields() == ["embedded_field.list_field"]
|
||||
|
||||
self.assertEqual(
|
||||
doc.embedded_field._delta(),
|
||||
(
|
||||
assert doc.embedded_field._delta() == (
|
||||
{
|
||||
"list_field": [
|
||||
"1",
|
||||
@ -170,12 +169,9 @@ class DeltaTest(MongoDBTestCase):
|
||||
]
|
||||
},
|
||||
{},
|
||||
),
|
||||
)
|
||||
|
||||
self.assertEqual(
|
||||
doc._delta(),
|
||||
(
|
||||
assert doc._delta() == (
|
||||
{
|
||||
"embedded_field.list_field": [
|
||||
"1",
|
||||
@ -190,37 +186,34 @@ class DeltaTest(MongoDBTestCase):
|
||||
]
|
||||
},
|
||||
{},
|
||||
),
|
||||
)
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
|
||||
self.assertEqual(doc.embedded_field.list_field[0], "1")
|
||||
self.assertEqual(doc.embedded_field.list_field[1], 2)
|
||||
assert doc.embedded_field.list_field[0] == "1"
|
||||
assert doc.embedded_field.list_field[1] == 2
|
||||
for k in doc.embedded_field.list_field[2]._fields:
|
||||
self.assertEqual(doc.embedded_field.list_field[2][k], embedded_2[k])
|
||||
assert doc.embedded_field.list_field[2][k] == embedded_2[k]
|
||||
|
||||
doc.embedded_field.list_field[2].string_field = "world"
|
||||
self.assertEqual(
|
||||
doc._get_changed_fields(), ["embedded_field.list_field.2.string_field"]
|
||||
assert doc._get_changed_fields() == ["embedded_field.list_field.2.string_field"]
|
||||
assert doc.embedded_field._delta() == (
|
||||
{"list_field.2.string_field": "world"},
|
||||
{},
|
||||
)
|
||||
self.assertEqual(
|
||||
doc.embedded_field._delta(), ({"list_field.2.string_field": "world"}, {})
|
||||
)
|
||||
self.assertEqual(
|
||||
doc._delta(), ({"embedded_field.list_field.2.string_field": "world"}, {})
|
||||
assert doc._delta() == (
|
||||
{"embedded_field.list_field.2.string_field": "world"},
|
||||
{},
|
||||
)
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
self.assertEqual(doc.embedded_field.list_field[2].string_field, "world")
|
||||
assert doc.embedded_field.list_field[2].string_field == "world"
|
||||
|
||||
# Test multiple assignments
|
||||
doc.embedded_field.list_field[2].string_field = "hello world"
|
||||
doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
|
||||
self.assertEqual(doc._get_changed_fields(), ["embedded_field.list_field.2"])
|
||||
self.assertEqual(
|
||||
doc.embedded_field._delta(),
|
||||
(
|
||||
assert doc._get_changed_fields() == ["embedded_field.list_field.2"]
|
||||
assert doc.embedded_field._delta() == (
|
||||
{
|
||||
"list_field.2": {
|
||||
"_cls": "Embedded",
|
||||
@ -231,11 +224,8 @@ class DeltaTest(MongoDBTestCase):
|
||||
}
|
||||
},
|
||||
{},
|
||||
),
|
||||
)
|
||||
self.assertEqual(
|
||||
doc._delta(),
|
||||
(
|
||||
assert doc._delta() == (
|
||||
{
|
||||
"embedded_field.list_field.2": {
|
||||
"_cls": "Embedded",
|
||||
@ -246,53 +236,44 @@ class DeltaTest(MongoDBTestCase):
|
||||
}
|
||||
},
|
||||
{},
|
||||
),
|
||||
)
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
self.assertEqual(doc.embedded_field.list_field[2].string_field, "hello world")
|
||||
assert doc.embedded_field.list_field[2].string_field == "hello world"
|
||||
|
||||
# Test list native methods
|
||||
doc.embedded_field.list_field[2].list_field.pop(0)
|
||||
self.assertEqual(
|
||||
doc._delta(),
|
||||
({"embedded_field.list_field.2.list_field": [2, {"hello": "world"}]}, {}),
|
||||
assert doc._delta() == (
|
||||
{"embedded_field.list_field.2.list_field": [2, {"hello": "world"}]},
|
||||
{},
|
||||
)
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
|
||||
doc.embedded_field.list_field[2].list_field.append(1)
|
||||
self.assertEqual(
|
||||
doc._delta(),
|
||||
(
|
||||
assert doc._delta() == (
|
||||
{"embedded_field.list_field.2.list_field": [2, {"hello": "world"}, 1]},
|
||||
{},
|
||||
),
|
||||
)
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
self.assertEqual(
|
||||
doc.embedded_field.list_field[2].list_field, [2, {"hello": "world"}, 1]
|
||||
)
|
||||
assert doc.embedded_field.list_field[2].list_field == [2, {"hello": "world"}, 1]
|
||||
|
||||
doc.embedded_field.list_field[2].list_field.sort(key=str)
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
self.assertEqual(
|
||||
doc.embedded_field.list_field[2].list_field, [1, 2, {"hello": "world"}]
|
||||
)
|
||||
assert doc.embedded_field.list_field[2].list_field == [1, 2, {"hello": "world"}]
|
||||
|
||||
del doc.embedded_field.list_field[2].list_field[2]["hello"]
|
||||
self.assertEqual(
|
||||
doc._delta(), ({}, {"embedded_field.list_field.2.list_field.2.hello": 1})
|
||||
assert doc._delta() == (
|
||||
{},
|
||||
{"embedded_field.list_field.2.list_field.2.hello": 1},
|
||||
)
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
|
||||
del doc.embedded_field.list_field[2].list_field
|
||||
self.assertEqual(
|
||||
doc._delta(), ({}, {"embedded_field.list_field.2.list_field": 1})
|
||||
)
|
||||
assert doc._delta() == ({}, {"embedded_field.list_field.2.list_field": 1})
|
||||
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
@ -302,12 +283,8 @@ class DeltaTest(MongoDBTestCase):
|
||||
doc = doc.reload(10)
|
||||
|
||||
doc.dict_field["Embedded"].string_field = "Hello World"
|
||||
self.assertEqual(
|
||||
doc._get_changed_fields(), ["dict_field.Embedded.string_field"]
|
||||
)
|
||||
self.assertEqual(
|
||||
doc._delta(), ({"dict_field.Embedded.string_field": "Hello World"}, {})
|
||||
)
|
||||
assert doc._get_changed_fields() == ["dict_field.Embedded.string_field"]
|
||||
assert doc._delta() == ({"dict_field.Embedded.string_field": "Hello World"}, {})
|
||||
|
||||
def test_circular_reference_deltas(self):
|
||||
self.circular_reference_deltas(Document, Document)
|
||||
@ -338,8 +315,8 @@ class DeltaTest(MongoDBTestCase):
|
||||
|
||||
p = Person.objects[0].select_related()
|
||||
o = Organization.objects.first()
|
||||
self.assertEqual(p.owns[0], o)
|
||||
self.assertEqual(o.owner, p)
|
||||
assert p.owns[0] == o
|
||||
assert o.owner == p
|
||||
|
||||
def test_circular_reference_deltas_2(self):
|
||||
self.circular_reference_deltas_2(Document, Document)
|
||||
@ -379,9 +356,9 @@ class DeltaTest(MongoDBTestCase):
|
||||
e = Person.objects.get(name="employee")
|
||||
o = Organization.objects.first()
|
||||
|
||||
self.assertEqual(p.owns[0], o)
|
||||
self.assertEqual(o.owner, p)
|
||||
self.assertEqual(e.employer, o)
|
||||
assert p.owns[0] == o
|
||||
assert o.owner == p
|
||||
assert e.employer == o
|
||||
|
||||
return person, organization, employee
|
||||
|
||||
@ -401,40 +378,40 @@ class DeltaTest(MongoDBTestCase):
|
||||
doc.save()
|
||||
|
||||
doc = Doc.objects.first()
|
||||
self.assertEqual(doc._get_changed_fields(), [])
|
||||
self.assertEqual(doc._delta(), ({}, {}))
|
||||
assert doc._get_changed_fields() == []
|
||||
assert doc._delta() == ({}, {})
|
||||
|
||||
doc.string_field = "hello"
|
||||
self.assertEqual(doc._get_changed_fields(), ["db_string_field"])
|
||||
self.assertEqual(doc._delta(), ({"db_string_field": "hello"}, {}))
|
||||
assert doc._get_changed_fields() == ["db_string_field"]
|
||||
assert doc._delta() == ({"db_string_field": "hello"}, {})
|
||||
|
||||
doc._changed_fields = []
|
||||
doc.int_field = 1
|
||||
self.assertEqual(doc._get_changed_fields(), ["db_int_field"])
|
||||
self.assertEqual(doc._delta(), ({"db_int_field": 1}, {}))
|
||||
assert doc._get_changed_fields() == ["db_int_field"]
|
||||
assert doc._delta() == ({"db_int_field": 1}, {})
|
||||
|
||||
doc._changed_fields = []
|
||||
dict_value = {"hello": "world", "ping": "pong"}
|
||||
doc.dict_field = dict_value
|
||||
self.assertEqual(doc._get_changed_fields(), ["db_dict_field"])
|
||||
self.assertEqual(doc._delta(), ({"db_dict_field": dict_value}, {}))
|
||||
assert doc._get_changed_fields() == ["db_dict_field"]
|
||||
assert doc._delta() == ({"db_dict_field": dict_value}, {})
|
||||
|
||||
doc._changed_fields = []
|
||||
list_value = ["1", 2, {"hello": "world"}]
|
||||
doc.list_field = list_value
|
||||
self.assertEqual(doc._get_changed_fields(), ["db_list_field"])
|
||||
self.assertEqual(doc._delta(), ({"db_list_field": list_value}, {}))
|
||||
assert doc._get_changed_fields() == ["db_list_field"]
|
||||
assert doc._delta() == ({"db_list_field": list_value}, {})
|
||||
|
||||
# Test unsetting
|
||||
doc._changed_fields = []
|
||||
doc.dict_field = {}
|
||||
self.assertEqual(doc._get_changed_fields(), ["db_dict_field"])
|
||||
self.assertEqual(doc._delta(), ({}, {"db_dict_field": 1}))
|
||||
assert doc._get_changed_fields() == ["db_dict_field"]
|
||||
assert doc._delta() == ({}, {"db_dict_field": 1})
|
||||
|
||||
doc._changed_fields = []
|
||||
doc.list_field = []
|
||||
self.assertEqual(doc._get_changed_fields(), ["db_list_field"])
|
||||
self.assertEqual(doc._delta(), ({}, {"db_list_field": 1}))
|
||||
assert doc._get_changed_fields() == ["db_list_field"]
|
||||
assert doc._delta() == ({}, {"db_list_field": 1})
|
||||
|
||||
# Test it saves that data
|
||||
doc = Doc()
|
||||
@ -447,18 +424,25 @@ class DeltaTest(MongoDBTestCase):
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
|
||||
self.assertEqual(doc.string_field, "hello")
|
||||
self.assertEqual(doc.int_field, 1)
|
||||
self.assertEqual(doc.dict_field, {"hello": "world"})
|
||||
self.assertEqual(doc.list_field, ["1", 2, {"hello": "world"}])
|
||||
assert doc.string_field == "hello"
|
||||
assert doc.int_field == 1
|
||||
assert doc.dict_field == {"hello": "world"}
|
||||
assert doc.list_field == ["1", 2, {"hello": "world"}]
|
||||
|
||||
def test_delta_recursive_db_field(self):
|
||||
def test_delta_recursive_db_field_on_doc_and_embeddeddoc(self):
|
||||
self.delta_recursive_db_field(Document, EmbeddedDocument)
|
||||
|
||||
def test_delta_recursive_db_field_on_doc_and_dynamicembeddeddoc(self):
|
||||
self.delta_recursive_db_field(Document, DynamicEmbeddedDocument)
|
||||
|
||||
def test_delta_recursive_db_field_on_dynamicdoc_and_embeddeddoc(self):
|
||||
self.delta_recursive_db_field(DynamicDocument, EmbeddedDocument)
|
||||
|
||||
def test_delta_recursive_db_field_on_dynamicdoc_and_dynamicembeddeddoc(self):
|
||||
self.delta_recursive_db_field(DynamicDocument, DynamicEmbeddedDocument)
|
||||
|
||||
def delta_recursive_db_field(self, DocClass, EmbeddedClass):
|
||||
@staticmethod
|
||||
def delta_recursive_db_field(DocClass, EmbeddedClass):
|
||||
class Embedded(EmbeddedClass):
|
||||
string_field = StringField(db_field="db_string_field")
|
||||
int_field = IntField(db_field="db_int_field")
|
||||
@ -479,8 +463,8 @@ class DeltaTest(MongoDBTestCase):
|
||||
doc.save()
|
||||
|
||||
doc = Doc.objects.first()
|
||||
self.assertEqual(doc._get_changed_fields(), [])
|
||||
self.assertEqual(doc._delta(), ({}, {}))
|
||||
assert doc._get_changed_fields() == []
|
||||
assert doc._delta() == ({}, {})
|
||||
|
||||
embedded_1 = Embedded()
|
||||
embedded_1.string_field = "hello"
|
||||
@ -489,7 +473,7 @@ class DeltaTest(MongoDBTestCase):
|
||||
embedded_1.list_field = ["1", 2, {"hello": "world"}]
|
||||
doc.embedded_field = embedded_1
|
||||
|
||||
self.assertEqual(doc._get_changed_fields(), ["db_embedded_field"])
|
||||
assert doc._get_changed_fields() == ["db_embedded_field"]
|
||||
|
||||
embedded_delta = {
|
||||
"db_string_field": "hello",
|
||||
@ -497,27 +481,28 @@ class DeltaTest(MongoDBTestCase):
|
||||
"db_dict_field": {"hello": "world"},
|
||||
"db_list_field": ["1", 2, {"hello": "world"}],
|
||||
}
|
||||
self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {}))
|
||||
self.assertEqual(doc._delta(), ({"db_embedded_field": embedded_delta}, {}))
|
||||
assert doc.embedded_field._delta() == (embedded_delta, {})
|
||||
assert doc._delta() == ({"db_embedded_field": embedded_delta}, {})
|
||||
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
|
||||
doc.embedded_field.dict_field = {}
|
||||
self.assertEqual(doc._get_changed_fields(), ["db_embedded_field.db_dict_field"])
|
||||
self.assertEqual(doc.embedded_field._delta(), ({}, {"db_dict_field": 1}))
|
||||
self.assertEqual(doc._delta(), ({}, {"db_embedded_field.db_dict_field": 1}))
|
||||
assert doc._get_changed_fields() == ["db_embedded_field.db_dict_field"]
|
||||
assert doc.embedded_field._delta() == ({}, {"db_dict_field": 1})
|
||||
assert doc._delta() == ({}, {"db_embedded_field.db_dict_field": 1})
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
self.assertEqual(doc.embedded_field.dict_field, {})
|
||||
assert doc.embedded_field.dict_field == {}
|
||||
|
||||
assert doc._get_changed_fields() == []
|
||||
doc.embedded_field.list_field = []
|
||||
self.assertEqual(doc._get_changed_fields(), ["db_embedded_field.db_list_field"])
|
||||
self.assertEqual(doc.embedded_field._delta(), ({}, {"db_list_field": 1}))
|
||||
self.assertEqual(doc._delta(), ({}, {"db_embedded_field.db_list_field": 1}))
|
||||
assert doc._get_changed_fields() == ["db_embedded_field.db_list_field"]
|
||||
assert doc.embedded_field._delta() == ({}, {"db_list_field": 1})
|
||||
assert doc._delta() == ({}, {"db_embedded_field.db_list_field": 1})
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
self.assertEqual(doc.embedded_field.list_field, [])
|
||||
assert doc.embedded_field.list_field == []
|
||||
|
||||
embedded_2 = Embedded()
|
||||
embedded_2.string_field = "hello"
|
||||
@ -526,10 +511,8 @@ class DeltaTest(MongoDBTestCase):
|
||||
embedded_2.list_field = ["1", 2, {"hello": "world"}]
|
||||
|
||||
doc.embedded_field.list_field = ["1", 2, embedded_2]
self.assertEqual(doc._get_changed_fields(), ["db_embedded_field.db_list_field"])
self.assertEqual(
doc.embedded_field._delta(),
(
assert doc._get_changed_fields() == ["db_embedded_field.db_list_field"]
assert doc.embedded_field._delta() == (
{
"db_list_field": [
"1",
@@ -544,12 +527,9 @@ class DeltaTest(MongoDBTestCase):
]
},
{},
),
)

self.assertEqual(
doc._delta(),
(
assert doc._delta() == (
{
"db_embedded_field.db_list_field": [
"1",
@@ -564,42 +544,37 @@ class DeltaTest(MongoDBTestCase):
]
},
{},
),
)
doc.save()
assert doc._get_changed_fields() == []
doc = doc.reload(10)

self.assertEqual(doc.embedded_field.list_field[0], "1")
self.assertEqual(doc.embedded_field.list_field[1], 2)
assert doc.embedded_field.list_field[0] == "1"
assert doc.embedded_field.list_field[1] == 2
for k in doc.embedded_field.list_field[2]._fields:
self.assertEqual(doc.embedded_field.list_field[2][k], embedded_2[k])
assert doc.embedded_field.list_field[2][k] == embedded_2[k]

doc.embedded_field.list_field[2].string_field = "world"
self.assertEqual(
doc._get_changed_fields(),
["db_embedded_field.db_list_field.2.db_string_field"],
assert doc._get_changed_fields() == [
"db_embedded_field.db_list_field.2.db_string_field"
]
assert doc.embedded_field._delta() == (
{"db_list_field.2.db_string_field": "world"},
{},
)
self.assertEqual(
doc.embedded_field._delta(),
({"db_list_field.2.db_string_field": "world"}, {}),
)
self.assertEqual(
doc._delta(),
({"db_embedded_field.db_list_field.2.db_string_field": "world"}, {}),
assert doc._delta() == (
{"db_embedded_field.db_list_field.2.db_string_field": "world"},
{},
)
doc.save()
doc = doc.reload(10)
self.assertEqual(doc.embedded_field.list_field[2].string_field, "world")
assert doc.embedded_field.list_field[2].string_field == "world"

# Test multiple assignments
doc.embedded_field.list_field[2].string_field = "hello world"
doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
self.assertEqual(
doc._get_changed_fields(), ["db_embedded_field.db_list_field.2"]
)
self.assertEqual(
doc.embedded_field._delta(),
(
assert doc._get_changed_fields() == ["db_embedded_field.db_list_field.2"]
assert doc.embedded_field._delta() == (
{
"db_list_field.2": {
"_cls": "Embedded",
@@ -610,11 +585,8 @@ class DeltaTest(MongoDBTestCase):
}
},
{},
),
)
self.assertEqual(
doc._delta(),
(
assert doc._delta() == (
{
"db_embedded_field.db_list_field.2": {
"_cls": "Embedded",
@@ -625,17 +597,14 @@ class DeltaTest(MongoDBTestCase):
}
},
{},
),
)
doc.save()
doc = doc.reload(10)
self.assertEqual(doc.embedded_field.list_field[2].string_field, "hello world")
assert doc.embedded_field.list_field[2].string_field == "hello world"

# Test list native methods
doc.embedded_field.list_field[2].list_field.pop(0)
self.assertEqual(
doc._delta(),
(
assert doc._delta() == (
{
"db_embedded_field.db_list_field.2.db_list_field": [
2,
@@ -643,15 +612,12 @@ class DeltaTest(MongoDBTestCase):
]
},
{},
),
)
doc.save()
doc = doc.reload(10)

doc.embedded_field.list_field[2].list_field.append(1)
self.assertEqual(
doc._delta(),
(
assert doc._delta() == (
{
"db_embedded_field.db_list_field.2.db_list_field": [
2,
@@ -660,32 +626,32 @@ class DeltaTest(MongoDBTestCase):
]
},
{},
),
)
doc.save()
doc = doc.reload(10)
self.assertEqual(
doc.embedded_field.list_field[2].list_field, [2, {"hello": "world"}, 1]
)
assert doc.embedded_field.list_field[2].list_field == [2, {"hello": "world"}, 1]

doc.embedded_field.list_field[2].list_field.sort(key=str)
doc.save()
doc = doc.reload(10)
self.assertEqual(
doc.embedded_field.list_field[2].list_field, [1, 2, {"hello": "world"}]
)
assert doc.embedded_field.list_field[2].list_field == [1, 2, {"hello": "world"}]

del doc.embedded_field.list_field[2].list_field[2]["hello"]
self.assertEqual(
doc._delta(),
({}, {"db_embedded_field.db_list_field.2.db_list_field.2.hello": 1}),
assert doc._delta() == (
{},
{"db_embedded_field.db_list_field.2.db_list_field.2.hello": 1},
)
doc.save()
doc = doc.reload(10)

assert doc._delta() == (
{},
{},
)
del doc.embedded_field.list_field[2].list_field
self.assertEqual(
doc._delta(), ({}, {"db_embedded_field.db_list_field.2.db_list_field": 1})
assert doc._delta() == (
{},
{"db_embedded_field.db_list_field.2.db_list_field": 1},
)

def test_delta_for_dynamic_documents(self):
@@ -696,14 +662,16 @@ class DeltaTest(MongoDBTestCase):
Person.drop_collection()

p = Person(name="James", age=34)
self.assertEqual(
p._delta(), (SON([("_cls", "Person"), ("name", "James"), ("age", 34)]), {})
assert p._delta() == (
SON([("_cls", "Person"), ("name", "James"), ("age", 34)]),
{},
)

p.doc = 123
del p.doc
self.assertEqual(
p._delta(), (SON([("_cls", "Person"), ("name", "James"), ("age", 34)]), {})
assert p._delta() == (
SON([("_cls", "Person"), ("name", "James"), ("age", 34)]),
{},
)

p = Person()
@@ -712,18 +680,18 @@ class DeltaTest(MongoDBTestCase):
p.save()

p.age = 24
self.assertEqual(p.age, 24)
self.assertEqual(p._get_changed_fields(), ["age"])
self.assertEqual(p._delta(), ({"age": 24}, {}))
assert p.age == 24
assert p._get_changed_fields() == ["age"]
assert p._delta() == ({"age": 24}, {})

p = Person.objects(age=22).get()
p.age = 24
self.assertEqual(p.age, 24)
self.assertEqual(p._get_changed_fields(), ["age"])
self.assertEqual(p._delta(), ({"age": 24}, {}))
assert p.age == 24
assert p._get_changed_fields() == ["age"]
assert p._delta() == ({"age": 24}, {})

p.save()
self.assertEqual(1, Person.objects(age=24).count())
assert 1 == Person.objects(age=24).count()

def test_dynamic_delta(self):
class Doc(DynamicDocument):
@@ -734,40 +702,40 @@ class DeltaTest(MongoDBTestCase):
doc.save()

doc = Doc.objects.first()
self.assertEqual(doc._get_changed_fields(), [])
self.assertEqual(doc._delta(), ({}, {}))
assert doc._get_changed_fields() == []
assert doc._delta() == ({}, {})

doc.string_field = "hello"
self.assertEqual(doc._get_changed_fields(), ["string_field"])
self.assertEqual(doc._delta(), ({"string_field": "hello"}, {}))
assert doc._get_changed_fields() == ["string_field"]
assert doc._delta() == ({"string_field": "hello"}, {})

doc._changed_fields = []
doc.int_field = 1
self.assertEqual(doc._get_changed_fields(), ["int_field"])
self.assertEqual(doc._delta(), ({"int_field": 1}, {}))
assert doc._get_changed_fields() == ["int_field"]
assert doc._delta() == ({"int_field": 1}, {})

doc._changed_fields = []
dict_value = {"hello": "world", "ping": "pong"}
doc.dict_field = dict_value
self.assertEqual(doc._get_changed_fields(), ["dict_field"])
self.assertEqual(doc._delta(), ({"dict_field": dict_value}, {}))
assert doc._get_changed_fields() == ["dict_field"]
assert doc._delta() == ({"dict_field": dict_value}, {})

doc._changed_fields = []
list_value = ["1", 2, {"hello": "world"}]
doc.list_field = list_value
self.assertEqual(doc._get_changed_fields(), ["list_field"])
self.assertEqual(doc._delta(), ({"list_field": list_value}, {}))
assert doc._get_changed_fields() == ["list_field"]
assert doc._delta() == ({"list_field": list_value}, {})

# Test unsetting
doc._changed_fields = []
doc.dict_field = {}
self.assertEqual(doc._get_changed_fields(), ["dict_field"])
self.assertEqual(doc._delta(), ({}, {"dict_field": 1}))
assert doc._get_changed_fields() == ["dict_field"]
assert doc._delta() == ({}, {"dict_field": 1})

doc._changed_fields = []
doc.list_field = []
self.assertEqual(doc._get_changed_fields(), ["list_field"])
self.assertEqual(doc._delta(), ({}, {"list_field": 1}))
assert doc._get_changed_fields() == ["list_field"]
assert doc._delta() == ({}, {"list_field": 1})

def test_delta_with_dbref_true(self):
person, organization, employee = self.circular_reference_deltas_2(
@@ -775,16 +743,16 @@ class DeltaTest(MongoDBTestCase):
)
employee.name = "test"

self.assertEqual(organization._get_changed_fields(), [])
assert organization._get_changed_fields() == []

updates, removals = organization._delta()
self.assertEqual({}, removals)
self.assertEqual({}, updates)
assert removals == {}
assert updates == {}

organization.employees.append(person)
updates, removals = organization._delta()
self.assertEqual({}, removals)
self.assertIn("employees", updates)
assert removals == {}
assert "employees" in updates

def test_delta_with_dbref_false(self):
person, organization, employee = self.circular_reference_deltas_2(
@@ -792,16 +760,16 @@ class DeltaTest(MongoDBTestCase):
)
employee.name = "test"

self.assertEqual(organization._get_changed_fields(), [])
assert organization._get_changed_fields() == []

updates, removals = organization._delta()
self.assertEqual({}, removals)
self.assertEqual({}, updates)
assert removals == {}
assert updates == {}

organization.employees.append(person)
updates, removals = organization._delta()
self.assertEqual({}, removals)
self.assertIn("employees", updates)
assert removals == {}
assert "employees" in updates

def test_nested_nested_fields_mark_as_changed(self):
class EmbeddedDoc(EmbeddedDocument):
@@ -813,19 +781,46 @@ class DeltaTest(MongoDBTestCase):

MyDoc.drop_collection()

mydoc = MyDoc(
name="testcase1", subs={"a": {"b": EmbeddedDoc(name="foo")}}
).save()
MyDoc(name="testcase1", subs={"a": {"b": EmbeddedDoc(name="foo")}}).save()

mydoc = MyDoc.objects.first()
subdoc = mydoc.subs["a"]["b"]
subdoc.name = "bar"

self.assertEqual(["name"], subdoc._get_changed_fields())
self.assertEqual(["subs.a.b.name"], mydoc._get_changed_fields())
assert subdoc._get_changed_fields() == ["name"]
assert mydoc._get_changed_fields() == ["subs.a.b.name"]

mydoc._clear_changed_fields()
self.assertEqual([], mydoc._get_changed_fields())
assert mydoc._get_changed_fields() == []

def test_nested_nested_fields_db_field_set__gets_mark_as_changed_and_cleaned(self):
class EmbeddedDoc(EmbeddedDocument):
name = StringField(db_field="db_name")

class MyDoc(Document):
embed = EmbeddedDocumentField(EmbeddedDoc, db_field="db_embed")
name = StringField(db_field="db_name")

MyDoc.drop_collection()

MyDoc(name="testcase1", embed=EmbeddedDoc(name="foo")).save()

mydoc = MyDoc.objects.first()
mydoc.embed.name = "foo1"

assert mydoc.embed._get_changed_fields() == ["db_name"]
assert mydoc._get_changed_fields() == ["db_embed.db_name"]

mydoc = MyDoc.objects.first()
embed = EmbeddedDoc(name="foo2")
embed.name = "bar"
mydoc.embed = embed

assert embed._get_changed_fields() == ["db_name"]
assert mydoc._get_changed_fields() == ["db_embed"]

mydoc._clear_changed_fields()
assert mydoc._get_changed_fields() == []

def test_lower_level_mark_as_changed(self):
class EmbeddedDoc(EmbeddedDocument):
@@ -840,17 +835,17 @@ class DeltaTest(MongoDBTestCase):

mydoc = MyDoc.objects.first()
mydoc.subs["a"] = EmbeddedDoc()
self.assertEqual(["subs.a"], mydoc._get_changed_fields())
assert mydoc._get_changed_fields() == ["subs.a"]

subdoc = mydoc.subs["a"]
subdoc.name = "bar"

self.assertEqual(["name"], subdoc._get_changed_fields())
self.assertEqual(["subs.a"], mydoc._get_changed_fields())
assert subdoc._get_changed_fields() == ["name"]
assert mydoc._get_changed_fields() == ["subs.a"]
mydoc.save()

mydoc._clear_changed_fields()
self.assertEqual([], mydoc._get_changed_fields())
assert mydoc._get_changed_fields() == []

def test_upper_level_mark_as_changed(self):
class EmbeddedDoc(EmbeddedDocument):
@@ -867,15 +862,15 @@ class DeltaTest(MongoDBTestCase):
subdoc = mydoc.subs["a"]
subdoc.name = "bar"

self.assertEqual(["name"], subdoc._get_changed_fields())
self.assertEqual(["subs.a.name"], mydoc._get_changed_fields())
assert subdoc._get_changed_fields() == ["name"]
assert mydoc._get_changed_fields() == ["subs.a.name"]

mydoc.subs["a"] = EmbeddedDoc()
self.assertEqual(["subs.a"], mydoc._get_changed_fields())
assert mydoc._get_changed_fields() == ["subs.a"]
mydoc.save()

mydoc._clear_changed_fields()
self.assertEqual([], mydoc._get_changed_fields())
assert mydoc._get_changed_fields() == []

def test_referenced_object_changed_attributes(self):
"""Ensures that when you save a new reference to a field, the referenced object isn't altered"""
@@ -902,22 +897,22 @@ class DeltaTest(MongoDBTestCase):
org1.reload()
org2.reload()
user.reload()
self.assertEqual(org1.name, "Org 1")
self.assertEqual(org2.name, "Org 2")
self.assertEqual(user.name, "Fred")
assert org1.name == "Org 1"
assert org2.name == "Org 2"
assert user.name == "Fred"

user.name = "Harold"
user.org = org2

org2.name = "New Org 2"
self.assertEqual(org2.name, "New Org 2")
assert org2.name == "New Org 2"

user.save()
org2.save()

self.assertEqual(org2.name, "New Org 2")
assert org2.name == "New Org 2"
org2.reload()
self.assertEqual(org2.name, "New Org 2")
assert org2.name == "New Org 2"

def test_delta_for_nested_map_fields(self):
class UInfoDocument(Document):
@@ -950,12 +945,12 @@ class DeltaTest(MongoDBTestCase):
d.users["007"]["rolist"].append(EmbeddedRole(type="oops"))
d.users["007"]["info"] = uinfo
delta = d._delta()
self.assertEqual(True, "users.007.roles.666" in delta[0])
self.assertEqual(True, "users.007.rolist" in delta[0])
self.assertEqual(True, "users.007.info" in delta[0])
self.assertEqual("superadmin", delta[0]["users.007.roles.666"]["type"])
self.assertEqual("oops", delta[0]["users.007.rolist"][0]["type"])
self.assertEqual(uinfo.id, delta[0]["users.007.info"])
assert True == ("users.007.roles.666" in delta[0])
assert True == ("users.007.rolist" in delta[0])
assert True == ("users.007.info" in delta[0])
assert "superadmin" == delta[0]["users.007.roles.666"]["type"]
assert "oops" == delta[0]["users.007.rolist"][0]["type"]
assert uinfo.id == delta[0]["users.007.info"]


if __name__ == "__main__":
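
The hunks above apply one mechanical rewrite across the delta tests: unittest's camelCase helpers (`self.assertEqual`, `self.assertIn`, `self.assertRaises`) become bare `assert` statements, which pytest rewrites at import time so failures still report both operands. A minimal, self-contained sketch of that pattern — the `add`/`test_*` names are invented for illustration and are not code from this diff:

```python
import pytest


def add(a, b):
    return a + b


def test_add():
    # Replaces self.assertEqual(add(2, 2), 4); pytest's assertion
    # rewriting shows both operands if the comparison fails.
    assert add(2, 2) == 4


def test_add_rejects_none():
    # Replaces self.assertRaises(TypeError, add, 2, None); the context
    # manager scopes the expected error to exactly one statement.
    with pytest.raises(TypeError):
        add(2, None)
```
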
@@ -1,5 +1,7 @@
import unittest

import pytest

from mongoengine import *
from tests.utils import MongoDBTestCase

@@ -8,7 +10,7 @@ __all__ = ("TestDynamicDocument",)

class TestDynamicDocument(MongoDBTestCase):
def setUp(self):
super(TestDynamicDocument, self).setUp()
super().setUp()

class Person(DynamicDocument):
name = StringField()
@@ -25,15 +27,28 @@ class TestDynamicDocument(MongoDBTestCase):
p.name = "James"
p.age = 34

self.assertEqual(p.to_mongo(), {"_cls": "Person", "name": "James", "age": 34})
self.assertEqual(p.to_mongo().keys(), ["_cls", "name", "age"])
assert p.to_mongo() == {"_cls": "Person", "name": "James", "age": 34}
assert p.to_mongo().keys() == ["_cls", "name", "age"]
p.save()
self.assertEqual(p.to_mongo().keys(), ["_id", "_cls", "name", "age"])
assert p.to_mongo().keys() == ["_id", "_cls", "name", "age"]

self.assertEqual(self.Person.objects.first().age, 34)
assert self.Person.objects.first().age == 34

# Confirm no changes to self.Person
self.assertFalse(hasattr(self.Person, "age"))
assert not hasattr(self.Person, "age")

def test_dynamic_document_parse_values_in_constructor_like_document_do(self):
class ProductDynamicDocument(DynamicDocument):
title = StringField()
price = FloatField()

class ProductDocument(Document):
title = StringField()
price = FloatField()

product = ProductDocument(title="Blabla", price="12.5")
dyn_product = ProductDynamicDocument(title="Blabla", price="12.5")
assert product.price == dyn_product.price == 12.5

def test_change_scope_of_variable(self):
"""Test changing the scope of a dynamic field has no adverse effects"""
@@ -47,7 +62,7 @@ class TestDynamicDocument(MongoDBTestCase):
p.save()

p = self.Person.objects.get()
self.assertEqual(p.misc, {"hello": "world"})
assert p.misc == {"hello": "world"}

def test_delete_dynamic_field(self):
"""Test deleting a dynamic field works"""
@@ -62,19 +77,19 @@ class TestDynamicDocument(MongoDBTestCase):
p.save()

p = self.Person.objects.get()
self.assertEqual(p.misc, {"hello": "world"})
assert p.misc == {"hello": "world"}
collection = self.db[self.Person._get_collection_name()]
obj = collection.find_one()
self.assertEqual(sorted(obj.keys()), ["_cls", "_id", "misc", "name"])
assert sorted(obj.keys()) == ["_cls", "_id", "misc", "name"]

del p.misc
p.save()

p = self.Person.objects.get()
self.assertFalse(hasattr(p, "misc"))
assert not hasattr(p, "misc")

obj = collection.find_one()
self.assertEqual(sorted(obj.keys()), ["_cls", "_id", "name"])
assert sorted(obj.keys()) == ["_cls", "_id", "name"]

def test_reload_after_unsetting(self):
p = self.Person()
@@ -88,12 +103,12 @@ class TestDynamicDocument(MongoDBTestCase):
p = self.Person.objects.create()
p.update(age=1)

self.assertEqual(len(p._data), 3)
self.assertEqual(sorted(p._data.keys()), ["_cls", "id", "name"])
assert len(p._data) == 3
assert sorted(p._data.keys()) == ["_cls", "id", "name"]

p.reload()
self.assertEqual(len(p._data), 4)
self.assertEqual(sorted(p._data.keys()), ["_cls", "age", "id", "name"])
assert len(p._data) == 4
assert sorted(p._data.keys()) == ["_cls", "age", "id", "name"]

def test_fields_without_underscore(self):
"""Ensure we can query dynamic fields"""
@@ -103,16 +118,18 @@ class TestDynamicDocument(MongoDBTestCase):
p.save()

raw_p = Person.objects.as_pymongo().get(id=p.id)
self.assertEqual(raw_p, {"_cls": u"Person", "_id": p.id, "name": u"Dean"})
assert raw_p == {"_cls": "Person", "_id": p.id, "name": "Dean"}

p.name = "OldDean"
p.newattr = "garbage"
p.save()
raw_p = Person.objects.as_pymongo().get(id=p.id)
self.assertEqual(
raw_p,
{"_cls": u"Person", "_id": p.id, "name": "OldDean", "newattr": u"garbage"},
)
assert raw_p == {
"_cls": "Person",
"_id": p.id,
"name": "OldDean",
"newattr": "garbage",
}

def test_fields_containing_underscore(self):
"""Ensure we can query dynamic fields"""
@@ -127,14 +144,14 @@ class TestDynamicDocument(MongoDBTestCase):
p.save()

raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id)
self.assertEqual(raw_p, {"_id": p.id, "_name": u"Dean", "name": u"Dean"})
assert raw_p == {"_id": p.id, "_name": "Dean", "name": "Dean"}

p.name = "OldDean"
p._name = "NewDean"
p._newattr1 = "garbage"  # Unknown fields won't be added
p.save()
raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id)
self.assertEqual(raw_p, {"_id": p.id, "_name": u"NewDean", "name": u"OldDean"})
assert raw_p == {"_id": p.id, "_name": "NewDean", "name": "OldDean"}

def test_dynamic_document_queries(self):
"""Ensure we can query dynamic fields"""
@@ -143,10 +160,10 @@ class TestDynamicDocument(MongoDBTestCase):
p.age = 22
p.save()

self.assertEqual(1, self.Person.objects(age=22).count())
assert 1 == self.Person.objects(age=22).count()
p = self.Person.objects(age=22)
p = p.get()
self.assertEqual(22, p.age)
assert 22 == p.age

def test_complex_dynamic_document_queries(self):
class Person(DynamicDocument):
@@ -166,8 +183,8 @@ class TestDynamicDocument(MongoDBTestCase):
p2.age = 10
p2.save()

self.assertEqual(Person.objects(age__icontains="ten").count(), 2)
self.assertEqual(Person.objects(age__gte=10).count(), 1)
assert Person.objects(age__icontains="ten").count() == 2
assert Person.objects(age__gte=10).count() == 1

def test_complex_data_lookups(self):
"""Ensure you can query dynamic document dynamic fields"""
@@ -175,12 +192,12 @@ class TestDynamicDocument(MongoDBTestCase):
p.misc = {"hello": "world"}
p.save()

self.assertEqual(1, self.Person.objects(misc__hello="world").count())
assert 1 == self.Person.objects(misc__hello="world").count()

def test_three_level_complex_data_lookups(self):
"""Ensure you can query three level document dynamic fields"""
p = self.Person.objects.create(misc={"hello": {"hello2": "world"}})
self.assertEqual(1, self.Person.objects(misc__hello__hello2="world").count())
self.Person.objects.create(misc={"hello": {"hello2": "world"}})
assert 1 == self.Person.objects(misc__hello__hello2="world").count()

def test_complex_embedded_document_validation(self):
"""Ensure embedded dynamic documents may be validated"""
@@ -198,11 +215,13 @@ class TestDynamicDocument(MongoDBTestCase):
embedded_doc_1.validate()

embedded_doc_2 = Embedded(content="this is not a url")
self.assertRaises(ValidationError, embedded_doc_2.validate)
with pytest.raises(ValidationError):
embedded_doc_2.validate()

doc.embedded_field_1 = embedded_doc_1
doc.embedded_field_2 = embedded_doc_2
self.assertRaises(ValidationError, doc.validate)
with pytest.raises(ValidationError):
doc.validate()

def test_inheritance(self):
"""Ensure that dynamic document plays nice with inheritance"""
@@ -212,11 +231,9 @@ class TestDynamicDocument(MongoDBTestCase):

Employee.drop_collection()

self.assertIn("name", Employee._fields)
self.assertIn("salary", Employee._fields)
self.assertEqual(
Employee._get_collection_name(), self.Person._get_collection_name()
)
assert "name" in Employee._fields
assert "salary" in Employee._fields
assert Employee._get_collection_name() == self.Person._get_collection_name()

joe_bloggs = Employee()
joe_bloggs.name = "Joe Bloggs"
@@ -224,11 +241,11 @@ class TestDynamicDocument(MongoDBTestCase):
joe_bloggs.age = 20
joe_bloggs.save()

self.assertEqual(1, self.Person.objects(age=20).count())
self.assertEqual(1, Employee.objects(age=20).count())
assert 1 == self.Person.objects(age=20).count()
assert 1 == Employee.objects(age=20).count()

joe_bloggs = self.Person.objects.first()
self.assertIsInstance(joe_bloggs, Employee)
assert isinstance(joe_bloggs, Employee)

def test_embedded_dynamic_document(self):
"""Test dynamic embedded documents"""
@@ -249,9 +266,7 @@ class TestDynamicDocument(MongoDBTestCase):
embedded_1.list_field = ["1", 2, {"hello": "world"}]
doc.embedded_field = embedded_1

self.assertEqual(
doc.to_mongo(),
{
assert doc.to_mongo() == {
"embedded_field": {
"_cls": "Embedded",
"string_field": "hello",
@@ -259,16 +274,15 @@ class TestDynamicDocument(MongoDBTestCase):
"dict_field": {"hello": "world"},
"list_field": ["1", 2, {"hello": "world"}],
}
},
)
}
doc.save()

doc = Doc.objects.first()
self.assertEqual(doc.embedded_field.__class__, Embedded)
self.assertEqual(doc.embedded_field.string_field, "hello")
self.assertEqual(doc.embedded_field.int_field, 1)
self.assertEqual(doc.embedded_field.dict_field, {"hello": "world"})
self.assertEqual(doc.embedded_field.list_field, ["1", 2, {"hello": "world"}])
assert doc.embedded_field.__class__ == Embedded
assert doc.embedded_field.string_field == "hello"
assert doc.embedded_field.int_field == 1
assert doc.embedded_field.dict_field == {"hello": "world"}
assert doc.embedded_field.list_field == ["1", 2, {"hello": "world"}]

def test_complex_embedded_documents(self):
"""Test complex dynamic embedded documents setups"""
@@ -296,9 +310,7 @@ class TestDynamicDocument(MongoDBTestCase):
embedded_1.list_field = ["1", 2, embedded_2]
doc.embedded_field = embedded_1

self.assertEqual(
doc.to_mongo(),
{
assert doc.to_mongo() == {
"embedded_field": {
"_cls": "Embedded",
"string_field": "hello",
@@ -316,24 +328,23 @@ class TestDynamicDocument(MongoDBTestCase):
},
],
}
},
)
}
doc.save()
doc = Doc.objects.first()
self.assertEqual(doc.embedded_field.__class__, Embedded)
self.assertEqual(doc.embedded_field.string_field, "hello")
self.assertEqual(doc.embedded_field.int_field, 1)
self.assertEqual(doc.embedded_field.dict_field, {"hello": "world"})
self.assertEqual(doc.embedded_field.list_field[0], "1")
self.assertEqual(doc.embedded_field.list_field[1], 2)
assert doc.embedded_field.__class__ == Embedded
assert doc.embedded_field.string_field == "hello"
assert doc.embedded_field.int_field == 1
assert doc.embedded_field.dict_field == {"hello": "world"}
assert doc.embedded_field.list_field[0] == "1"
assert doc.embedded_field.list_field[1] == 2

embedded_field = doc.embedded_field.list_field[2]

self.assertEqual(embedded_field.__class__, Embedded)
self.assertEqual(embedded_field.string_field, "hello")
self.assertEqual(embedded_field.int_field, 1)
self.assertEqual(embedded_field.dict_field, {"hello": "world"})
self.assertEqual(embedded_field.list_field, ["1", 2, {"hello": "world"}])
assert embedded_field.__class__ == Embedded
assert embedded_field.string_field == "hello"
assert embedded_field.int_field == 1
assert embedded_field.dict_field == {"hello": "world"}
assert embedded_field.list_field == ["1", 2, {"hello": "world"}]

def test_dynamic_and_embedded(self):
"""Ensure embedded documents play nicely"""
@@ -352,18 +363,18 @@ class TestDynamicDocument(MongoDBTestCase):
person.address.city = "Lundenne"
person.save()

self.assertEqual(Person.objects.first().address.city, "Lundenne")
assert Person.objects.first().address.city == "Lundenne"

person = Person.objects.first()
person.address = Address(city="Londinium")
person.save()

self.assertEqual(Person.objects.first().address.city, "Londinium")
assert Person.objects.first().address.city == "Londinium"

person = Person.objects.first()
person.age = 35
person.save()
self.assertEqual(Person.objects.first().age, 35)
assert Person.objects.first().age == 35

def test_dynamic_embedded_works_with_only(self):
"""Ensure custom fieldnames on a dynamic embedded document are found by qs.only()"""
@@ -380,10 +391,10 @@ class TestDynamicDocument(MongoDBTestCase):
name="Eric", address=Address(city="San Francisco", street_number="1337")
).save()

self.assertEqual(Person.objects.first().address.street_number, "1337")
self.assertEqual(
Person.objects.only("address__street_number").first().address.street_number,
"1337",
assert Person.objects.first().address.street_number == "1337"
assert (
Person.objects.only("address__street_number").first().address.street_number
== "1337"
)

def test_dynamic_and_embedded_dict_access(self):
@@ -408,20 +419,20 @@ class TestDynamicDocument(MongoDBTestCase):
person["address"]["city"] = "Lundenne"
person.save()

self.assertEqual(Person.objects.first().address.city, "Lundenne")
assert Person.objects.first().address.city == "Lundenne"

self.assertEqual(Person.objects.first().phone, "555-1212")
assert Person.objects.first().phone == "555-1212"

person = Person.objects.first()
person.address = Address(city="Londinium")
person.save()

self.assertEqual(Person.objects.first().address.city, "Londinium")
assert Person.objects.first().address.city == "Londinium"

person = Person.objects.first()
person["age"] = 35
person.save()
self.assertEqual(Person.objects.first().age, 35)
assert Person.objects.first().age == 35


if __name__ == "__main__":
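
The dynamic-document file above, and the index-test file that follows, also shed Python 2 compatibility shims: `super(Class, self)` becomes zero-argument `super()`, redundant `u"..."` prefixes disappear, and `six.iteritems(d)` is replaced by plain `d.items()`. A short runnable sketch of those three cleanups — the `Base`/`Child` names are invented for the example, not taken from this diff:

```python
class Base:
    def greet(self):
        return "base"


class Child(Base):
    def greet(self):
        # Zero-argument super() replaces super(Child, self).greet();
        # Python 3 supplies the class and instance implicitly.
        return super().greet()


assert Child().greet() == "base"
assert u"Dean" == "Dean"  # u-prefixes are no-ops on Python 3

data = {"a": 1, "b": 2}
# dict.items() replaces six.iteritems(data); Python 3 already returns
# a lightweight view, so the six shim adds nothing.
assert [(k, v) for k, v in data.items()] == [("a", 1), ("b", 2)]
```
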
@@ -1,19 +1,19 @@
# -*- coding: utf-8 -*-
import unittest
from datetime import datetime

from nose.plugins.skip import SkipTest
import pytest
from pymongo.collation import Collation
from pymongo.errors import OperationFailure
import pymongo
from six import iteritems

from mongoengine import *
from mongoengine.connection import get_db

__all__ = ("IndexesTest",)
from mongoengine.mongodb_support import (
MONGODB_42,
get_mongodb_version,
)


class IndexesTest(unittest.TestCase):
class TestIndexes(unittest.TestCase):
def setUp(self):
self.connection = connect(db="mongoenginetest")
self.db = get_db()
@@ -55,15 +55,15 @@ class IndexesTest(unittest.TestCase):
{"fields": [("tags", 1)]},
{"fields": [("category", 1), ("addDate", -1)]},
]
self.assertEqual(expected_specs, BlogPost._meta["index_specs"])
assert expected_specs == BlogPost._meta["index_specs"]

BlogPost.ensure_indexes()
info = BlogPost.objects._collection.index_information()
# _id, '-date', 'tags', ('cat', 'date')
self.assertEqual(len(info), 4)
info = [value["key"] for key, value in iteritems(info)]
assert len(info) == 4
info = [value["key"] for key, value in info.items()]
for expected in expected_specs:
self.assertIn(expected["fields"], info)
assert expected["fields"] in info

def _index_test_inheritance(self, InheritFrom):
class BlogPost(InheritFrom):
@@ -80,7 +80,7 @@ class IndexesTest(unittest.TestCase):
{"fields": [("_cls", 1), ("tags", 1)]},
{"fields": [("_cls", 1), ("category", 1), ("addDate", -1)]},
]
self.assertEqual(expected_specs, BlogPost._meta["index_specs"])
assert expected_specs == BlogPost._meta["index_specs"]

BlogPost.ensure_indexes()
info = BlogPost.objects._collection.index_information()
@@ -88,25 +88,25 @@ class IndexesTest(unittest.TestCase):
# NB: there is no index on _cls by itself, since
# the indices on -date and tags will both contain
# _cls as first element in the key
self.assertEqual(len(info), 4)
info = [value["key"] for key, value in iteritems(info)]
assert len(info) == 4
info = [value["key"] for key, value in info.items()]
for expected in expected_specs:
self.assertIn(expected["fields"], info)
assert expected["fields"] in info

class ExtendedBlogPost(BlogPost):
title = StringField()
meta = {"indexes": ["title"]}

expected_specs.append({"fields": [("_cls", 1), ("title", 1)]})
self.assertEqual(expected_specs, ExtendedBlogPost._meta["index_specs"])
assert expected_specs == ExtendedBlogPost._meta["index_specs"]

BlogPost.drop_collection()

ExtendedBlogPost.ensure_indexes()
info = ExtendedBlogPost.objects._collection.index_information()
info = [value["key"] for key, value in iteritems(info)]
info = [value["key"] for key, value in info.items()]
for expected in expected_specs:
self.assertIn(expected["fields"], info)
assert expected["fields"] in info

def test_indexes_document_inheritance(self):
"""Ensure that indexes are used when meta[indexes] is specified for
@@ -130,10 +130,8 @@ class IndexesTest(unittest.TestCase):
class B(A):
description = StringField()

self.assertEqual(A._meta["index_specs"], B._meta["index_specs"])
self.assertEqual(
[{"fields": [("_cls", 1), ("title", 1)]}], A._meta["index_specs"]
)
assert A._meta["index_specs"] == B._meta["index_specs"]
assert [{"fields": [("_cls", 1), ("title", 1)]}] == A._meta["index_specs"]

def test_index_no_cls(self):
"""Ensure index specs are inhertited correctly"""
|
||||
@@ -146,11 +144,11 @@ class IndexesTest(unittest.TestCase):
"index_cls": False,
}

self.assertEqual([("title", 1)], A._meta["index_specs"][0]["fields"])
assert [("title", 1)] == A._meta["index_specs"][0]["fields"]
A._get_collection().drop_indexes()
A.ensure_indexes()
info = A._get_collection().index_information()
self.assertEqual(len(info.keys()), 2)
assert len(info.keys()) == 2

class B(A):
c = StringField()
@@ -160,8 +158,8 @@ class IndexesTest(unittest.TestCase):
"allow_inheritance": True,
}

self.assertEqual([("c", 1)], B._meta["index_specs"][1]["fields"])
self.assertEqual([("_cls", 1), ("d", 1)], B._meta["index_specs"][2]["fields"])
assert [("c", 1)] == B._meta["index_specs"][1]["fields"]
assert [("_cls", 1), ("d", 1)] == B._meta["index_specs"][2]["fields"]

def test_build_index_spec_is_not_destructive(self):
class MyDoc(Document):
@@ -169,16 +167,15 @@ class IndexesTest(unittest.TestCase):

meta = {"indexes": ["keywords"], "allow_inheritance": False}

self.assertEqual(MyDoc._meta["index_specs"], [{"fields": [("keywords", 1)]}])
assert MyDoc._meta["index_specs"] == [{"fields": [("keywords", 1)]}]

# Force index creation
MyDoc.ensure_indexes()

self.assertEqual(MyDoc._meta["index_specs"], [{"fields": [("keywords", 1)]}])
assert MyDoc._meta["index_specs"] == [{"fields": [("keywords", 1)]}]

def test_embedded_document_index_meta(self):
"""Ensure that embedded document indexes are created explicitly
"""
"""Ensure that embedded document indexes are created explicitly"""

class Rank(EmbeddedDocument):
title = StringField(required=True)
@@ -189,36 +186,32 @@ class IndexesTest(unittest.TestCase):

meta = {"indexes": ["rank.title"], "allow_inheritance": False}

self.assertEqual([{"fields": [("rank.title", 1)]}], Person._meta["index_specs"])
assert [{"fields": [("rank.title", 1)]}] == Person._meta["index_specs"]

Person.drop_collection()

# Indexes are lazy so use list() to perform query
list(Person.objects)
info = Person.objects._collection.index_information()
info = [value["key"] for key, value in iteritems(info)]
self.assertIn([("rank.title", 1)], info)
info = [value["key"] for key, value in info.items()]
assert [("rank.title", 1)] in info

def test_explicit_geo2d_index(self):
"""Ensure that geo2d indexes work when created via meta[indexes]
"""
"""Ensure that geo2d indexes work when created via meta[indexes]"""

class Place(Document):
location = DictField()
meta = {"allow_inheritance": True, "indexes": ["*location.point"]}

self.assertEqual(
[{"fields": [("location.point", "2d")]}], Place._meta["index_specs"]
)
assert [{"fields": [("location.point", "2d")]}] == Place._meta["index_specs"]

Place.ensure_indexes()
info = Place._get_collection().index_information()
info = [value["key"] for key, value in iteritems(info)]
self.assertIn([("location.point", "2d")], info)
info = [value["key"] for key, value in info.items()]
assert [("location.point", "2d")] in info

def test_explicit_geo2d_index_embedded(self):
"""Ensure that geo2d indexes work when created via meta[indexes]
"""
"""Ensure that geo2d indexes work when created via meta[indexes]"""

class EmbeddedLocation(EmbeddedDocument):
location = DictField()
@@ -227,36 +220,34 @@ class IndexesTest(unittest.TestCase):
current = DictField(field=EmbeddedDocumentField("EmbeddedLocation"))
meta = {"allow_inheritance": True, "indexes": ["*current.location.point"]}

self.assertEqual(
[{"fields": [("current.location.point", "2d")]}], Place._meta["index_specs"]
)
assert [{"fields": [("current.location.point", "2d")]}] == Place._meta[
"index_specs"
]

Place.ensure_indexes()
info = Place._get_collection().index_information()
info = [value["key"] for key, value in iteritems(info)]
self.assertIn([("current.location.point", "2d")], info)
info = [value["key"] for key, value in info.items()]
assert [("current.location.point", "2d")] in info

def test_explicit_geosphere_index(self):
"""Ensure that geosphere indexes work when created via meta[indexes]
"""
"""Ensure that geosphere indexes work when created via meta[indexes]"""

class Place(Document):
location = DictField()
meta = {"allow_inheritance": True, "indexes": ["(location.point"]}

self.assertEqual(
[{"fields": [("location.point", "2dsphere")]}], Place._meta["index_specs"]
)
assert [{"fields": [("location.point", "2dsphere")]}] == Place._meta[
"index_specs"
]

Place.ensure_indexes()
info = Place._get_collection().index_information()
info = [value["key"] for key, value in iteritems(info)]
self.assertIn([("location.point", "2dsphere")], info)
info = [value["key"] for key, value in info.items()]
assert [("location.point", "2dsphere")] in info

def test_explicit_geohaystack_index(self):
"""Ensure that geohaystack indexes work when created via meta[indexes]
"""
raise SkipTest(
"""Ensure that geohaystack indexes work when created via meta[indexes]"""
pytest.skip(
"GeoHaystack index creation is not supported for now"
"from meta, as it requires a bucketSize parameter."
)
@@ -266,19 +257,17 @@ class IndexesTest(unittest.TestCase):
name = StringField()
meta = {"indexes": [(")location.point", "name")]}

self.assertEqual(
[{"fields": [("location.point", "geoHaystack"), ("name", 1)]}],
Place._meta["index_specs"],
)
assert [
{"fields": [("location.point", "geoHaystack"), ("name", 1)]}
] == Place._meta["index_specs"]

Place.ensure_indexes()
info = Place._get_collection().index_information()
info = [value["key"] for key, value in iteritems(info)]
self.assertIn([("location.point", "geoHaystack")], info)
info = [value["key"] for key, value in info.items()]
assert [("location.point", "geoHaystack")] in info

def test_create_geohaystack_index(self):
"""Ensure that geohaystack indexes can be created
"""
"""Ensure that geohaystack indexes can be created"""

class Place(Document):
location = DictField()
@@ -286,8 +275,8 @@ class IndexesTest(unittest.TestCase):

Place.create_index({"fields": (")location.point", "name")}, bucketSize=10)
info = Place._get_collection().index_information()
info = [value["key"] for key, value in iteritems(info)]
self.assertIn([("location.point", "geoHaystack"), ("name", 1)], info)
info = [value["key"] for key, value in info.items()]
assert [("location.point", "geoHaystack"), ("name", 1)] in info

def test_dictionary_indexes(self):
"""Ensure that indexes are used when meta[indexes] contains
@@ -300,25 +289,24 @@ class IndexesTest(unittest.TestCase):
tags = ListField(StringField())
meta = {"indexes": [{"fields": ["-date"], "unique": True, "sparse": True}]}

self.assertEqual(
[{"fields": [("addDate", -1)], "unique": True, "sparse": True}],
BlogPost._meta["index_specs"],
)
assert [
{"fields": [("addDate", -1)], "unique": True, "sparse": True}
] == BlogPost._meta["index_specs"]

BlogPost.drop_collection()

info = BlogPost.objects._collection.index_information()
# _id, '-date'
self.assertEqual(len(info), 2)
assert len(info) == 2

# Indexes are lazy so use list() to perform query
list(BlogPost.objects)
info = BlogPost.objects._collection.index_information()
info = [
(value["key"], value.get("unique", False), value.get("sparse", False))
for key, value in iteritems(info)
for key, value in info.items()
]
self.assertIn(([("addDate", -1)], True, True), info)
assert ([("addDate", -1)], True, True) in info

BlogPost.drop_collection()

@@ -340,11 +328,9 @@ class IndexesTest(unittest.TestCase):

Person(name="test", user_guid="123").save()

self.assertEqual(1, Person.objects.count())
assert 1 == Person.objects.count()
info = Person.objects._collection.index_information()
self.assertEqual(
sorted(info.keys()), ["_cls_1_name_1", "_cls_1_user_guid_1", "_id_"]
)
assert sorted(info.keys()) == ["_cls_1_name_1", "_cls_1_user_guid_1", "_id_"]

def test_disable_index_creation(self):
"""Tests setting auto_create_index to False on the connection will
@@ -367,17 +353,16 @@ class IndexesTest(unittest.TestCase):
User(user_guid="123").save()
MongoUser(user_guid="123").save()

self.assertEqual(2, User.objects.count())
assert 2 == User.objects.count()
info = User.objects._collection.index_information()
self.assertEqual(list(info.keys()), ["_id_"])
assert list(info.keys()) == ["_id_"]

User.ensure_indexes()
info = User.objects._collection.index_information()
self.assertEqual(sorted(info.keys()), ["_cls_1_user_guid_1", "_id_"])
assert sorted(info.keys()) == ["_cls_1_user_guid_1", "_id_"]

def test_embedded_document_index(self):
"""Tests settings an index on an embedded document
"""
"""Tests settings an index on an embedded document"""

class Date(EmbeddedDocument):
year = IntField(db_field="yr")
@@ -391,11 +376,10 @@ class IndexesTest(unittest.TestCase):
BlogPost.drop_collection()

info = BlogPost.objects._collection.index_information()
self.assertEqual(sorted(info.keys()), ["_id_", "date.yr_-1"])
assert sorted(info.keys()) == ["_id_", "date.yr_-1"]

def test_list_embedded_document_index(self):
"""Ensure list embedded documents can be indexed
"""
"""Ensure list embedded documents can be indexed"""

class Tag(EmbeddedDocument):
name = StringField(db_field="tag")
@@ -410,7 +394,7 @@ class IndexesTest(unittest.TestCase):

info = BlogPost.objects._collection.index_information()
# we don't use _cls in with list fields by default
self.assertEqual(sorted(info.keys()), ["_id_", "tags.tag_1"])
assert sorted(info.keys()) == ["_id_", "tags.tag_1"]

post1 = BlogPost(
title="Embedded Indexes tests in place",
@@ -428,11 +412,10 @@ class IndexesTest(unittest.TestCase):

RecursiveDocument.ensure_indexes()
info = RecursiveDocument._get_collection().index_information()
self.assertEqual(sorted(info.keys()), ["_cls_1", "_id_"])
assert sorted(info.keys()) == ["_cls_1", "_id_"]

def test_covered_index(self):
"""Ensure that covered indexes can be used
"""
"""Ensure that covered indexes can be used"""

class Test(Document):
a = IntField()
@@ -448,46 +431,47 @@ class IndexesTest(unittest.TestCase):
# Need to be explicit about covered indexes as mongoDB doesn't know if
# the documents returned might have more keys in that here.
query_plan = Test.objects(id=obj.id).exclude("a").explain()
self.assertEqual(
assert (
query_plan.get("queryPlanner")
.get("winningPlan")
.get("inputStage")
.get("stage"),
"IDHACK",
.get("stage")
== "IDHACK"
)

query_plan = Test.objects(id=obj.id).only("id").explain()
self.assertEqual(
assert (
query_plan.get("queryPlanner")
.get("winningPlan")
.get("inputStage")
.get("stage"),
"IDHACK",
.get("stage")
== "IDHACK"
)

query_plan = Test.objects(a=1).only("a").exclude("id").explain()
self.assertEqual(
assert (
query_plan.get("queryPlanner")
.get("winningPlan")
.get("inputStage")
.get("stage"),
"IXSCAN",
.get("stage")
== "IXSCAN"
)
self.assertEqual(
query_plan.get("queryPlanner").get("winningPlan").get("stage"), "PROJECTION"
mongo_db = get_mongodb_version()
PROJECTION_STR = "PROJECTION" if mongo_db < MONGODB_42 else "PROJECTION_COVERED"
assert (
query_plan.get("queryPlanner").get("winningPlan").get("stage")
== PROJECTION_STR
)

query_plan = Test.objects(a=1).explain()
self.assertEqual(
assert (
query_plan.get("queryPlanner")
.get("winningPlan")
.get("inputStage")
.get("stage"),
"IXSCAN",
)
self.assertEqual(
query_plan.get("queryPlanner").get("winningPlan").get("stage"), "FETCH"
.get("stage")
== "IXSCAN"
)
assert query_plan.get("queryPlanner").get("winningPlan").get("stage") == "FETCH"

def test_index_on_id(self):
class BlogPost(Document):
@@ -500,9 +484,7 @@ class IndexesTest(unittest.TestCase):
BlogPost.drop_collection()

indexes = BlogPost.objects._collection.index_information()
self.assertEqual(
indexes["categories_1__id_1"]["key"], [("categories", 1), ("_id", 1)]
)
assert indexes["categories_1__id_1"]["key"] == [("categories", 1), ("_id", 1)]

def test_hint(self):
TAGS_INDEX_NAME = "tags_1"
@@ -518,30 +500,62 @@ class IndexesTest(unittest.TestCase):
BlogPost(tags=tags).save()

# Hinting by shape should work.
self.assertEqual(BlogPost.objects.hint([("tags", 1)]).count(), 10)
assert BlogPost.objects.hint([("tags", 1)]).count() == 10

# Hinting by index name should work.
self.assertEqual(BlogPost.objects.hint(TAGS_INDEX_NAME).count(), 10)
assert BlogPost.objects.hint(TAGS_INDEX_NAME).count() == 10

# Clearing the hint should work fine.
self.assertEqual(BlogPost.objects.hint().count(), 10)
self.assertEqual(BlogPost.objects.hint([("ZZ", 1)]).hint().count(), 10)
assert BlogPost.objects.hint().count() == 10
assert BlogPost.objects.hint([("ZZ", 1)]).hint().count() == 10

# Hinting on a non-existent index shape should fail.
with self.assertRaises(OperationFailure):
with pytest.raises(OperationFailure):
BlogPost.objects.hint([("ZZ", 1)]).count()

# Hinting on a non-existent index name should fail.
with self.assertRaises(OperationFailure):
with pytest.raises(OperationFailure):
BlogPost.objects.hint("Bad Name").count()

# Invalid shape argument (missing list brackets) should fail.
with self.assertRaises(ValueError):
with pytest.raises(ValueError):
BlogPost.objects.hint(("tags", 1)).count()

def test_collation(self):
base = {"locale": "en", "strength": 2}

class BlogPost(Document):
name = StringField()
meta = {
"indexes": [
{"fields": ["name"], "name": "name_index", "collation": base}
]
}

BlogPost.drop_collection()

names = ["tag1", "Tag2", "tag3", "Tag4", "tag5"]
for name in names:
BlogPost(name=name).save()

query_result = BlogPost.objects.collation(base).order_by("name")
assert [x.name for x in query_result] == sorted(names, key=lambda x: x.lower())
assert 5 == query_result.count()

query_result = BlogPost.objects.collation(Collation(**base)).order_by("name")
assert [x.name for x in query_result] == sorted(names, key=lambda x: x.lower())
assert 5 == query_result.count()

incorrect_collation = {"arndom": "wrdo"}
with pytest.raises(OperationFailure) as exc_info:
BlogPost.objects.collation(incorrect_collation).count()
assert "Missing expected field" in str(exc_info.value)

query_result = BlogPost.objects.collation({}).order_by("name")
assert [x.name for x in query_result] == sorted(names)

def test_unique(self):
"""Ensure that uniqueness constraints are applied to fields.
"""
"""Ensure that uniqueness constraints are applied to fields."""

class BlogPost(Document):
title = StringField()
@@ -554,11 +568,14 @@ class IndexesTest(unittest.TestCase):

# Two posts with the same slug is not allowed
post2 = BlogPost(title="test2", slug="test")
self.assertRaises(NotUniqueError, post2.save)
self.assertRaises(NotUniqueError, BlogPost.objects.insert, post2)
with pytest.raises(NotUniqueError):
post2.save()
with pytest.raises(NotUniqueError):
BlogPost.objects.insert(post2)

# Ensure backwards compatibility for errors
self.assertRaises(OperationError, post2.save)
with pytest.raises(OperationError):
post2.save()

def test_primary_key_unique_not_working(self):
"""Relates to #1445"""
@@ -568,14 +585,13 @@ class IndexesTest(unittest.TestCase):

Blog.drop_collection()

with self.assertRaises(OperationFailure) as ctx_err:
with pytest.raises(OperationFailure) as exc_info:
Blog(id="garbage").save()

# One of the errors below should happen. Which one depends on the
# PyMongo version and dict order.
err_msg = str(ctx_err.exception)
self.assertTrue(
any(
err_msg = str(exc_info.value)
assert any(
[
"The field 'unique' is not valid for an _id index specification"
in err_msg,
@@ -585,11 +601,9 @@ class IndexesTest(unittest.TestCase):
in err_msg,
]
)
)

def test_unique_with(self):
"""Ensure that unique_with constraints are applied to fields.
"""
"""Ensure that unique_with constraints are applied to fields."""

class Date(EmbeddedDocument):
year = IntField(db_field="yr")
@@ -610,11 +624,11 @@ class IndexesTest(unittest.TestCase):

# Now there will be two docs with the same slug and the same day: fail
post3 = BlogPost(title="test3", date=Date(year=2010), slug="test")
self.assertRaises(OperationError, post3.save)
with pytest.raises(OperationError):
post3.save()

def test_unique_embedded_document(self):
"""Ensure that uniqueness constraints are applied to fields on embedded documents.
"""
"""Ensure that uniqueness constraints are applied to fields on embedded documents."""

class SubDocument(EmbeddedDocument):
year = IntField(db_field="yr")
@@ -635,7 +649,8 @@ class IndexesTest(unittest.TestCase):

# Now there will be two docs with the same sub.slug
post3 = BlogPost(title="test3", sub=SubDocument(year=2010, slug="test"))
self.assertRaises(NotUniqueError, post3.save)
with pytest.raises(NotUniqueError):
post3.save()

def test_unique_embedded_document_in_list(self):
"""
@@ -665,7 +680,8 @@ class IndexesTest(unittest.TestCase):

post2 = BlogPost(title="test2", subs=[SubDocument(year=2014, slug="conflict")])

self.assertRaises(NotUniqueError, post2.save)
with pytest.raises(NotUniqueError):
post2.save()

def test_unique_embedded_document_in_sorted_list(self):
"""
@@ -695,12 +711,13 @@ class IndexesTest(unittest.TestCase):

# confirm that the unique index is created
indexes = BlogPost._get_collection().index_information()
self.assertIn("subs.slug_1", indexes)
self.assertTrue(indexes["subs.slug_1"]["unique"])
assert "subs.slug_1" in indexes
assert indexes["subs.slug_1"]["unique"]

post2 = BlogPost(title="test2", subs=[SubDocument(year=2014, slug="conflict")])

self.assertRaises(NotUniqueError, post2.save)
with pytest.raises(NotUniqueError):
post2.save()

def test_unique_embedded_document_in_embedded_document_list(self):
"""
@@ -730,12 +747,13 @@ class IndexesTest(unittest.TestCase):

# confirm that the unique index is created
indexes = BlogPost._get_collection().index_information()
self.assertIn("subs.slug_1", indexes)
self.assertTrue(indexes["subs.slug_1"]["unique"])
assert "subs.slug_1" in indexes
assert indexes["subs.slug_1"]["unique"]

post2 = BlogPost(title="test2", subs=[SubDocument(year=2014, slug="conflict")])

self.assertRaises(NotUniqueError, post2.save)
with pytest.raises(NotUniqueError):
post2.save()

def test_unique_with_embedded_document_and_embedded_unique(self):
"""Ensure that uniqueness constraints are applied to fields on
@@ -761,11 +779,13 @@ class IndexesTest(unittest.TestCase):

# Now there will be two docs with the same sub.slug
post3 = BlogPost(title="test3", sub=SubDocument(year=2010, slug="test"))
self.assertRaises(NotUniqueError, post3.save)
with pytest.raises(NotUniqueError):
post3.save()

# Now there will be two docs with the same title and year
post3 = BlogPost(title="test1", sub=SubDocument(year=2009, slug="test-1"))
self.assertRaises(NotUniqueError, post3.save)
with pytest.raises(NotUniqueError):
post3.save()

def test_ttl_indexes(self):
class Log(Document):
@@ -777,19 +797,7 @@ class IndexesTest(unittest.TestCase):
# Indexes are lazy so use list() to perform query
list(Log.objects)
info = Log.objects._collection.index_information()
self.assertEqual(3600, info["created_1"]["expireAfterSeconds"])

def test_index_drop_dups_silently_ignored(self):
class Customer(Document):
cust_id = IntField(unique=True, required=True)
meta = {
"indexes": ["cust_id"],
"index_drop_dups": True,
"allow_inheritance": False,
}

Customer.drop_collection()
Customer.objects.first()
assert 3600 == info["created_1"]["expireAfterSeconds"]

def test_unique_and_indexes(self):
"""Ensure that 'unique' constraints aren't overridden by
@@ -805,14 +813,14 @@ class IndexesTest(unittest.TestCase):
cust.save()

cust_dupe = Customer(cust_id=1)
with self.assertRaises(NotUniqueError):
with pytest.raises(NotUniqueError):
cust_dupe.save()

cust = Customer(cust_id=2)
cust.save()

# duplicate key on update
with self.assertRaises(NotUniqueError):
with pytest.raises(NotUniqueError):
cust.cust_id = 1
cust.save()

@@ -833,8 +841,8 @@ class IndexesTest(unittest.TestCase):
user = User(name="huangz", password="secret2")
user.save()

self.assertEqual(User.objects.count(), 1)
self.assertEqual(User.objects.get().password, "secret2")
assert User.objects.count() == 1
assert User.objects.get().password == "secret2"

def test_unique_and_primary_create(self):
"""Create a new record with a duplicate primary key
@@ -848,11 +856,11 @@ class IndexesTest(unittest.TestCase):
User.drop_collection()

User.objects.create(name="huangz", password="secret")
with self.assertRaises(NotUniqueError):
with pytest.raises(NotUniqueError):
User.objects.create(name="huangz", password="secret2")

self.assertEqual(User.objects.count(), 1)
self.assertEqual(User.objects.get().password, "secret")
assert User.objects.count() == 1
assert User.objects.get().password == "secret"

def test_index_with_pk(self):
"""Ensure you can use `pk` as part of a query"""
@@ -874,9 +882,9 @@ class IndexesTest(unittest.TestCase):
self.fail("Unbound local error at index + pk definition")

info = BlogPost.objects._collection.index_information()
info = [value["key"] for key, value in iteritems(info)]
info = [value["key"] for key, value in info.items()]
index_item = [("_id", 1), ("comments.comment_id", 1)]
self.assertIn(index_item, info)
assert index_item in info

def test_compound_key_embedded(self):
class CompoundKey(EmbeddedDocument):
@@ -890,10 +898,8 @@ class IndexesTest(unittest.TestCase):
my_key = CompoundKey(name="n", term="ok")
report = ReportEmbedded(text="OK", key=my_key).save()

self.assertEqual(
{"text": "OK", "_id": {"term": "ok", "name": "n"}}, report.to_mongo()
)
self.assertEqual(report, ReportEmbedded.objects.get(pk=my_key))
assert {"text": "OK", "_id": {"term": "ok", "name": "n"}} == report.to_mongo()
assert report == ReportEmbedded.objects.get(pk=my_key)

def test_compound_key_dictfield(self):
class ReportDictField(Document):
@@ -903,15 +909,13 @@ class IndexesTest(unittest.TestCase):
my_key = {"name": "n", "term": "ok"}
report = ReportDictField(text="OK", key=my_key).save()

self.assertEqual(
{"text": "OK", "_id": {"term": "ok", "name": "n"}}, report.to_mongo()
)
assert {"text": "OK", "_id": {"term": "ok", "name": "n"}} == report.to_mongo()

# We can't directly call ReportDictField.objects.get(pk=my_key),
# because dicts are unordered, and if the order in MongoDB is
# different than the one in `my_key`, this test will fail.
self.assertEqual(report, ReportDictField.objects.get(pk__name=my_key["name"]))
self.assertEqual(report, ReportDictField.objects.get(pk__term=my_key["term"]))
assert report == ReportDictField.objects.get(pk__name=my_key["name"])
assert report == ReportDictField.objects.get(pk__term=my_key["term"])

def test_string_indexes(self):
class MyDoc(Document):
@@ -919,9 +923,9 @@ class IndexesTest(unittest.TestCase):
meta = {"indexes": ["provider_ids.foo", "provider_ids.bar"]}

info = MyDoc.objects._collection.index_information()
info = [value["key"] for key, value in iteritems(info)]
self.assertIn([("provider_ids.foo", 1)], info)
self.assertIn([("provider_ids.bar", 1)], info)
info = [value["key"] for key, value in info.items()]
assert [("provider_ids.foo", 1)] in info
assert [("provider_ids.bar", 1)] in info

def test_sparse_compound_indexes(self):
class MyDoc(Document):
@@ -933,11 +937,10 @@ class IndexesTest(unittest.TestCase):
}

info = MyDoc.objects._collection.index_information()
self.assertEqual(
[("provider_ids.foo", 1), ("provider_ids.bar", 1)],
info["provider_ids.foo_1_provider_ids.bar_1"]["key"],
)
self.assertTrue(info["provider_ids.foo_1_provider_ids.bar_1"]["sparse"])
assert [("provider_ids.foo", 1), ("provider_ids.bar", 1)] == info[
"provider_ids.foo_1_provider_ids.bar_1"
]["key"]
assert info["provider_ids.foo_1_provider_ids.bar_1"]["sparse"]

def test_text_indexes(self):
class Book(Document):
@@ -945,9 +948,9 @@ class IndexesTest(unittest.TestCase):
meta = {"indexes": ["$title"]}

indexes = Book.objects._collection.index_information()
self.assertIn("title_text", indexes)
assert "title_text" in indexes
key = indexes["title_text"]["key"]
self.assertIn(("_fts", "text"), key)
assert ("_fts", "text") in key

def test_hashed_indexes(self):
|
||||
class Book(Document):
|
||||
@ -955,8 +958,8 @@ class IndexesTest(unittest.TestCase):
|
||||
meta = {"indexes": ["#ref_id"]}
|
||||
|
||||
indexes = Book.objects._collection.index_information()
|
||||
self.assertIn("ref_id_hashed", indexes)
|
||||
self.assertIn(("ref_id", "hashed"), indexes["ref_id_hashed"]["key"])
|
||||
assert "ref_id_hashed" in indexes
|
||||
assert ("ref_id", "hashed") in indexes["ref_id_hashed"]["key"]
|
||||
|
||||
def test_indexes_after_database_drop(self):
|
||||
"""
|
||||
@ -993,7 +996,8 @@ class IndexesTest(unittest.TestCase):
|
||||
|
||||
# Create Post #2
|
||||
post2 = BlogPost(title="test2", slug="test")
|
||||
self.assertRaises(NotUniqueError, post2.save)
|
||||
with pytest.raises(NotUniqueError):
|
||||
post2.save()
|
||||
finally:
|
||||
# Drop the temporary database at the end
|
||||
connection.drop_database("tempdatabase")
|
||||
@ -1035,20 +1039,13 @@ class IndexesTest(unittest.TestCase):
|
||||
del index_info[key][
|
||||
"ns"
|
||||
] # drop the index namespace - we don't care about that here, MongoDB 3+
|
||||
if "dropDups" in index_info[key]:
|
||||
del index_info[key][
|
||||
"dropDups"
|
||||
] # drop the index dropDups - it is deprecated in MongoDB 3+
|
||||
|
||||
self.assertEqual(
|
||||
index_info,
|
||||
{
|
||||
assert index_info == {
|
||||
"txt_1": {"key": [("txt", 1)], "background": False},
|
||||
"_id_": {"key": [("_id", 1)]},
|
||||
"txt2_1": {"key": [("txt2", 1)], "background": False},
|
||||
"_cls_1": {"key": [("_cls", 1)], "background": False},
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
def test_compound_index_underscore_cls_not_overwritten(self):
|
||||
"""
|
||||
@ -1071,7 +1068,7 @@ class IndexesTest(unittest.TestCase):
|
||||
TestDoc.ensure_indexes()
|
||||
|
||||
index_info = TestDoc._get_collection().index_information()
|
||||
self.assertIn("shard_1_1__cls_1_txt_1_1", index_info)
|
||||
assert "shard_1_1__cls_1_txt_1_1" in index_info
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
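The hunks above and below all apply one mechanical translation: unittest's assertion helpers become bare assert statements, and self.assertRaises becomes a pytest.raises block. A minimal standalone sketch (not part of this changeset; the divide function is hypothetical) of the three pytest idioms involved — the plain context manager, the captured exc_info, and the match= regex check:

import pytest


def divide(a, b):
    return a / b


def test_raises_plain():
    # replaces self.assertRaises(ZeroDivisionError, divide, 1, 0)
    with pytest.raises(ZeroDivisionError):
        divide(1, 0)


def test_raises_exc_info():
    # replaces "with self.assertRaises(...) as cm" plus str(cm.exception)
    with pytest.raises(ZeroDivisionError) as exc_info:
        divide(1, 0)
    assert "division by zero" in str(exc_info.value)


def test_raises_match():
    # match= is applied with re.search against str(exc_info.value)
    with pytest.raises(ZeroDivisionError, match="division by zero"):
        divide(1, 0)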
@@ -1,8 +1,7 @@
 # -*- coding: utf-8 -*-
 import unittest
 import warnings

-from six import iteritems
+import pytest

 from mongoengine import (
     BooleanField,
@@ -15,13 +14,11 @@ from mongoengine import (
     StringField,
 )
 from mongoengine.pymongo_support import list_collection_names
-from tests.utils import MongoDBTestCase
 from tests.fixtures import Base
-
-__all__ = ("InheritanceTest",)
+from tests.utils import MongoDBTestCase


-class InheritanceTest(MongoDBTestCase):
+class TestInheritance(MongoDBTestCase):
     def tearDown(self):
         for collection in list_collection_names(self.db):
             self.db.drop_collection(collection)
@@ -39,17 +36,16 @@ class InheritanceTest(MongoDBTestCase):
             meta = {"allow_inheritance": True}

         test_doc = DataDoc(name="test", embed=EmbedData(data="data"))
-        self.assertEqual(test_doc._cls, "DataDoc")
-        self.assertEqual(test_doc.embed._cls, "EmbedData")
+        assert test_doc._cls == "DataDoc"
+        assert test_doc.embed._cls == "EmbedData"
         test_doc.save()
         saved_doc = DataDoc.objects.with_id(test_doc.id)
-        self.assertEqual(test_doc._cls, saved_doc._cls)
-        self.assertEqual(test_doc.embed._cls, saved_doc.embed._cls)
+        assert test_doc._cls == saved_doc._cls
+        assert test_doc.embed._cls == saved_doc.embed._cls
         test_doc.delete()

     def test_superclasses(self):
-        """Ensure that the correct list of superclasses is assembled.
-        """
+        """Ensure that the correct list of superclasses is assembled."""

         class Animal(Document):
             meta = {"allow_inheritance": True}
@@ -69,12 +65,12 @@ class InheritanceTest(MongoDBTestCase):
         class Human(Mammal):
             pass

-        self.assertEqual(Animal._superclasses, ())
-        self.assertEqual(Fish._superclasses, ("Animal",))
-        self.assertEqual(Guppy._superclasses, ("Animal", "Animal.Fish"))
-        self.assertEqual(Mammal._superclasses, ("Animal",))
-        self.assertEqual(Dog._superclasses, ("Animal", "Animal.Mammal"))
-        self.assertEqual(Human._superclasses, ("Animal", "Animal.Mammal"))
+        assert Animal._superclasses == ()
+        assert Fish._superclasses == ("Animal",)
+        assert Guppy._superclasses == ("Animal", "Animal.Fish")
+        assert Mammal._superclasses == ("Animal",)
+        assert Dog._superclasses == ("Animal", "Animal.Mammal")
+        assert Human._superclasses == ("Animal", "Animal.Mammal")

     def test_external_superclasses(self):
         """Ensure that the correct list of super classes is assembled when
@@ -99,18 +95,12 @@ class InheritanceTest(MongoDBTestCase):
         class Human(Mammal):
             pass

-        self.assertEqual(Animal._superclasses, ("Base",))
-        self.assertEqual(Fish._superclasses, ("Base", "Base.Animal"))
-        self.assertEqual(
-            Guppy._superclasses, ("Base", "Base.Animal", "Base.Animal.Fish")
-        )
-        self.assertEqual(Mammal._superclasses, ("Base", "Base.Animal"))
-        self.assertEqual(
-            Dog._superclasses, ("Base", "Base.Animal", "Base.Animal.Mammal")
-        )
-        self.assertEqual(
-            Human._superclasses, ("Base", "Base.Animal", "Base.Animal.Mammal")
-        )
+        assert Animal._superclasses == ("Base",)
+        assert Fish._superclasses == ("Base", "Base.Animal")
+        assert Guppy._superclasses == ("Base", "Base.Animal", "Base.Animal.Fish")
+        assert Mammal._superclasses == ("Base", "Base.Animal")
+        assert Dog._superclasses == ("Base", "Base.Animal", "Base.Animal.Mammal")
+        assert Human._superclasses == ("Base", "Base.Animal", "Base.Animal.Mammal")

     def test_subclasses(self):
         """Ensure that the correct list of _subclasses (subclasses) is
@@ -135,24 +125,22 @@ class InheritanceTest(MongoDBTestCase):
         class Human(Mammal):
             pass

-        self.assertEqual(
-            Animal._subclasses,
-            (
+        assert Animal._subclasses == (
             "Animal",
             "Animal.Fish",
             "Animal.Fish.Guppy",
             "Animal.Mammal",
             "Animal.Mammal.Dog",
             "Animal.Mammal.Human",
-            ),
         )
-        self.assertEqual(Fish._subclasses, ("Animal.Fish", "Animal.Fish.Guppy"))
-        self.assertEqual(Guppy._subclasses, ("Animal.Fish.Guppy",))
-        self.assertEqual(
-            Mammal._subclasses,
-            ("Animal.Mammal", "Animal.Mammal.Dog", "Animal.Mammal.Human"),
+        assert Fish._subclasses == ("Animal.Fish", "Animal.Fish.Guppy")
+        assert Guppy._subclasses == ("Animal.Fish.Guppy",)
+        assert Mammal._subclasses == (
+            "Animal.Mammal",
+            "Animal.Mammal.Dog",
+            "Animal.Mammal.Human",
         )
-        self.assertEqual(Human._subclasses, ("Animal.Mammal.Human",))
+        assert Human._subclasses == ("Animal.Mammal.Human",)

     def test_external_subclasses(self):
         """Ensure that the correct list of _subclasses (subclasses) is
@@ -177,30 +165,22 @@ class InheritanceTest(MongoDBTestCase):
         class Human(Mammal):
             pass

-        self.assertEqual(
-            Animal._subclasses,
-            (
+        assert Animal._subclasses == (
             "Base.Animal",
             "Base.Animal.Fish",
             "Base.Animal.Fish.Guppy",
             "Base.Animal.Mammal",
             "Base.Animal.Mammal.Dog",
             "Base.Animal.Mammal.Human",
-            ),
         )
-        self.assertEqual(
-            Fish._subclasses, ("Base.Animal.Fish", "Base.Animal.Fish.Guppy")
-        )
-        self.assertEqual(Guppy._subclasses, ("Base.Animal.Fish.Guppy",))
-        self.assertEqual(
-            Mammal._subclasses,
-            (
+        assert Fish._subclasses == ("Base.Animal.Fish", "Base.Animal.Fish.Guppy")
+        assert Guppy._subclasses == ("Base.Animal.Fish.Guppy",)
+        assert Mammal._subclasses == (
             "Base.Animal.Mammal",
             "Base.Animal.Mammal.Dog",
             "Base.Animal.Mammal.Human",
-            ),
         )
-        self.assertEqual(Human._subclasses, ("Base.Animal.Mammal.Human",))
+        assert Human._subclasses == ("Base.Animal.Mammal.Human",)

     def test_dynamic_declarations(self):
         """Test that declaring an extra class updates meta data"""
@@ -208,37 +188,34 @@ class InheritanceTest(MongoDBTestCase):
         class Animal(Document):
             meta = {"allow_inheritance": True}

-        self.assertEqual(Animal._superclasses, ())
-        self.assertEqual(Animal._subclasses, ("Animal",))
+        assert Animal._superclasses == ()
+        assert Animal._subclasses == ("Animal",)

         # Test dynamically adding a class changes the meta data
         class Fish(Animal):
             pass

-        self.assertEqual(Animal._superclasses, ())
-        self.assertEqual(Animal._subclasses, ("Animal", "Animal.Fish"))
+        assert Animal._superclasses == ()
+        assert Animal._subclasses == ("Animal", "Animal.Fish")

-        self.assertEqual(Fish._superclasses, ("Animal",))
-        self.assertEqual(Fish._subclasses, ("Animal.Fish",))
+        assert Fish._superclasses == ("Animal",)
+        assert Fish._subclasses == ("Animal.Fish",)

         # Test dynamically adding an inherited class changes the meta data
         class Pike(Fish):
             pass

-        self.assertEqual(Animal._superclasses, ())
-        self.assertEqual(
-            Animal._subclasses, ("Animal", "Animal.Fish", "Animal.Fish.Pike")
-        )
+        assert Animal._superclasses == ()
+        assert Animal._subclasses == ("Animal", "Animal.Fish", "Animal.Fish.Pike")

-        self.assertEqual(Fish._superclasses, ("Animal",))
-        self.assertEqual(Fish._subclasses, ("Animal.Fish", "Animal.Fish.Pike"))
+        assert Fish._superclasses == ("Animal",)
+        assert Fish._subclasses == ("Animal.Fish", "Animal.Fish.Pike")

-        self.assertEqual(Pike._superclasses, ("Animal", "Animal.Fish"))
-        self.assertEqual(Pike._subclasses, ("Animal.Fish.Pike",))
+        assert Pike._superclasses == ("Animal", "Animal.Fish")
+        assert Pike._subclasses == ("Animal.Fish.Pike",)

     def test_inheritance_meta_data(self):
-        """Ensure that document may inherit fields from a superclass document.
-        """
+        """Ensure that document may inherit fields from a superclass document."""

         class Person(Document):
             name = StringField()
@@ -249,14 +226,13 @@ class InheritanceTest(MongoDBTestCase):
         class Employee(Person):
             salary = IntField()

-        self.assertEqual(
-            ["_cls", "age", "id", "name", "salary"], sorted(Employee._fields.keys())
+        assert ["_cls", "age", "id", "name", "salary"] == sorted(
+            Employee._fields.keys()
         )
-        self.assertEqual(Employee._get_collection_name(), Person._get_collection_name())
+        assert Employee._get_collection_name() == Person._get_collection_name()

     def test_inheritance_to_mongo_keys(self):
-        """Ensure that document may inherit fields from a superclass document.
-        """
+        """Ensure that document may inherit fields from a superclass document."""

         class Person(Document):
             name = StringField()
@@ -267,20 +243,20 @@ class InheritanceTest(MongoDBTestCase):
         class Employee(Person):
             salary = IntField()

-        self.assertEqual(
-            ["_cls", "age", "id", "name", "salary"], sorted(Employee._fields.keys())
+        assert ["_cls", "age", "id", "name", "salary"] == sorted(
+            Employee._fields.keys()
         )
-        self.assertEqual(
-            Person(name="Bob", age=35).to_mongo().keys(), ["_cls", "name", "age"]
-        )
-        self.assertEqual(
-            Employee(name="Bob", age=35, salary=0).to_mongo().keys(),
-            ["_cls", "name", "age", "salary"],
-        )
-        self.assertEqual(Employee._get_collection_name(), Person._get_collection_name())
+        assert Person(name="Bob", age=35).to_mongo().keys() == ["_cls", "name", "age"]
+        assert Employee(name="Bob", age=35, salary=0).to_mongo().keys() == [
+            "_cls",
+            "name",
+            "age",
+            "salary",
+        ]
+        assert Employee._get_collection_name() == Person._get_collection_name()

     def test_indexes_and_multiple_inheritance(self):
-        """ Ensure that all of the indexes are created for a document with
+        """Ensure that all of the indexes are created for a document with
         multiple inheritance.
         """

@@ -303,18 +279,12 @@ class InheritanceTest(MongoDBTestCase):

         C.ensure_indexes()

-        self.assertEqual(
-            sorted(
-                [idx["key"] for idx in C._get_collection().index_information().values()]
-            ),
-            sorted(
-                [[(u"_cls", 1), (u"b", 1)], [(u"_id", 1)], [(u"_cls", 1), (u"a", 1)]]
-            ),
-        )
+        assert sorted(
+            idx["key"] for idx in C._get_collection().index_information().values()
+        ) == sorted([[("_cls", 1), ("b", 1)], [("_id", 1)], [("_cls", 1), ("a", 1)]])

     def test_polymorphic_queries(self):
-        """Ensure that the correct subclasses are returned from a query
-        """
+        """Ensure that the correct subclasses are returned from a query"""

         class Animal(Document):
             meta = {"allow_inheritance": True}
@@ -340,13 +310,13 @@ class InheritanceTest(MongoDBTestCase):
         Human().save()

         classes = [obj.__class__ for obj in Animal.objects]
-        self.assertEqual(classes, [Animal, Fish, Mammal, Dog, Human])
+        assert classes == [Animal, Fish, Mammal, Dog, Human]

         classes = [obj.__class__ for obj in Mammal.objects]
-        self.assertEqual(classes, [Mammal, Dog, Human])
+        assert classes == [Mammal, Dog, Human]

         classes = [obj.__class__ for obj in Human.objects]
-        self.assertEqual(classes, [Human])
+        assert classes == [Human]

     def test_allow_inheritance(self):
         """Ensure that inheritance is disabled by default on simple
@@ -357,37 +327,34 @@ class InheritanceTest(MongoDBTestCase):
             name = StringField()

         # can't inherit because Animal didn't explicitly allow inheritance
-        with self.assertRaises(ValueError) as cm:
+        with pytest.raises(ValueError, match="Document Animal may not be subclassed"):

             class Dog(Animal):
                 pass

-        self.assertIn("Document Animal may not be subclassed", str(cm.exception))
-
         # Check that _cls etc aren't present on simple documents
         dog = Animal(name="dog").save()
-        self.assertEqual(dog.to_mongo().keys(), ["_id", "name"])
+        assert dog.to_mongo().keys() == ["_id", "name"]

         collection = self.db[Animal._get_collection_name()]
         obj = collection.find_one()
-        self.assertNotIn("_cls", obj)
+        assert "_cls" not in obj

     def test_cant_turn_off_inheritance_on_subclass(self):
-        """Ensure if inheritance is on in a subclass you cant turn it off.
-        """
+        """Ensure if inheritance is on in a subclass you cant turn it off."""

         class Animal(Document):
             name = StringField()
             meta = {"allow_inheritance": True}

-        with self.assertRaises(ValueError) as cm:
+        with pytest.raises(ValueError) as exc_info:

             class Mammal(Animal):
                 meta = {"allow_inheritance": False}

-        self.assertEqual(
-            str(cm.exception),
-            'Only direct subclasses of Document may set "allow_inheritance" to False',
+        assert (
+            str(exc_info.value)
+            == 'Only direct subclasses of Document may set "allow_inheritance" to False'
         )

     def test_allow_inheritance_abstract_document(self):
@@ -401,14 +368,14 @@ class InheritanceTest(MongoDBTestCase):
         class Animal(FinalDocument):
             name = StringField()

-        with self.assertRaises(ValueError) as cm:
+        with pytest.raises(ValueError):

             class Mammal(Animal):
                 pass

         # Check that _cls isn't present in simple documents
         doc = Animal(name="dog")
-        self.assertNotIn("_cls", doc.to_mongo())
+        assert "_cls" not in doc.to_mongo()

     def test_using_abstract_class_in_reference_field(self):
         # Ensures no regression of #1920
@@ -454,10 +421,10 @@ class InheritanceTest(MongoDBTestCase):
             name = StringField()

         berlin = EuropeanCity(name="Berlin", continent="Europe")
-        self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered))
-        self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered))
-        self.assertEqual(len(berlin._fields_ordered), 3)
-        self.assertEqual(berlin._fields_ordered[0], "id")
+        assert len(berlin._db_field_map) == len(berlin._fields_ordered)
+        assert len(berlin._reverse_db_field_map) == len(berlin._fields_ordered)
+        assert len(berlin._fields_ordered) == 3
+        assert berlin._fields_ordered[0] == "id"

     def test_auto_id_not_set_if_specific_in_parent_class(self):
         class City(Document):
@@ -469,10 +436,10 @@ class InheritanceTest(MongoDBTestCase):
             name = StringField()

         berlin = EuropeanCity(name="Berlin", continent="Europe")
-        self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered))
-        self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered))
-        self.assertEqual(len(berlin._fields_ordered), 3)
-        self.assertEqual(berlin._fields_ordered[0], "city_id")
+        assert len(berlin._db_field_map) == len(berlin._fields_ordered)
+        assert len(berlin._reverse_db_field_map) == len(berlin._fields_ordered)
+        assert len(berlin._fields_ordered) == 3
+        assert berlin._fields_ordered[0] == "city_id"

     def test_auto_id_vs_non_pk_id_field(self):
         class City(Document):
@@ -484,12 +451,12 @@ class InheritanceTest(MongoDBTestCase):
             name = StringField()

         berlin = EuropeanCity(name="Berlin", continent="Europe")
-        self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered))
-        self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered))
-        self.assertEqual(len(berlin._fields_ordered), 4)
-        self.assertEqual(berlin._fields_ordered[0], "auto_id_0")
+        assert len(berlin._db_field_map) == len(berlin._fields_ordered)
+        assert len(berlin._reverse_db_field_map) == len(berlin._fields_ordered)
+        assert len(berlin._fields_ordered) == 4
+        assert berlin._fields_ordered[0] == "auto_id_0"
         berlin.save()
-        self.assertEqual(berlin.pk, berlin.auto_id_0)
+        assert berlin.pk == berlin.auto_id_0

     def test_abstract_document_creation_does_not_fail(self):
         class City(Document):
@@ -497,10 +464,10 @@ class InheritanceTest(MongoDBTestCase):
             meta = {"abstract": True, "allow_inheritance": False}

         city = City(continent="asia")
-        self.assertEqual(None, city.pk)
+        assert city.pk is None
         # TODO: expected error? Shouldn't we create a new error type?
-        with self.assertRaises(KeyError):
-            setattr(city, "pk", 1)
+        with pytest.raises(KeyError):
+            city.pk = 1

     def test_allow_inheritance_embedded_document(self):
         """Ensure embedded documents respect inheritance."""
@@ -508,24 +475,23 @@ class InheritanceTest(MongoDBTestCase):
         class Comment(EmbeddedDocument):
             content = StringField()

-        with self.assertRaises(ValueError):
+        with pytest.raises(ValueError):

             class SpecialComment(Comment):
                 pass

         doc = Comment(content="test")
-        self.assertNotIn("_cls", doc.to_mongo())
+        assert "_cls" not in doc.to_mongo()

         class Comment(EmbeddedDocument):
             content = StringField()
             meta = {"allow_inheritance": True}

         doc = Comment(content="test")
-        self.assertIn("_cls", doc.to_mongo())
+        assert "_cls" in doc.to_mongo()

     def test_document_inheritance(self):
-        """Ensure mutliple inheritance of abstract documents
-        """
+        """Ensure mutliple inheritance of abstract documents"""

         class DateCreatedDocument(Document):
             meta = {"allow_inheritance": True, "abstract": True}
@@ -533,21 +499,15 @@ class InheritanceTest(MongoDBTestCase):
         class DateUpdatedDocument(Document):
             meta = {"allow_inheritance": True, "abstract": True}

-        try:
-
         class MyDocument(DateCreatedDocument, DateUpdatedDocument):
             pass

-        except Exception:
-            self.assertTrue(False, "Couldn't create MyDocument class")
-
     def test_abstract_documents(self):
         """Ensure that a document superclass can be marked as abstract
         thereby not using it as the name for the collection."""

         defaults = {
             "index_background": True,
-            "index_drop_dups": True,
             "index_opts": {"hello": "world"},
             "allow_inheritance": True,
             "queryset_class": "QuerySet",
@@ -574,22 +534,22 @@ class InheritanceTest(MongoDBTestCase):
         class Human(Mammal):
             pass

-        for k, v in iteritems(defaults):
+        for k, v in defaults.items():
             for cls in [Animal, Fish, Guppy]:
-                self.assertEqual(cls._meta[k], v)
+                assert cls._meta[k] == v

-        self.assertNotIn("collection", Animal._meta)
-        self.assertNotIn("collection", Mammal._meta)
+        assert "collection" not in Animal._meta
+        assert "collection" not in Mammal._meta

-        self.assertEqual(Animal._get_collection_name(), None)
-        self.assertEqual(Mammal._get_collection_name(), None)
+        assert Animal._get_collection_name() is None
+        assert Mammal._get_collection_name() is None

-        self.assertEqual(Fish._get_collection_name(), "fish")
-        self.assertEqual(Guppy._get_collection_name(), "fish")
-        self.assertEqual(Human._get_collection_name(), "human")
+        assert Fish._get_collection_name() == "fish"
+        assert Guppy._get_collection_name() == "fish"
+        assert Human._get_collection_name() == "human"

         # ensure that a subclass of a non-abstract class can't be abstract
-        with self.assertRaises(ValueError):
+        with pytest.raises(ValueError):

             class EvilHuman(Human):
                 evil = BooleanField(default=True)
@@ -603,7 +563,7 @@ class InheritanceTest(MongoDBTestCase):
         class B(A):
             pass

-        self.assertFalse(B._meta["abstract"])
+        assert not B._meta["abstract"]

     def test_inherited_collections(self):
         """Ensure that subclassed documents don't override parents'
@@ -649,8 +609,8 @@ class InheritanceTest(MongoDBTestCase):
         real_person = Drinker(drink=beer)
         real_person.save()

-        self.assertEqual(Drinker.objects[0].drink.name, red_bull.name)
-        self.assertEqual(Drinker.objects[1].drink.name, beer.name)
+        assert Drinker.objects[0].drink.name == red_bull.name
+        assert Drinker.objects[1].drink.name == beer.name


 if __name__ == "__main__":
File diff suppressed because it is too large
@@ -1,21 +1,14 @@
 import unittest
 import uuid

-from nose.plugins.skip import SkipTest
 from datetime import datetime

 from bson import ObjectId

-import pymongo
-
 from mongoengine import *
-
-__all__ = ("TestJson",)
+from tests.utils import MongoDBTestCase


-class TestJson(unittest.TestCase):
-    def setUp(self):
-        connect(db="mongoenginetest")
-
+class TestJson(MongoDBTestCase):
     def test_json_names(self):
         """
         Going to test reported issue:
@@ -39,7 +32,7 @@ class TestJson(unittest.TestCase):

         expected_json = """{"embedded":{"string":"Inner Hello"},"string":"Hello"}"""

-        self.assertEqual(doc_json, expected_json)
+        assert doc_json == expected_json

     def test_json_simple(self):
         class Embedded(EmbeddedDocument):
@@ -59,9 +52,9 @@ class TestJson(unittest.TestCase):

         doc_json = doc.to_json(sort_keys=True, separators=(",", ":"))
         expected_json = """{"embedded_field":{"string":"Hi"},"string":"Hi"}"""
-        self.assertEqual(doc_json, expected_json)
+        assert doc_json == expected_json

-        self.assertEqual(doc, Doc.from_json(doc.to_json()))
+        assert doc == Doc.from_json(doc.to_json())

     def test_json_complex(self):
         class EmbeddedDoc(EmbeddedDocument):
@@ -106,7 +99,7 @@ class TestJson(unittest.TestCase):
                 return json.loads(self.to_json()) == json.loads(other.to_json())

         doc = Doc()
-        self.assertEqual(doc, Doc.from_json(doc.to_json()))
+        assert doc == Doc.from_json(doc.to_json())


 if __name__ == "__main__":
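A small sketch (not part of the diff) of the round trip the JSON tests above assert; the Note document is hypothetical. No save() is involved: to_json serializes the in-memory document, and keyword arguments such as sort_keys and separators are forwarded to the underlying JSON dump, which is why the tests can pin an exact output string:

from mongoengine import Document, StringField


class Note(Document):
    title = StringField()


note = Note(title="Hello")
payload = note.to_json(sort_keys=True, separators=(",", ":"))
restored = Note.from_json(payload)
assert restored.title == note.title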
@@ -1,26 +1,22 @@
 # -*- coding: utf-8 -*-
 import unittest
 from datetime import datetime

+import pytest
+
 from mongoengine import *
-
-__all__ = ("ValidatorErrorTest",)
+from tests.utils import MongoDBTestCase


-class ValidatorErrorTest(unittest.TestCase):
-    def setUp(self):
-        connect(db="mongoenginetest")
-
+class TestValidatorError(MongoDBTestCase):
     def test_to_dict(self):
-        """Ensure a ValidationError handles error to_dict correctly.
-        """
+        """Ensure a ValidationError handles error to_dict correctly."""
         error = ValidationError("root")
-        self.assertEqual(error.to_dict(), {})
+        assert error.to_dict() == {}

         # 1st level error schema
         error.errors = {"1st": ValidationError("bad 1st")}
-        self.assertIn("1st", error.to_dict())
-        self.assertEqual(error.to_dict()["1st"], "bad 1st")
+        assert "1st" in error.to_dict()
+        assert error.to_dict()["1st"] == "bad 1st"

         # 2nd level error schema
         error.errors = {
@@ -28,10 +24,10 @@ class ValidatorErrorTest(unittest.TestCase):
                 "bad 1st", errors={"2nd": ValidationError("bad 2nd")}
             )
         }
-        self.assertIn("1st", error.to_dict())
-        self.assertIsInstance(error.to_dict()["1st"], dict)
-        self.assertIn("2nd", error.to_dict()["1st"])
-        self.assertEqual(error.to_dict()["1st"]["2nd"], "bad 2nd")
+        assert "1st" in error.to_dict()
+        assert isinstance(error.to_dict()["1st"], dict)
+        assert "2nd" in error.to_dict()["1st"]
+        assert error.to_dict()["1st"]["2nd"] == "bad 2nd"

         # moar levels
         error.errors = {
@@ -49,13 +45,13 @@ class ValidatorErrorTest(unittest.TestCase):
                 },
             )
         }
-        self.assertIn("1st", error.to_dict())
-        self.assertIn("2nd", error.to_dict()["1st"])
-        self.assertIn("3rd", error.to_dict()["1st"]["2nd"])
-        self.assertIn("4th", error.to_dict()["1st"]["2nd"]["3rd"])
-        self.assertEqual(error.to_dict()["1st"]["2nd"]["3rd"]["4th"], "Inception")
+        assert "1st" in error.to_dict()
+        assert "2nd" in error.to_dict()["1st"]
+        assert "3rd" in error.to_dict()["1st"]["2nd"]
+        assert "4th" in error.to_dict()["1st"]["2nd"]["3rd"]
+        assert error.to_dict()["1st"]["2nd"]["3rd"]["4th"] == "Inception"

-        self.assertEqual(error.message, "root(2nd.3rd.4th.Inception: ['1st'])")
+        assert error.message == "root(2nd.3rd.4th.Inception: ['1st'])"

     def test_model_validation(self):
         class User(Document):
@@ -65,19 +61,19 @@ class ValidatorErrorTest(unittest.TestCase):
         try:
             User().validate()
         except ValidationError as e:
-            self.assertIn("User:None", e.message)
-            self.assertEqual(
-                e.to_dict(),
-                {"username": "Field is required", "name": "Field is required"},
-            )
+            assert "User:None" in e.message
+            assert e.to_dict() == {
+                "username": "Field is required",
+                "name": "Field is required",
+            }

         user = User(username="RossC0", name="Ross").save()
         user.name = None
         try:
             user.save()
         except ValidationError as e:
-            self.assertIn("User:RossC0", e.message)
-            self.assertEqual(e.to_dict(), {"name": "Field is required"})
+            assert "User:RossC0" in e.message
+            assert e.to_dict() == {"name": "Field is required"}

     def test_fields_rewrite(self):
         class BasePerson(Document):
@@ -89,28 +85,30 @@ class ValidatorErrorTest(unittest.TestCase):
             name = StringField(required=True)

         p = Person(age=15)
-        self.assertRaises(ValidationError, p.validate)
+        with pytest.raises(ValidationError):
+            p.validate()

     def test_embedded_document_validation(self):
-        """Ensure that embedded documents may be validated.
-        """
+        """Ensure that embedded documents may be validated."""

         class Comment(EmbeddedDocument):
             date = DateTimeField()
             content = StringField(required=True)

         comment = Comment()
-        self.assertRaises(ValidationError, comment.validate)
+        with pytest.raises(ValidationError):
+            comment.validate()

         comment.content = "test"
         comment.validate()

         comment.date = 4
-        self.assertRaises(ValidationError, comment.validate)
+        with pytest.raises(ValidationError):
+            comment.validate()

         comment.date = datetime.now()
         comment.validate()
-        self.assertEqual(comment._instance, None)
+        assert comment._instance is None

     def test_embedded_db_field_validate(self):
         class SubDoc(EmbeddedDocument):
@@ -123,10 +121,8 @@ class ValidatorErrorTest(unittest.TestCase):
         try:
             Doc(id="bad").validate()
         except ValidationError as e:
-            self.assertIn("SubDoc:None", e.message)
-            self.assertEqual(
-                e.to_dict(), {"e": {"val": "OK could not be converted to int"}}
-            )
+            assert "SubDoc:None" in e.message
+            assert e.to_dict() == {"e": {"val": "OK could not be converted to int"}}

         Doc.drop_collection()

@@ -134,18 +130,16 @@ class ValidatorErrorTest(unittest.TestCase):

         doc = Doc.objects.first()
         keys = doc._data.keys()
-        self.assertEqual(2, len(keys))
-        self.assertIn("e", keys)
-        self.assertIn("id", keys)
+        assert 2 == len(keys)
+        assert "e" in keys
+        assert "id" in keys

         doc.e.val = "OK"
         try:
             doc.save()
         except ValidationError as e:
-            self.assertIn("Doc:test", e.message)
-            self.assertEqual(
-                e.to_dict(), {"e": {"val": "OK could not be converted to int"}}
-            )
+            assert "Doc:test" in e.message
+            assert e.to_dict() == {"e": {"val": "OK could not be converted to int"}}

     def test_embedded_weakref(self):
         class SubDoc(EmbeddedDocument):
@@ -161,14 +155,16 @@ class ValidatorErrorTest(unittest.TestCase):

         s = SubDoc()

-        self.assertRaises(ValidationError, s.validate)
+        with pytest.raises(ValidationError):
+            s.validate()

         d1.e = s
         d2.e = s

         del d1

-        self.assertRaises(ValidationError, d2.validate)
+        with pytest.raises(ValidationError):
+            d2.validate()

     def test_parent_reference_in_child_document(self):
         """
@@ -214,10 +210,7 @@ class ValidatorErrorTest(unittest.TestCase):
         child.reference = parent

         # Saving the child should not raise a ValidationError
-        try:
         child.save()
-        except ValidationError as e:
-            self.fail("ValidationError raised: %s" % e.message)


 if __name__ == "__main__":
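A short sketch (not part of the diff) of the nesting behavior the validation tests above pin down: ValidationError.errors can hold child errors keyed by field name, and to_dict() flattens each leaf to its message string:

from mongoengine import ValidationError

error = ValidationError("root")
error.errors = {
    "1st": ValidationError("bad 1st", errors={"2nd": ValidationError("bad 2nd")})
}
# nested errors surface as nested dicts ending in message strings
assert "1st" in error.to_dict()
assert error.to_dict()["1st"]["2nd"] == "bad 2nd"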
@@ -1,3 +0,0 @@
-from .fields import *
-from .file_tests import *
-from .geo import *
@@ -1,29 +1,25 @@
 # -*- coding: utf-8 -*-
 import uuid

-from nose.plugins.skip import SkipTest
-import six
+import pytest
 from bson import Binary

 from mongoengine import *
 from tests.utils import MongoDBTestCase

-BIN_VALUE = six.b(
-    "\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5"
+BIN_VALUE = "\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5".encode(
+    "latin-1"
 )


 class TestBinaryField(MongoDBTestCase):
     def test_binary_fields(self):
-        """Ensure that binary fields can be stored and retrieved.
-        """
+        """Ensure that binary fields can be stored and retrieved."""

         class Attachment(Document):
             content_type = StringField()
             blob = BinaryField()

-        BLOB = six.b("\xe6\x00\xc4\xff\x07")
+        BLOB = b"\xe6\x00\xc4\xff\x07"
         MIME_TYPE = "application/octet-stream"

         Attachment.drop_collection()
@@ -32,12 +28,11 @@ class TestBinaryField(MongoDBTestCase):
         attachment.save()

         attachment_1 = Attachment.objects().first()
-        self.assertEqual(MIME_TYPE, attachment_1.content_type)
-        self.assertEqual(BLOB, six.binary_type(attachment_1.blob))
+        assert MIME_TYPE == attachment_1.content_type
+        assert BLOB == bytes(attachment_1.blob)

     def test_validation_succeeds(self):
-        """Ensure that valid values can be assigned to binary fields.
-        """
+        """Ensure that valid values can be assigned to binary fields."""

         class AttachmentRequired(Document):
             blob = BinaryField(required=True)
@@ -46,13 +41,15 @@ class TestBinaryField(MongoDBTestCase):
             blob = BinaryField(max_bytes=4)

         attachment_required = AttachmentRequired()
-        self.assertRaises(ValidationError, attachment_required.validate)
-        attachment_required.blob = Binary(six.b("\xe6\x00\xc4\xff\x07"))
+        with pytest.raises(ValidationError):
+            attachment_required.validate()
+        attachment_required.blob = Binary(b"\xe6\x00\xc4\xff\x07")
         attachment_required.validate()

-        _5_BYTES = six.b("\xe6\x00\xc4\xff\x07")
-        _4_BYTES = six.b("\xe6\x00\xc4\xff")
-        self.assertRaises(ValidationError, AttachmentSizeLimit(blob=_5_BYTES).validate)
+        _5_BYTES = b"\xe6\x00\xc4\xff\x07"
+        _4_BYTES = b"\xe6\x00\xc4\xff"
+        with pytest.raises(ValidationError):
+            AttachmentSizeLimit(blob=_5_BYTES).validate()
         AttachmentSizeLimit(blob=_4_BYTES).validate()

     def test_validation_fails(self):
@@ -61,8 +58,9 @@ class TestBinaryField(MongoDBTestCase):
         class Attachment(Document):
             blob = BinaryField()

-        for invalid_data in (2, u"Im_a_unicode", ["some_str"]):
-            self.assertRaises(ValidationError, Attachment(blob=invalid_data).validate)
+        for invalid_data in (2, "Im_a_unicode", ["some_str"]):
+            with pytest.raises(ValidationError):
+                Attachment(blob=invalid_data).validate()

     def test__primary(self):
         class Attachment(Document):
@@ -71,23 +69,21 @@ class TestBinaryField(MongoDBTestCase):
         Attachment.drop_collection()
         binary_id = uuid.uuid4().bytes
         att = Attachment(id=binary_id).save()
-        self.assertEqual(1, Attachment.objects.count())
-        self.assertEqual(1, Attachment.objects.filter(id=att.id).count())
+        assert 1 == Attachment.objects.count()
+        assert 1 == Attachment.objects.filter(id=att.id).count()
         att.delete()
-        self.assertEqual(0, Attachment.objects.count())
+        assert 0 == Attachment.objects.count()

     def test_primary_filter_by_binary_pk_as_str(self):
-        raise SkipTest("Querying by id as string is not currently supported")
-
         class Attachment(Document):
             id = BinaryField(primary_key=True)

         Attachment.drop_collection()
         binary_id = uuid.uuid4().bytes
         att = Attachment(id=binary_id).save()
-        self.assertEqual(1, Attachment.objects.filter(id=binary_id).count())
+        assert 1 == Attachment.objects.filter(id=binary_id).count()
         att.delete()
-        self.assertEqual(0, Attachment.objects.count())
+        assert 0 == Attachment.objects.count()

     def test_match_querying_with_bytes(self):
         class MyDocument(Document):
@@ -97,7 +93,7 @@ class TestBinaryField(MongoDBTestCase):

         doc = MyDocument(bin_field=BIN_VALUE).save()
         matched_doc = MyDocument.objects(bin_field=BIN_VALUE).first()
-        self.assertEqual(matched_doc.id, doc.id)
+        assert matched_doc.id == doc.id

     def test_match_querying_with_binary(self):
         class MyDocument(Document):
@@ -108,7 +104,7 @@ class TestBinaryField(MongoDBTestCase):
         doc = MyDocument(bin_field=BIN_VALUE).save()

         matched_doc = MyDocument.objects(bin_field=Binary(BIN_VALUE)).first()
-        self.assertEqual(matched_doc.id, doc.id)
+        assert matched_doc.id == doc.id

     def test_modify_operation__set(self):
         """Ensures no regression of bug #1127"""
@@ -122,11 +118,8 @@ class TestBinaryField(MongoDBTestCase):
         doc = MyDocument.objects(some_field="test").modify(
             upsert=True, new=True, set__bin_field=BIN_VALUE
         )
-        self.assertEqual(doc.some_field, "test")
-        if six.PY3:
-            self.assertEqual(doc.bin_field, BIN_VALUE)
-        else:
-            self.assertEqual(doc.bin_field, Binary(BIN_VALUE))
+        assert doc.some_field == "test"
+        assert doc.bin_field == BIN_VALUE

     def test_update_one(self):
         """Ensures no regression of bug #1127"""
@@ -136,15 +129,12 @@ class TestBinaryField(MongoDBTestCase):

         MyDocument.drop_collection()

-        bin_data = six.b("\xe6\x00\xc4\xff\x07")
+        bin_data = b"\xe6\x00\xc4\xff\x07"
         doc = MyDocument(bin_field=bin_data).save()

         n_updated = MyDocument.objects(bin_field=bin_data).update_one(
             bin_field=BIN_VALUE
         )
-        self.assertEqual(n_updated, 1)
+        assert n_updated == 1
         fetched = MyDocument.objects.with_id(doc.id)
-        if six.PY3:
-            self.assertEqual(fetched.bin_field, BIN_VALUE)
-        else:
-            self.assertEqual(fetched.bin_field, Binary(BIN_VALUE))
+        assert fetched.bin_field == BIN_VALUE
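A note on the six.b() removals above, with a sketch (not part of the diff): on Python 3, six.b(s) encodes the literal with latin-1, and latin-1 maps every code point 0x00-0xFF to the identical byte value, so a bytes literal or an explicit .encode("latin-1") yields exactly the same bytes:

# same bytes three ways; latin-1 is a byte-for-byte mapping below 0x100
BLOB = "\xe6\x00\xc4\xff\x07".encode("latin-1")
assert BLOB == b"\xe6\x00\xc4\xff\x07"
assert list(BLOB) == [0xE6, 0x00, 0xC4, 0xFF, 0x07]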
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
-from mongoengine import *
+import pytest

+from mongoengine import *
 from tests.utils import MongoDBTestCase, get_as_pymongo


@@ -11,7 +11,18 @@ class TestBooleanField(MongoDBTestCase):

         person = Person(admin=True)
         person.save()
-        self.assertEqual(get_as_pymongo(person), {"_id": person.id, "admin": True})
+        assert get_as_pymongo(person) == {"_id": person.id, "admin": True}

+    def test_construction_does_not_fail_uncastable_value(self):
+        class BoolFail:
+            def __bool__(self):
+                return "bogus"
+
+        class Person(Document):
+            admin = BooleanField()
+
+        person = Person(admin=BoolFail())
+        person.admin == "bogus"

     def test_validation(self):
         """Ensure that invalid values cannot be assigned to boolean
@@ -26,11 +37,14 @@ class TestBooleanField(MongoDBTestCase):
         person.validate()

         person.admin = 2
-        self.assertRaises(ValidationError, person.validate)
+        with pytest.raises(ValidationError):
+            person.validate()
         person.admin = "Yes"
-        self.assertRaises(ValidationError, person.validate)
+        with pytest.raises(ValidationError):
+            person.validate()
         person.admin = "False"
-        self.assertRaises(ValidationError, person.validate)
+        with pytest.raises(ValidationError):
+            person.validate()

     def test_weirdness_constructor(self):
         """When attribute is set in contructor, it gets cast into a bool
@@ -42,7 +56,7 @@ class TestBooleanField(MongoDBTestCase):
             admin = BooleanField()

         new_person = Person(admin="False")
-        self.assertTrue(new_person.admin)
+        assert new_person.admin

         new_person = Person(admin="0")
-        self.assertTrue(new_person.admin)
+        assert new_person.admin
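The new test_construction_does_not_fail_uncastable_value hunk and test_weirdness_constructor above both hinge on Python truthiness: per the test's own docstring, a value set in the constructor "gets cast into a bool", so any non-empty string is truthy. A trivial sketch (not part of the diff):

assert bool("False") is True  # non-empty string, hence the admin="False" case
assert bool("0") is True      # likewise for "0"
assert bool("") is False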
@@ -1,12 +1,29 @@
 # -*- coding: utf-8 -*-
 from decimal import Decimal

-from mongoengine import *
+import pytest

+from mongoengine import (
+    CachedReferenceField,
+    DecimalField,
+    Document,
+    EmbeddedDocument,
+    EmbeddedDocumentField,
+    InvalidDocumentError,
+    ListField,
+    ReferenceField,
+    StringField,
+    ValidationError,
+)
 from tests.utils import MongoDBTestCase


 class TestCachedReferenceField(MongoDBTestCase):
+    def test_constructor_fail_bad_document_type(self):
+        with pytest.raises(
+            ValidationError, match="must be a document class or a string"
+        ):
+            CachedReferenceField(document_type=0)
+
     def test_get_and_save(self):
         """
         Tests #1047: CachedReferenceField creates DBRefs on to_python,
@@ -46,29 +63,29 @@ class TestCachedReferenceField(MongoDBTestCase):
         a = Animal(name="Leopard", tag="heavy")
         a.save()

-        self.assertEqual(Animal._cached_reference_fields, [Ocorrence.animal])
+        assert Animal._cached_reference_fields == [Ocorrence.animal]
         o = Ocorrence(person="teste", animal=a)
         o.save()

         p = Ocorrence(person="Wilson")
         p.save()

-        self.assertEqual(Ocorrence.objects(animal=None).count(), 1)
+        assert Ocorrence.objects(animal=None).count() == 1

-        self.assertEqual(a.to_mongo(fields=["tag"]), {"tag": "heavy", "_id": a.pk})
+        assert a.to_mongo(fields=["tag"]) == {"tag": "heavy", "_id": a.pk}

-        self.assertEqual(o.to_mongo()["animal"]["tag"], "heavy")
+        assert o.to_mongo()["animal"]["tag"] == "heavy"

         # counts
         Ocorrence(person="teste 2").save()
         Ocorrence(person="teste 3").save()

         count = Ocorrence.objects(animal__tag="heavy").count()
-        self.assertEqual(count, 1)
+        assert count == 1

         ocorrence = Ocorrence.objects(animal__tag="heavy").first()
-        self.assertEqual(ocorrence.person, "teste")
-        self.assertIsInstance(ocorrence.animal, Animal)
+        assert ocorrence.person == "teste"
+        assert isinstance(ocorrence.animal, Animal)

     def test_with_decimal(self):
         class PersonAuto(Document):
@@ -88,10 +105,11 @@ class TestCachedReferenceField(MongoDBTestCase):
         s = SocialTest(group="dev", person=p)
         s.save()

-        self.assertEqual(
-            SocialTest.objects._collection.find_one({"person.salary": 7000.00}),
-            {"_id": s.pk, "group": s.group, "person": {"_id": p.pk, "salary": 7000.00}},
-        )
+        assert SocialTest.objects._collection.find_one({"person.salary": 7000.00}) == {
+            "_id": s.pk,
+            "group": s.group,
+            "person": {"_id": p.pk, "salary": 7000.00},
+        }

     def test_cached_reference_field_reference(self):
         class Group(Document):
@@ -131,18 +149,15 @@ class TestCachedReferenceField(MongoDBTestCase):
         s2 = SocialData(obs="testing 321", person=p3, tags=["tag3", "tag4"])
         s2.save()

-        self.assertEqual(
-            SocialData.objects._collection.find_one({"tags": "tag2"}),
-            {
+        assert SocialData.objects._collection.find_one({"tags": "tag2"}) == {
             "_id": s1.pk,
             "obs": "testing 123",
             "tags": ["tag1", "tag2"],
             "person": {"_id": p1.pk, "group": g1.pk},
-            },
-        )
+        }

-        self.assertEqual(SocialData.objects(person__group=g2).count(), 1)
-        self.assertEqual(SocialData.objects(person__group=g2).first(), s2)
+        assert SocialData.objects(person__group=g2).count() == 1
+        assert SocialData.objects(person__group=g2).first() == s2

     def test_cached_reference_field_push_with_fields(self):
         class Product(Document):
@@ -157,26 +172,20 @@ class TestCachedReferenceField(MongoDBTestCase):
         product1 = Product(name="abc").save()
         product2 = Product(name="def").save()
         basket = Basket(products=[product1]).save()
-        self.assertEqual(
-            Basket.objects._collection.find_one(),
-            {
+        assert Basket.objects._collection.find_one() == {
             "_id": basket.pk,
             "products": [{"_id": product1.pk, "name": product1.name}],
-            },
-        )
+        }
         # push to list
         basket.update(push__products=product2)
         basket.reload()
-        self.assertEqual(
-            Basket.objects._collection.find_one(),
-            {
+        assert Basket.objects._collection.find_one() == {
             "_id": basket.pk,
             "products": [
                 {"_id": product1.pk, "name": product1.name},
                 {"_id": product2.pk, "name": product2.name},
             ],
-            },
-        )
+        }

     def test_cached_reference_field_update_all(self):
         class Person(Document):
@@ -194,37 +203,31 @@ class TestCachedReferenceField(MongoDBTestCase):
         a2.save()

         a2 = Person.objects.with_id(a2.id)
-        self.assertEqual(a2.father.tp, a1.tp)
+        assert a2.father.tp == a1.tp

-        self.assertEqual(
-            dict(a2.to_mongo()),
-            {
+        assert dict(a2.to_mongo()) == {
             "_id": a2.pk,
-                "name": u"Wilson Junior",
-                "tp": u"pf",
-                "father": {"_id": a1.pk, "tp": u"pj"},
-            },
-        )
+            "name": "Wilson Junior",
+            "tp": "pf",
+            "father": {"_id": a1.pk, "tp": "pj"},
+        }

-        self.assertEqual(Person.objects(father=a1)._query, {"father._id": a1.pk})
-        self.assertEqual(Person.objects(father=a1).count(), 1)
+        assert Person.objects(father=a1)._query == {"father._id": a1.pk}
+        assert Person.objects(father=a1).count() == 1

         Person.objects.update(set__tp="pf")
         Person.father.sync_all()

         a2.reload()
-        self.assertEqual(
-            dict(a2.to_mongo()),
-            {
+        assert dict(a2.to_mongo()) == {
             "_id": a2.pk,
-                "name": u"Wilson Junior",
-                "tp": u"pf",
-                "father": {"_id": a1.pk, "tp": u"pf"},
-            },
-        )
+            "name": "Wilson Junior",
+            "tp": "pf",
+            "father": {"_id": a1.pk, "tp": "pf"},
+        }

     def test_cached_reference_fields_on_embedded_documents(self):
-        with self.assertRaises(InvalidDocumentError):
+        with pytest.raises(InvalidDocumentError):

             class Test(Document):
                 name = StringField()
@@ -255,15 +258,12 @@ class TestCachedReferenceField(MongoDBTestCase):
         a1.save()

         a2.reload()
-        self.assertEqual(
-            dict(a2.to_mongo()),
-            {
+        assert dict(a2.to_mongo()) == {
             "_id": a2.pk,
             "name": "Wilson Junior",
             "tp": "pf",
             "father": {"_id": a1.pk, "tp": "pf"},
-            },
-        )
+        }

     def test_cached_reference_auto_sync_disabled(self):
         class Persone(Document):
@@ -284,15 +284,12 @@ class TestCachedReferenceField(MongoDBTestCase):
         a1.tp = "pf"
         a1.save()

-        self.assertEqual(
-            Persone.objects._collection.find_one({"_id": a2.pk}),
-            {
+        assert Persone.objects._collection.find_one({"_id": a2.pk}) == {
             "_id": a2.pk,
             "name": "Wilson Junior",
             "tp": "pf",
             "father": {"_id": a1.pk, "tp": "pj"},
-            },
-        )
+        }

     def test_cached_reference_embedded_fields(self):
         class Owner(EmbeddedDocument):
@@ -320,28 +317,29 @@ class TestCachedReferenceField(MongoDBTestCase):

         o = Ocorrence(person="teste", animal=a)
         o.save()
-        self.assertEqual(
-            dict(a.to_mongo(fields=["tag", "owner.tp"])),
-            {"_id": a.pk, "tag": "heavy", "owner": {"t": "u"}},
-        )
-        self.assertEqual(o.to_mongo()["animal"]["tag"], "heavy")
-        self.assertEqual(o.to_mongo()["animal"]["owner"]["t"], "u")
+        assert dict(a.to_mongo(fields=["tag", "owner.tp"])) == {
+            "_id": a.pk,
+            "tag": "heavy",
+            "owner": {"t": "u"},
+        }
+        assert o.to_mongo()["animal"]["tag"] == "heavy"
+        assert o.to_mongo()["animal"]["owner"]["t"] == "u"

         # Check to_mongo with fields
-        self.assertNotIn("animal", o.to_mongo(fields=["person"]))
+        assert "animal" not in o.to_mongo(fields=["person"])

         # counts
         Ocorrence(person="teste 2").save()
         Ocorrence(person="teste 3").save()

         count = Ocorrence.objects(animal__tag="heavy", animal__owner__tp="u").count()
-        self.assertEqual(count, 1)
+        assert count == 1

         ocorrence = Ocorrence.objects(
             animal__tag="heavy", animal__owner__tp="u"
         ).first()
-        self.assertEqual(ocorrence.person, "teste")
-        self.assertIsInstance(ocorrence.animal, Animal)
+        assert ocorrence.person == "teste"
+        assert isinstance(ocorrence.animal, Animal)

     def test_cached_reference_embedded_list_fields(self):
         class Owner(EmbeddedDocument):
@@ -370,13 +368,14 @@ class TestCachedReferenceField(MongoDBTestCase):

         o = Ocorrence(person="teste 2", animal=a)
         o.save()
-        self.assertEqual(
-            dict(a.to_mongo(fields=["tag", "owner.tags"])),
-            {"_id": a.pk, "tag": "heavy", "owner": {"tags": ["cool", "funny"]}},
-        )
+        assert dict(a.to_mongo(fields=["tag", "owner.tags"])) == {
+            "_id": a.pk,
+            "tag": "heavy",
+            "owner": {"tags": ["cool", "funny"]},
+        }

-        self.assertEqual(o.to_mongo()["animal"]["tag"], "heavy")
-        self.assertEqual(o.to_mongo()["animal"]["owner"]["tags"], ["cool", "funny"])
+        assert o.to_mongo()["animal"]["tag"] == "heavy"
+        assert o.to_mongo()["animal"]["owner"]["tags"] == ["cool", "funny"]

         # counts
         Ocorrence(person="teste 2").save()
@@ -385,10 +384,10 @@ class TestCachedReferenceField(MongoDBTestCase):
         query = Ocorrence.objects(
             animal__tag="heavy", animal__owner__tags="cool"
         )._query
-        self.assertEqual(query, {"animal.owner.tags": "cool", "animal.tag": "heavy"})
+        assert query == {"animal.owner.tags": "cool", "animal.tag": "heavy"}

         ocorrence = Ocorrence.objects(
             animal__tag="heavy", animal__owner__tags="cool"
         ).first()
-        self.assertEqual(ocorrence.person, "teste 2")
-        self.assertIsInstance(ocorrence.animal, Animal)
+        assert ocorrence.person == "teste 2"
+        assert isinstance(ocorrence.animal, Animal)
@@ -1,11 +1,11 @@
 # -*- coding: utf-8 -*-
 import datetime
-import math
 import itertools
+import math
 import re

-from mongoengine import *
+import pytest
+
+from mongoengine import *
 from tests.utils import MongoDBTestCase


@@ -28,7 +28,7 @@ class ComplexDateTimeFieldTest(MongoDBTestCase):
         log.date = d1
         log.save()
         log.reload()
-        self.assertEqual(log.date, d1)
+        assert log.date == d1

         # Post UTC - microseconds are rounded (down) nearest millisecond - with
         # default datetimefields
@@ -36,7 +36,7 @@ class ComplexDateTimeFieldTest(MongoDBTestCase):
         log.date = d1
         log.save()
         log.reload()
-        self.assertEqual(log.date, d1)
+        assert log.date == d1

         # Pre UTC dates microseconds below 1000 are dropped - with default
         # datetimefields
@@ -44,7 +44,7 @@ class ComplexDateTimeFieldTest(MongoDBTestCase):
         log.date = d1
         log.save()
         log.reload()
-        self.assertEqual(log.date, d1)
+        assert log.date == d1

         # Pre UTC microseconds above 1000 is wonky - with default datetimefields
         # log.date has an invalid microsecond value so I can't construct
@@ -54,18 +54,18 @@ class ComplexDateTimeFieldTest(MongoDBTestCase):
         log.date = d1
         log.save()
         log.reload()
-        self.assertEqual(log.date, d1)
+        assert log.date == d1
         log1 = LogEntry.objects.get(date=d1)
-        self.assertEqual(log, log1)
+        assert log == log1

         # Test string padding
-        microsecond = map(int, [math.pow(10, x) for x in range(6)])
+        microsecond = map(int, (math.pow(10, x) for x in range(6)))
         mm = dd = hh = ii = ss = [1, 10]

         for values in itertools.product([2014], mm, dd, hh, ii, ss, microsecond):
             stored = LogEntry(date=datetime.datetime(*values)).to_mongo()["date"]
-            self.assertTrue(
-                re.match("^\d{4},\d{2},\d{2},\d{2},\d{2},\d{2},\d{6}$", stored)
+            assert (
+                re.match(r"^\d{4},\d{2},\d{2},\d{2},\d{2},\d{2},\d{6}$", stored)
+                is not None
             )

@@ -73,8 +73,8 @@ class ComplexDateTimeFieldTest(MongoDBTestCase):
         stored = LogEntry(date_with_dots=datetime.datetime(2014, 1, 1)).to_mongo()[
             "date_with_dots"
         ]
-        self.assertTrue(
-            re.match("^\d{4}.\d{2}.\d{2}.\d{2}.\d{2}.\d{2}.\d{6}$", stored) is not None
+        assert (
+            re.match(r"^\d{4}.\d{2}.\d{2}.\d{2}.\d{2}.\d{2}.\d{6}$", stored) is not None
         )

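The two hunks above add an r prefix to the patterns. A sketch (not part of the diff) of why: in a plain string literal, "\d" is an invalid escape sequence (a DeprecationWarning since Python 3.6), while a raw string hands the backslash through to re unchanged. The sample value mimics the comma-separated year/month/day/hour/minute/second/microsecond layout the tests above match:

import re

stored = "2014,01,01,00,00,01,000999"
assert re.match(r"^\d{4},\d{2},\d{2},\d{2},\d{2},\d{2},\d{6}$", stored) is not None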
     def test_complexdatetime_usage(self):
@@ -93,40 +93,40 @@ class ComplexDateTimeFieldTest(MongoDBTestCase):
         log.save()

         log1 = LogEntry.objects.get(date=d1)
-        self.assertEqual(log, log1)
+        assert log == log1

         # create extra 59 log entries for a total of 60
         for i in range(1951, 2010):
             d = datetime.datetime(i, 1, 1, 0, 0, 1, 999)
             LogEntry(date=d).save()

-        self.assertEqual(LogEntry.objects.count(), 60)
+        assert LogEntry.objects.count() == 60

         # Test ordering
         logs = LogEntry.objects.order_by("date")
         i = 0
         while i < 59:
-            self.assertTrue(logs[i].date <= logs[i + 1].date)
+            assert logs[i].date <= logs[i + 1].date
             i += 1

         logs = LogEntry.objects.order_by("-date")
         i = 0
         while i < 59:
-            self.assertTrue(logs[i].date >= logs[i + 1].date)
+            assert logs[i].date >= logs[i + 1].date
             i += 1

         # Test searching
         logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1))
-        self.assertEqual(logs.count(), 30)
+        assert logs.count() == 30

         logs = LogEntry.objects.filter(date__lte=datetime.datetime(1980, 1, 1))
-        self.assertEqual(logs.count(), 30)
+        assert logs.count() == 30

         logs = LogEntry.objects.filter(
             date__lte=datetime.datetime(2011, 1, 1),
             date__gte=datetime.datetime(2000, 1, 1),
         )
-        self.assertEqual(logs.count(), 10)
+        assert logs.count() == 10

         LogEntry.drop_collection()

@@ -137,17 +137,17 @@ class ComplexDateTimeFieldTest(MongoDBTestCase):
         logs = list(LogEntry.objects.order_by("date"))
         for next_idx, log in enumerate(logs[:-1], start=1):
             next_log = logs[next_idx]
-            self.assertTrue(log.date < next_log.date)
+            assert log.date < next_log.date

         logs = list(LogEntry.objects.order_by("-date"))
         for next_idx, log in enumerate(logs[:-1], start=1):
             next_log = logs[next_idx]
-            self.assertTrue(log.date > next_log.date)
+            assert log.date > next_log.date

         logs = LogEntry.objects.filter(
             date__lte=datetime.datetime(2015, 1, 1, 0, 0, 0, 10000)
         )
-        self.assertEqual(logs.count(), 4)
+        assert logs.count() == 4

     def test_no_default_value(self):
         class Log(Document):
@@ -156,11 +156,11 @@ class ComplexDateTimeFieldTest(MongoDBTestCase):
         Log.drop_collection()

         log = Log()
-        self.assertIsNone(log.timestamp)
+        assert log.timestamp is None
         log.save()

         fetched_log = Log.objects.with_id(log.id)
-        self.assertIsNone(fetched_log.timestamp)
+        assert fetched_log.timestamp is None

     def test_default_static_value(self):
         NOW = datetime.datetime.utcnow()
@@ -171,11 +171,11 @@ class ComplexDateTimeFieldTest(MongoDBTestCase):
         Log.drop_collection()

         log = Log()
-        self.assertEqual(log.timestamp, NOW)
+        assert log.timestamp == NOW
         log.save()

         fetched_log = Log.objects.with_id(log.id)
-        self.assertEqual(fetched_log.timestamp, NOW)
+        assert fetched_log.timestamp == NOW

     def test_default_callable(self):
         NOW = datetime.datetime.utcnow()
@@ -186,8 +186,23 @@ class ComplexDateTimeFieldTest(MongoDBTestCase):
         Log.drop_collection()

         log = Log()
-        self.assertGreaterEqual(log.timestamp, NOW)
+        assert log.timestamp >= NOW
         log.save()

         fetched_log = Log.objects.with_id(log.id)
|
||||
self.assertGreaterEqual(fetched_log.timestamp, NOW)
|
||||
assert fetched_log.timestamp >= NOW
|
||||
|
||||
def test_setting_bad_value_does_not_raise_unless_validate_is_called(self):
|
||||
# test regression of #2253
|
||||
|
||||
class Log(Document):
|
||||
timestamp = ComplexDateTimeField()
|
||||
|
||||
Log.drop_collection()
|
||||
|
||||
log = Log(timestamp="garbage")
|
||||
with pytest.raises(ValidationError):
|
||||
log.validate()
|
||||
|
||||
with pytest.raises(ValidationError):
|
||||
log.save()
|
||||
|
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
import datetime
import six

import pytest

try:
import dateutil
@@ -8,7 +8,6 @@ except ImportError:
dateutil = None

from mongoengine import *

from tests.utils import MongoDBTestCase


@@ -23,7 +22,8 @@ class TestDateField(MongoDBTestCase):
dt = DateField()

md = MyDoc(dt="")
self.assertRaises(ValidationError, md.save)
with pytest.raises(ValidationError):
md.save()

def test_date_from_whitespace_string(self):
"""
@@ -35,7 +35,8 @@ class TestDateField(MongoDBTestCase):
dt = DateField()

md = MyDoc(dt=" ")
self.assertRaises(ValidationError, md.save)
with pytest.raises(ValidationError):
md.save()

def test_default_values_today(self):
"""Ensure that default field values are used when creating
@@ -47,9 +48,9 @@ class TestDateField(MongoDBTestCase):

person = Person()
person.validate()
self.assertEqual(person.day, person.day)
self.assertEqual(person.day, datetime.date.today())
self.assertEqual(person._data["day"], person.day)
assert person.day == person.day
assert person.day == datetime.date.today()
assert person._data["day"] == person.day

def test_date(self):
"""Tests showing pymongo date fields
@@ -67,7 +68,7 @@ class TestDateField(MongoDBTestCase):
log.date = datetime.date.today()
log.save()
log.reload()
self.assertEqual(log.date, datetime.date.today())
assert log.date == datetime.date.today()

d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999)
d2 = datetime.datetime(1970, 1, 1, 0, 0, 1)
@@ -75,27 +76,16 @@ class TestDateField(MongoDBTestCase):
log.date = d1
log.save()
log.reload()
self.assertEqual(log.date, d1.date())
self.assertEqual(log.date, d2.date())
assert log.date == d1.date()
assert log.date == d2.date()

d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9999)
d2 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9000)
log.date = d1
log.save()
log.reload()
self.assertEqual(log.date, d1.date())
self.assertEqual(log.date, d2.date())

if not six.PY3:
# Pre UTC dates microseconds below 1000 are dropped
# This does not seem to be true in PY3
d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999)
d2 = datetime.datetime(1969, 12, 31, 23, 59, 59)
log.date = d1
log.save()
log.reload()
self.assertEqual(log.date, d1.date())
self.assertEqual(log.date, d2.date())
assert log.date == d1.date()
assert log.date == d2.date()

def test_regular_usage(self):
"""Tests for regular datetime fields"""
@@ -113,35 +103,35 @@ class TestDateField(MongoDBTestCase):

for query in (d1, d1.isoformat(" ")):
log1 = LogEntry.objects.get(date=query)
self.assertEqual(log, log1)
assert log == log1

if dateutil:
log1 = LogEntry.objects.get(date=d1.isoformat("T"))
self.assertEqual(log, log1)
assert log == log1

# create additional 19 log entries for a total of 20
for i in range(1971, 1990):
d = datetime.datetime(i, 1, 1, 0, 0, 1)
LogEntry(date=d).save()

self.assertEqual(LogEntry.objects.count(), 20)
assert LogEntry.objects.count() == 20

# Test ordering
logs = LogEntry.objects.order_by("date")
i = 0
while i < 19:
self.assertTrue(logs[i].date <= logs[i + 1].date)
assert logs[i].date <= logs[i + 1].date
i += 1

logs = LogEntry.objects.order_by("-date")
i = 0
while i < 19:
self.assertTrue(logs[i].date >= logs[i + 1].date)
assert logs[i].date >= logs[i + 1].date
i += 1

# Test searching
logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1))
self.assertEqual(logs.count(), 10)
assert logs.count() == 10

def test_validation(self):
"""Ensure that invalid values cannot be assigned to datetime
@@ -166,6 +156,8 @@ class TestDateField(MongoDBTestCase):
log.validate()

log.time = -1
self.assertRaises(ValidationError, log.validate)
with pytest.raises(ValidationError):
log.validate()
log.time = "ABC"
self.assertRaises(ValidationError, log.validate)
with pytest.raises(ValidationError):
log.validate()

@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
import datetime as dt
import six

import pytest

try:
import dateutil
@@ -9,7 +9,6 @@ except ImportError:

from mongoengine import *
from mongoengine import connection

from tests.utils import MongoDBTestCase


@@ -24,7 +23,8 @@ class TestDateTimeField(MongoDBTestCase):
dt = DateTimeField()

md = MyDoc(dt="")
self.assertRaises(ValidationError, md.save)
with pytest.raises(ValidationError):
md.save()

def test_datetime_from_whitespace_string(self):
"""
@@ -36,7 +36,8 @@ class TestDateTimeField(MongoDBTestCase):
dt = DateTimeField()

md = MyDoc(dt=" ")
self.assertRaises(ValidationError, md.save)
with pytest.raises(ValidationError):
md.save()

def test_default_value_utcnow(self):
"""Ensure that default field values are used when creating
@@ -50,11 +51,9 @@ class TestDateTimeField(MongoDBTestCase):
person = Person()
person.validate()
person_created_t0 = person.created
self.assertLess(person.created - utcnow, dt.timedelta(seconds=1))
self.assertEqual(
person_created_t0, person.created
) # make sure it does not change
self.assertEqual(person._data["created"], person.created)
assert person.created - utcnow < dt.timedelta(seconds=1)
assert person_created_t0 == person.created # make sure it does not change
assert person._data["created"] == person.created

def test_handling_microseconds(self):
"""Tests showing pymongo datetime fields handling of microseconds.
@@ -74,7 +73,7 @@ class TestDateTimeField(MongoDBTestCase):
log.date = dt.date.today()
log.save()
log.reload()
self.assertEqual(log.date.date(), dt.date.today())
assert log.date.date() == dt.date.today()

# Post UTC - microseconds are rounded (down) nearest millisecond and
# dropped
@@ -84,8 +83,8 @@ class TestDateTimeField(MongoDBTestCase):
log.date = d1
log.save()
log.reload()
self.assertNotEqual(log.date, d1)
self.assertEqual(log.date, d2)
assert log.date != d1
assert log.date == d2

# Post UTC - microseconds are rounded (down) nearest millisecond
d1 = dt.datetime(1970, 1, 1, 0, 0, 1, 9999)
@@ -93,19 +92,8 @@ class TestDateTimeField(MongoDBTestCase):
log.date = d1
log.save()
log.reload()
self.assertNotEqual(log.date, d1)
self.assertEqual(log.date, d2)

if not six.PY3:
# Pre UTC dates microseconds below 1000 are dropped
# This does not seem to be true in PY3
d1 = dt.datetime(1969, 12, 31, 23, 59, 59, 999)
d2 = dt.datetime(1969, 12, 31, 23, 59, 59)
log.date = d1
log.save()
log.reload()
self.assertNotEqual(log.date, d1)
self.assertEqual(log.date, d2)
assert log.date != d1
assert log.date == d2

def test_regular_usage(self):
"""Tests for regular datetime fields"""
@@ -123,43 +111,43 @@ class TestDateTimeField(MongoDBTestCase):

for query in (d1, d1.isoformat(" ")):
log1 = LogEntry.objects.get(date=query)
self.assertEqual(log, log1)
assert log == log1

if dateutil:
log1 = LogEntry.objects.get(date=d1.isoformat("T"))
self.assertEqual(log, log1)
assert log == log1

# create additional 19 log entries for a total of 20
for i in range(1971, 1990):
d = dt.datetime(i, 1, 1, 0, 0, 1)
LogEntry(date=d).save()

self.assertEqual(LogEntry.objects.count(), 20)
assert LogEntry.objects.count() == 20

# Test ordering
logs = LogEntry.objects.order_by("date")
i = 0
while i < 19:
self.assertTrue(logs[i].date <= logs[i + 1].date)
assert logs[i].date <= logs[i + 1].date
i += 1

logs = LogEntry.objects.order_by("-date")
i = 0
while i < 19:
self.assertTrue(logs[i].date >= logs[i + 1].date)
assert logs[i].date >= logs[i + 1].date
i += 1

# Test searching
logs = LogEntry.objects.filter(date__gte=dt.datetime(1980, 1, 1))
self.assertEqual(logs.count(), 10)
assert logs.count() == 10

logs = LogEntry.objects.filter(date__lte=dt.datetime(1980, 1, 1))
self.assertEqual(logs.count(), 10)
assert logs.count() == 10

logs = LogEntry.objects.filter(
date__lte=dt.datetime(1980, 1, 1), date__gte=dt.datetime(1975, 1, 1)
)
self.assertEqual(logs.count(), 5)
assert logs.count() == 5

def test_datetime_validation(self):
"""Ensure that invalid values cannot be assigned to datetime
@@ -187,15 +175,20 @@ class TestDateTimeField(MongoDBTestCase):
log.validate()

log.time = -1
self.assertRaises(ValidationError, log.validate)
with pytest.raises(ValidationError):
log.validate()
log.time = "ABC"
self.assertRaises(ValidationError, log.validate)
with pytest.raises(ValidationError):
log.validate()
log.time = "2019-05-16 21:GARBAGE:12"
self.assertRaises(ValidationError, log.validate)
with pytest.raises(ValidationError):
log.validate()
log.time = "2019-05-16 21:42:57.GARBAGE"
self.assertRaises(ValidationError, log.validate)
with pytest.raises(ValidationError):
log.validate()
log.time = "2019-05-16 21:42:57.123.456"
self.assertRaises(ValidationError, log.validate)
with pytest.raises(ValidationError):
log.validate()

def test_parse_datetime_as_str(self):
class DTDoc(Document):
@@ -206,15 +199,16 @@ class TestDateTimeField(MongoDBTestCase):
# make sure that passing a parsable datetime works
dtd = DTDoc()
dtd.date = date_str
self.assertIsInstance(dtd.date, six.string_types)
assert isinstance(dtd.date, str)
dtd.save()
dtd.reload()

self.assertIsInstance(dtd.date, dt.datetime)
self.assertEqual(str(dtd.date), date_str)
assert isinstance(dtd.date, dt.datetime)
assert str(dtd.date) == date_str

dtd.date = "January 1st, 9999999999"
self.assertRaises(ValidationError, dtd.validate)
with pytest.raises(ValidationError):
dtd.validate()


class TestDateTimeTzAware(MongoDBTestCase):
@@ -235,4 +229,4 @@ class TestDateTimeTzAware(MongoDBTestCase):

log = LogEntry.objects.first()
log.time = dt.datetime(2013, 1, 1, 0, 0, 0)
self.assertEqual(["time"], log._changed_fields)
assert ["time"] == log._changed_fields

@@ -1,56 +1,12 @@
# -*- coding: utf-8 -*-
from decimal import Decimal

from mongoengine import *
import pytest

from mongoengine import DecimalField, Document, ValidationError
from tests.utils import MongoDBTestCase


class TestDecimalField(MongoDBTestCase):
def test_validation(self):
"""Ensure that invalid values cannot be assigned to decimal fields.
"""

class Person(Document):
height = DecimalField(min_value=Decimal("0.1"), max_value=Decimal("3.5"))

Person.drop_collection()

Person(height=Decimal("1.89")).save()
person = Person.objects.first()
self.assertEqual(person.height, Decimal("1.89"))

person.height = "2.0"
person.save()
person.height = 0.01
self.assertRaises(ValidationError, person.validate)
person.height = Decimal("0.01")
self.assertRaises(ValidationError, person.validate)
person.height = Decimal("4.0")
self.assertRaises(ValidationError, person.validate)
person.height = "something invalid"
self.assertRaises(ValidationError, person.validate)

person_2 = Person(height="something invalid")
self.assertRaises(ValidationError, person_2.validate)

def test_comparison(self):
class Person(Document):
money = DecimalField()

Person.drop_collection()

Person(money=6).save()
Person(money=7).save()
Person(money=8).save()
Person(money=10).save()

self.assertEqual(2, Person.objects(money__gt=Decimal("7")).count())
self.assertEqual(2, Person.objects(money__gt=7).count())
self.assertEqual(2, Person.objects(money__gt="7").count())

self.assertEqual(3, Person.objects(money__gte="7").count())

def test_storage(self):
class Person(Document):
float_value = DecimalField(precision=4)
@@ -87,7 +43,7 @@ class TestDecimalField(MongoDBTestCase):
]
expected.extend(expected)
actual = list(Person.objects.exclude("id").as_pymongo())
self.assertEqual(expected, actual)
assert expected == actual

# How it comes out locally
expected = [
@@ -101,4 +57,84 @@ class TestDecimalField(MongoDBTestCase):
expected.extend(expected)
for field_name in ["float_value", "string_value"]:
actual = list(Person.objects().scalar(field_name))
self.assertEqual(expected, actual)
assert expected == actual

def test_save_none(self):
class Person(Document):
value = DecimalField()

Person.drop_collection()

person = Person(value=None)
assert person.value is None
person.save()
fetched_person = Person.objects.first()
assert fetched_person.value is None

def test_validation(self):
"""Ensure that invalid values cannot be assigned to decimal fields."""

class Person(Document):
height = DecimalField(min_value=Decimal("0.1"), max_value=Decimal("3.5"))

Person.drop_collection()

Person(height=Decimal("1.89")).save()
person = Person.objects.first()
assert person.height == Decimal("1.89")

person.height = "2.0"
person.save()
person.height = 0.01
with pytest.raises(ValidationError):
person.validate()
person.height = Decimal("0.01")
with pytest.raises(ValidationError):
person.validate()
person.height = Decimal("4.0")
with pytest.raises(ValidationError):
person.validate()
person.height = "something invalid"
with pytest.raises(ValidationError):
person.validate()

person_2 = Person(height="something invalid")
with pytest.raises(ValidationError):
person_2.validate()

def test_comparison(self):
class Person(Document):
money = DecimalField()

Person.drop_collection()

Person(money=6).save()
Person(money=7).save()
Person(money=8).save()
Person(money=10).save()

assert 2 == Person.objects(money__gt=Decimal("7")).count()
assert 2 == Person.objects(money__gt=7).count()
assert 2 == Person.objects(money__gt="7").count()

assert 3 == Person.objects(money__gte="7").count()

def test_precision_0(self):
"""prevent regression of a bug that was raising an exception when using precision=0"""

class TestDoc(Document):
d = DecimalField(precision=0)

TestDoc.drop_collection()

td = TestDoc(d=Decimal("12.00032678131263"))
assert td.d == Decimal("12")

def test_precision_negative_raise(self):
"""prevent regression of a bug that was raising an exception when using a negative precision"""
with pytest.raises(
ValidationError, match="precision must be a positive integer"
):

class TestDoc(Document):
dneg = DecimalField(precision=-1)

@@ -1,7 +1,12 @@
# -*- coding: utf-8 -*-
import pytest
from bson import InvalidDocument

from mongoengine import *
from mongoengine.base import BaseDict

from mongoengine.mongodb_support import (
MONGODB_36,
get_mongodb_version,
)
from tests.utils import MongoDBTestCase, get_as_pymongo


@@ -14,7 +19,57 @@ class TestDictField(MongoDBTestCase):

info = {"testkey": "testvalue"}
post = BlogPost(info=info).save()
self.assertEqual(get_as_pymongo(post), {"_id": post.id, "info": info})
assert get_as_pymongo(post) == {"_id": post.id, "info": info}

def test_validate_invalid_type(self):
class BlogPost(Document):
info = DictField()

BlogPost.drop_collection()

invalid_infos = ["my post", ["test", "test"], {1: "test"}]
for invalid_info in invalid_infos:
with pytest.raises(ValidationError):
BlogPost(info=invalid_info).validate()

def test_keys_with_dots_or_dollars(self):
class BlogPost(Document):
info = DictField()

BlogPost.drop_collection()

post = BlogPost()

post.info = {"$title": "test"}
with pytest.raises(ValidationError):
post.validate()

post.info = {"nested": {"$title": "test"}}
with pytest.raises(ValidationError):
post.validate()

post.info = {"$title.test": "test"}
with pytest.raises(ValidationError):
post.validate()

post.info = {"nested": {"the.title": "test"}}
if get_mongodb_version() < MONGODB_36:
# MongoDB < 3.6 rejects dots
# To avoid checking the mongodb version from the DictField class
# we rely on MongoDB to reject the data during the save
post.validate()
with pytest.raises(InvalidDocument):
post.save()
else:
post.validate()

post.info = {"dollar_and_dot": {"te$st.test": "test"}}
if get_mongodb_version() < MONGODB_36:
post.validate()
with pytest.raises(InvalidDocument):
post.save()
else:
post.validate()

def test_general_things(self):
"""Ensure that dict types work as expected."""
@@ -24,29 +79,7 @@ class TestDictField(MongoDBTestCase):

BlogPost.drop_collection()

post = BlogPost()
post.info = "my post"
self.assertRaises(ValidationError, post.validate)

post.info = ["test", "test"]
self.assertRaises(ValidationError, post.validate)

post.info = {"$title": "test"}
self.assertRaises(ValidationError, post.validate)

post.info = {"nested": {"$title": "test"}}
self.assertRaises(ValidationError, post.validate)

post.info = {"the.title": "test"}
self.assertRaises(ValidationError, post.validate)

post.info = {"nested": {"the.title": "test"}}
self.assertRaises(ValidationError, post.validate)

post.info = {1: "test"}
self.assertRaises(ValidationError, post.validate)

post.info = {"title": "test"}
post = BlogPost(info={"title": "test"})
post.save()

post = BlogPost()
@@ -61,33 +94,27 @@ class TestDictField(MongoDBTestCase):
post.info = {"details": {"test": 3}}
post.save()

self.assertEqual(BlogPost.objects.count(), 4)
self.assertEqual(BlogPost.objects.filter(info__title__exact="test").count(), 1)
self.assertEqual(
BlogPost.objects.filter(info__details__test__exact="test").count(), 1
)
assert BlogPost.objects.count() == 4
assert BlogPost.objects.filter(info__title__exact="test").count() == 1
assert BlogPost.objects.filter(info__details__test__exact="test").count() == 1

post = BlogPost.objects.filter(info__title__exact="dollar_sign").first()
self.assertIn("te$t", post["info"]["details"])
assert "te$t" in post["info"]["details"]

# Confirm handles non strings or non existing keys
self.assertEqual(
BlogPost.objects.filter(info__details__test__exact=5).count(), 0
)
self.assertEqual(
BlogPost.objects.filter(info__made_up__test__exact="test").count(), 0
)
assert BlogPost.objects.filter(info__details__test__exact=5).count() == 0
assert BlogPost.objects.filter(info__made_up__test__exact="test").count() == 0

post = BlogPost.objects.create(info={"title": "original"})
post.info.update({"title": "updated"})
post.save()
post.reload()
self.assertEqual("updated", post.info["title"])
assert "updated" == post.info["title"]

post.info.setdefault("authors", [])
post.save()
post.reload()
self.assertEqual([], post.info["authors"])
assert post.info["authors"] == []

def test_dictfield_dump_document(self):
"""Ensure a DictField can handle another document's dump."""
@@ -114,10 +141,8 @@ class TestDictField(MongoDBTestCase):
).save()
doc = Doc(field=to_embed.to_mongo().to_dict())
doc.save()
self.assertIsInstance(doc.field, dict)
self.assertEqual(
doc.field, {"_id": 2, "recursive": {"_id": 1, "recursive": {}}}
)
assert isinstance(doc.field, dict)
assert doc.field == {"_id": 2, "recursive": {"_id": 1, "recursive": {}}}
# Same thing with a Document with a _cls field
to_embed_recursive = ToEmbedChild(id=1).save()
to_embed_child = ToEmbedChild(
@@ -125,7 +150,7 @@ class TestDictField(MongoDBTestCase):
).save()
doc = Doc(field=to_embed_child.to_mongo().to_dict())
doc.save()
self.assertIsInstance(doc.field, dict)
assert isinstance(doc.field, dict)
expected = {
"_id": 2,
"_cls": "ToEmbedParent.ToEmbedChild",
@@ -135,7 +160,7 @@ class TestDictField(MongoDBTestCase):
"recursive": {},
},
}
self.assertEqual(doc.field, expected)
assert doc.field == expected

def test_dictfield_strict(self):
"""Ensure that dict field handles validation if provided a strict field type."""
@@ -150,7 +175,7 @@ class TestDictField(MongoDBTestCase):
e.save()

# try creating an invalid mapping
with self.assertRaises(ValidationError):
with pytest.raises(ValidationError):
e.mapping["somestring"] = "abc"
e.save()

@@ -184,22 +209,21 @@ class TestDictField(MongoDBTestCase):
e.save()

e2 = Simple.objects.get(id=e.id)
self.assertIsInstance(e2.mapping["somestring"], StringSetting)
self.assertIsInstance(e2.mapping["someint"], IntegerSetting)
assert isinstance(e2.mapping["somestring"], StringSetting)
assert isinstance(e2.mapping["someint"], IntegerSetting)

# Test querying
self.assertEqual(Simple.objects.filter(mapping__someint__value=42).count(), 1)
self.assertEqual(
Simple.objects.filter(mapping__nested_dict__number=1).count(), 1
assert Simple.objects.filter(mapping__someint__value=42).count() == 1
assert Simple.objects.filter(mapping__nested_dict__number=1).count() == 1
assert (
Simple.objects.filter(mapping__nested_dict__complex__value=42).count() == 1
)
self.assertEqual(
Simple.objects.filter(mapping__nested_dict__complex__value=42).count(), 1
assert (
Simple.objects.filter(mapping__nested_dict__list__0__value=42).count() == 1
)
self.assertEqual(
Simple.objects.filter(mapping__nested_dict__list__0__value=42).count(), 1
)
self.assertEqual(
Simple.objects.filter(mapping__nested_dict__list__1__value="foo").count(), 1
assert (
Simple.objects.filter(mapping__nested_dict__list__1__value="foo").count()
== 1
)

# Confirm can update
@@ -207,11 +231,13 @@ class TestDictField(MongoDBTestCase):
Simple.objects().update(
set__mapping__nested_dict__list__1=StringSetting(value="Boo")
)
self.assertEqual(
Simple.objects.filter(mapping__nested_dict__list__1__value="foo").count(), 0
assert (
Simple.objects.filter(mapping__nested_dict__list__1__value="foo").count()
== 0
)
self.assertEqual(
Simple.objects.filter(mapping__nested_dict__list__1__value="Boo").count(), 1
assert (
Simple.objects.filter(mapping__nested_dict__list__1__value="Boo").count()
== 1
)

def test_push_dict(self):
@@ -221,12 +247,12 @@ class TestDictField(MongoDBTestCase):
doc = MyModel(events=[{"a": 1}]).save()
raw_doc = get_as_pymongo(doc)
expected_raw_doc = {"_id": doc.id, "events": [{"a": 1}]}
self.assertEqual(raw_doc, expected_raw_doc)
assert raw_doc == expected_raw_doc

MyModel.objects(id=doc.id).update(push__events={})
raw_doc = get_as_pymongo(doc)
expected_raw_doc = {"_id": doc.id, "events": [{"a": 1}, {}]}
self.assertEqual(raw_doc, expected_raw_doc)
assert raw_doc == expected_raw_doc

def test_ensure_unique_default_instances(self):
"""Ensure that every field has its own unique default instance."""
@@ -239,8 +265,8 @@ class TestDictField(MongoDBTestCase):
d1.data["foo"] = "bar"
d1.data2["foo"] = "bar"
d2 = D()
self.assertEqual(d2.data, {})
self.assertEqual(d2.data2, {})
assert d2.data == {}
assert d2.data2 == {}

def test_dict_field_invalid_dict_value(self):
class DictFieldTest(Document):
@@ -250,11 +276,13 @@ class TestDictField(MongoDBTestCase):

test = DictFieldTest(dictionary=None)
test.dictionary # Just access to test getter
self.assertRaises(ValidationError, test.validate)
with pytest.raises(ValidationError):
test.validate()

test = DictFieldTest(dictionary=False)
test.dictionary # Just access to test getter
self.assertRaises(ValidationError, test.validate)
with pytest.raises(ValidationError):
test.validate()

def test_dict_field_raises_validation_error_if_wrongly_assign_embedded_doc(self):
class DictFieldTest(Document):
@@ -267,12 +295,12 @@ class TestDictField(MongoDBTestCase):

embed = Embedded(name="garbage")
doc = DictFieldTest(dictionary=embed)
with self.assertRaises(ValidationError) as ctx_err:
with pytest.raises(ValidationError) as exc_info:
doc.validate()
self.assertIn("'dictionary'", str(ctx_err.exception))
self.assertIn(
"Only dictionaries may be used in a DictField", str(ctx_err.exception)
)

error_msg = str(exc_info.value)
assert "'dictionary'" in error_msg
assert "Only dictionaries may be used in a DictField" in error_msg

def test_atomic_update_dict_field(self):
"""Ensure that the entire DictField can be atomically updated."""
@@ -287,11 +315,11 @@ class TestDictField(MongoDBTestCase):
e.save()
e.update(set__mapping={"ints": [3, 4]})
e.reload()
self.assertEqual(BaseDict, type(e.mapping))
self.assertEqual({"ints": [3, 4]}, e.mapping)
assert isinstance(e.mapping, BaseDict)
assert {"ints": [3, 4]} == e.mapping

# try creating an invalid mapping
with self.assertRaises(ValueError):
with pytest.raises(ValueError):
e.update(set__mapping={"somestrings": ["foo", "bar"]})

def test_dictfield_with_referencefield_complex_nesting_cases(self):
@@ -329,13 +357,13 @@ class TestDictField(MongoDBTestCase):
e.save()

s = Simple.objects.first()
self.assertIsInstance(s.mapping0["someint"], Doc)
self.assertIsInstance(s.mapping1["someint"], Doc)
self.assertIsInstance(s.mapping2["someint"][0], Doc)
self.assertIsInstance(s.mapping3["someint"][0], Doc)
self.assertIsInstance(s.mapping4["someint"]["d"], Doc)
self.assertIsInstance(s.mapping5["someint"]["d"], Doc)
self.assertIsInstance(s.mapping6["someint"][0]["d"], Doc)
self.assertIsInstance(s.mapping7["someint"][0]["d"], Doc)
self.assertIsInstance(s.mapping8["someint"][0]["d"][0], Doc)
self.assertIsInstance(s.mapping9["someint"][0]["d"][0], Doc)
assert isinstance(s.mapping0["someint"], Doc)
assert isinstance(s.mapping1["someint"], Doc)
assert isinstance(s.mapping2["someint"][0], Doc)
assert isinstance(s.mapping3["someint"][0], Doc)
assert isinstance(s.mapping4["someint"]["d"], Doc)
assert isinstance(s.mapping5["someint"]["d"], Doc)
assert isinstance(s.mapping6["someint"][0]["d"], Doc)
assert isinstance(s.mapping7["someint"][0]["d"], Doc)
assert isinstance(s.mapping8["someint"][0]["d"][0], Doc)
assert isinstance(s.mapping9["someint"][0]["d"][0], Doc)

@@ -1,9 +1,6 @@
# -*- coding: utf-8 -*-
import sys
from unittest import SkipTest

from mongoengine import *
import pytest

from mongoengine import Document, EmailField, ValidationError
from tests.utils import MongoDBTestCase


@@ -27,38 +24,37 @@ class TestEmailField(MongoDBTestCase):
user.validate()

user = User(email="ross@example.com.")
self.assertRaises(ValidationError, user.validate)
with pytest.raises(ValidationError):
user.validate()

# unicode domain
user = User(email=u"user@пример.рф")
user = User(email="user@пример.рф")
user.validate()

# invalid unicode domain
user = User(email=u"user@пример")
self.assertRaises(ValidationError, user.validate)
user = User(email="user@пример")
with pytest.raises(ValidationError):
user.validate()

# invalid data type
user = User(email=123)
self.assertRaises(ValidationError, user.validate)
with pytest.raises(ValidationError):
user.validate()

def test_email_field_unicode_user(self):
# Don't run this test on pypy3, which doesn't support unicode regex:
# https://bitbucket.org/pypy/pypy/issues/1821/regular-expression-doesnt-find-unicode
if sys.version_info[:2] == (3, 2):
raise SkipTest("unicode email addresses are not supported on PyPy 3")

class User(Document):
email = EmailField()

# unicode user shouldn't validate by default...
user = User(email=u"Dörte@Sörensen.example.com")
self.assertRaises(ValidationError, user.validate)
user = User(email="Dörte@Sörensen.example.com")
with pytest.raises(ValidationError):
user.validate()

# ...but it should be fine with allow_utf8_user set to True
class User(Document):
email = EmailField(allow_utf8_user=True)

user = User(email=u"Dörte@Sörensen.example.com")
user = User(email="Dörte@Sörensen.example.com")
user.validate()

def test_email_field_domain_whitelist(self):
@@ -67,7 +63,8 @@ class TestEmailField(MongoDBTestCase):

# localhost domain shouldn't validate by default...
user = User(email="me@localhost")
self.assertRaises(ValidationError, user.validate)
with pytest.raises(ValidationError):
user.validate()

# ...but it should be fine if it's whitelisted
class User(Document):
@@ -82,9 +79,10 @@ class TestEmailField(MongoDBTestCase):

invalid_idn = ".google.com"
user = User(email="me@%s" % invalid_idn)
with self.assertRaises(ValidationError) as ctx_err:

with pytest.raises(ValidationError) as exc_info:
user.validate()
self.assertIn("domain failed IDN encoding", str(ctx_err.exception))
assert "domain failed IDN encoding" in str(exc_info.value)

def test_email_field_ip_domain(self):
class User(Document):
@@ -96,13 +94,16 @@ class TestEmailField(MongoDBTestCase):

# IP address as a domain shouldn't validate by default...
user = User(email=valid_ipv4)
self.assertRaises(ValidationError, user.validate)
with pytest.raises(ValidationError):
user.validate()

user = User(email=valid_ipv6)
self.assertRaises(ValidationError, user.validate)
with pytest.raises(ValidationError):
user.validate()

user = User(email=invalid_ip)
self.assertRaises(ValidationError, user.validate)
with pytest.raises(ValidationError):
user.validate()

# ...but it should be fine with allow_ip_domain set to True
class User(Document):
@@ -116,7 +117,8 @@ class TestEmailField(MongoDBTestCase):

# invalid IP should still fail validation
user = User(email=invalid_ip)
self.assertRaises(ValidationError, user.validate)
with pytest.raises(ValidationError):
user.validate()

def test_email_field_honors_regex(self):
class User(Document):
@@ -124,8 +126,9 @@ class TestEmailField(MongoDBTestCase):

# Fails regex validation
user = User(email="me@foo.com")
self.assertRaises(ValidationError, user.validate)
with pytest.raises(ValidationError):
user.validate()

# Passes regex validation
user = User(email="me@example.com")
self.assertIsNone(user.validate())
assert user.validate() is None

@@ -1,19 +1,21 @@
# -*- coding: utf-8 -*-
from copy import deepcopy

import pytest
from bson import ObjectId

from mongoengine import (
Document,
StringField,
ValidationError,
EmbeddedDocument,
EmbeddedDocumentField,
InvalidQueryError,
LookUpError,
IntField,
GenericEmbeddedDocumentField,
IntField,
InvalidQueryError,
ListField,
EmbeddedDocumentListField,
ReferenceField,
LookUpError,
MapField,
StringField,
ValidationError,
)

from tests.utils import MongoDBTestCase


@@ -23,13 +25,13 @@ class TestEmbeddedDocumentField(MongoDBTestCase):
name = StringField()

field = EmbeddedDocumentField(MyDoc)
self.assertEqual(field.document_type_obj, MyDoc)
assert field.document_type_obj == MyDoc

field2 = EmbeddedDocumentField("MyDoc")
self.assertEqual(field2.document_type_obj, "MyDoc")
assert field2.document_type_obj == "MyDoc"

def test___init___throw_error_if_document_type_is_not_EmbeddedDocument(self):
with self.assertRaises(ValidationError):
with pytest.raises(ValidationError):
EmbeddedDocumentField(dict)

def test_document_type_throw_error_if_not_EmbeddedDocument_subclass(self):
@@ -37,11 +39,11 @@ class TestEmbeddedDocumentField(MongoDBTestCase):
name = StringField()

emb = EmbeddedDocumentField("MyDoc")
with self.assertRaises(ValidationError) as ctx:
with pytest.raises(ValidationError) as exc_info:
emb.document_type
self.assertIn(
"Invalid embedded document class provided to an EmbeddedDocumentField",
str(ctx.exception),
assert (
"Invalid embedded document class provided to an EmbeddedDocumentField"
in str(exc_info.value)
)

def test_embedded_document_field_only_allow_subclasses_of_embedded_document(self):
@@ -49,12 +51,12 @@ class TestEmbeddedDocumentField(MongoDBTestCase):
class MyDoc(Document):
name = StringField()

with self.assertRaises(ValidationError):
with pytest.raises(ValidationError):

class MyFailingDoc(Document):
emb = EmbeddedDocumentField(MyDoc)

with self.assertRaises(ValidationError):
with pytest.raises(ValidationError):

class MyFailingdoc2(Document):
emb = EmbeddedDocumentField("MyDoc")
@@ -73,24 +75,24 @@ class TestEmbeddedDocumentField(MongoDBTestCase):
p = Person(settings=AdminSettings(foo1="bar1", foo2="bar2"), name="John").save()

# Test non existing attribute
with self.assertRaises(InvalidQueryError) as ctx_err:
with pytest.raises(InvalidQueryError) as exc_info:
Person.objects(settings__notexist="bar").first()
self.assertEqual(unicode(ctx_err.exception), u'Cannot resolve field "notexist"')
assert str(exc_info.value) == 'Cannot resolve field "notexist"'

with self.assertRaises(LookUpError):
with pytest.raises(LookUpError):
Person.objects.only("settings.notexist")

# Test existing attribute
self.assertEqual(Person.objects(settings__foo1="bar1").first().id, p.id)
assert Person.objects(settings__foo1="bar1").first().id == p.id
only_p = Person.objects.only("settings.foo1").first()
self.assertEqual(only_p.settings.foo1, p.settings.foo1)
self.assertIsNone(only_p.settings.foo2)
self.assertIsNone(only_p.name)
assert only_p.settings.foo1 == p.settings.foo1
assert only_p.settings.foo2 is None
assert only_p.name is None

exclude_p = Person.objects.exclude("settings.foo1").first()
self.assertIsNone(exclude_p.settings.foo1)
self.assertEqual(exclude_p.settings.foo2, p.settings.foo2)
self.assertEqual(exclude_p.name, p.name)
assert exclude_p.settings.foo1 is None
assert exclude_p.settings.foo2 == p.settings.foo2
assert exclude_p.name == p.name

def test_query_embedded_document_attribute_with_inheritance(self):
class BaseSettings(EmbeddedDocument):
@@ -109,17 +111,17 @@ class TestEmbeddedDocumentField(MongoDBTestCase):
p.save()

# Test non existing attribute
with self.assertRaises(InvalidQueryError) as ctx_err:
self.assertEqual(Person.objects(settings__notexist="bar").first().id, p.id)
self.assertEqual(unicode(ctx_err.exception), u'Cannot resolve field "notexist"')
with pytest.raises(InvalidQueryError) as exc_info:
assert Person.objects(settings__notexist="bar").first().id == p.id
assert str(exc_info.value) == 'Cannot resolve field "notexist"'

# Test existing attribute
self.assertEqual(Person.objects(settings__base_foo="basefoo").first().id, p.id)
self.assertEqual(Person.objects(settings__sub_foo="subfoo").first().id, p.id)
assert Person.objects(settings__base_foo="basefoo").first().id == p.id
assert Person.objects(settings__sub_foo="subfoo").first().id == p.id

only_p = Person.objects.only("settings.base_foo", "settings._cls").first()
self.assertEqual(only_p.settings.base_foo, "basefoo")
self.assertIsNone(only_p.settings.sub_foo)
assert only_p.settings.base_foo == "basefoo"
assert only_p.settings.sub_foo is None

def test_query_list_embedded_document_with_inheritance(self):
class Post(EmbeddedDocument):
@@ -139,14 +141,14 @@ class TestEmbeddedDocumentField(MongoDBTestCase):
record_text = Record(posts=[TextPost(content="a", title="foo")]).save()

records = list(Record.objects(posts__author=record_movie.posts[0].author))
self.assertEqual(len(records), 1)
self.assertEqual(records[0].id, record_movie.id)
assert len(records) == 1
assert records[0].id == record_movie.id

records = list(Record.objects(posts__content=record_text.posts[0].content))
self.assertEqual(len(records), 1)
self.assertEqual(records[0].id, record_text.id)
assert len(records) == 1
assert records[0].id == record_text.id

self.assertEqual(Record.objects(posts__title="foo").count(), 2)
assert Record.objects(posts__title="foo").count() == 2


class TestGenericEmbeddedDocumentField(MongoDBTestCase):
@@ -169,13 +171,13 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase):
person.save()

person = Person.objects.first()
self.assertIsInstance(person.like, Car)
assert isinstance(person.like, Car)

person.like = Dish(food="arroz", number=15)
person.save()

person = Person.objects.first()
self.assertIsInstance(person.like, Dish)
assert isinstance(person.like, Dish)

def test_generic_embedded_document_choices(self):
"""Ensure you can limit GenericEmbeddedDocument choices."""
@@ -195,13 +197,14 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase):

person = Person(name="Test User")
person.like = Car(name="Fiat")
self.assertRaises(ValidationError, person.validate)
with pytest.raises(ValidationError):
person.validate()

person.like = Dish(food="arroz", number=15)
person.save()

person = Person.objects.first()
self.assertIsInstance(person.like, Dish)
assert isinstance(person.like, Dish)

def test_generic_list_embedded_document_choices(self):
"""Ensure you can limit GenericEmbeddedDocument choices inside
@@ -223,13 +226,14 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase):

person = Person(name="Test User")
person.likes = [Car(name="Fiat")]
self.assertRaises(ValidationError, person.validate)
with pytest.raises(ValidationError):
person.validate()

person.likes = [Dish(food="arroz", number=15)]
person.save()

person = Person.objects.first()
self.assertIsInstance(person.likes[0], Dish)
assert isinstance(person.likes[0], Dish)

def test_choices_validation_documents(self):
"""
@@ -265,7 +269,8 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase):

# Single Entry Failure
post = BlogPost(comments=[ModeratorComments(author="mod1", message="message1")])
self.assertRaises(ValidationError, post.save)
with pytest.raises(ValidationError):
post.save()

# Mixed Entry Failure
post = BlogPost(
@@ -274,7 +279,8 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase):
UserComments(author="user2", message="message2"),
]
)
self.assertRaises(ValidationError, post.save)
with pytest.raises(ValidationError):
post.save()

def test_choices_validation_documents_inheritance(self):
"""
@@ -313,16 +319,16 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase):
p2 = Person(settings=NonAdminSettings(foo2="bar2")).save()

# Test non existing attribute
with self.assertRaises(InvalidQueryError) as ctx_err:
with pytest.raises(InvalidQueryError) as exc_info:
Person.objects(settings__notexist="bar").first()
self.assertEqual(unicode(ctx_err.exception), u'Cannot resolve field "notexist"')
assert str(exc_info.value) == 'Cannot resolve field "notexist"'

with self.assertRaises(LookUpError):
with pytest.raises(LookUpError):
Person.objects.only("settings.notexist")

# Test existing attribute
self.assertEqual(Person.objects(settings__foo1="bar1").first().id, p1.id)
self.assertEqual(Person.objects(settings__foo2="bar2").first().id, p2.id)
assert Person.objects(settings__foo1="bar1").first().id == p1.id
assert Person.objects(settings__foo2="bar2").first().id == p2.id

def test_query_generic_embedded_document_attribute_with_inheritance(self):
class BaseSettings(EmbeddedDocument):
@@ -341,10 +347,37 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase):
p.save()

# Test non existing attribute
with self.assertRaises(InvalidQueryError) as ctx_err:
self.assertEqual(Person.objects(settings__notexist="bar").first().id, p.id)
self.assertEqual(unicode(ctx_err.exception), u'Cannot resolve field "notexist"')
with pytest.raises(InvalidQueryError) as exc_info:
assert Person.objects(settings__notexist="bar").first().id == p.id
assert str(exc_info.value) == 'Cannot resolve field "notexist"'

# Test existing attribute
self.assertEqual(Person.objects(settings__base_foo="basefoo").first().id, p.id)
self.assertEqual(Person.objects(settings__sub_foo="subfoo").first().id, p.id)
assert Person.objects(settings__base_foo="basefoo").first().id == p.id
assert Person.objects(settings__sub_foo="subfoo").first().id == p.id

def test_deepcopy_set__instance(self):
"""Ensure that the _instance attribute on EmbeddedDocument exists after a deepcopy"""

class Wallet(EmbeddedDocument):
money = IntField()

class Person(Document):
wallet = EmbeddedDocumentField(Wallet)
wallet_map = MapField(EmbeddedDocumentField(Wallet))

# Test on fresh EmbeddedDoc
emb_doc = Wallet(money=1)
assert emb_doc._instance is None
copied_emb_doc = deepcopy(emb_doc)
assert copied_emb_doc._instance is None

# Test on attached EmbeddedDoc
doc = Person(
id=ObjectId(), wallet=Wallet(money=2), wallet_map={"test": Wallet(money=2)}
)
assert doc.wallet._instance == doc
copied_emb_doc = deepcopy(doc.wallet)
assert copied_emb_doc._instance is None

copied_map_emb_doc = deepcopy(doc.wallet_map)
assert copied_map_emb_doc["test"]._instance is None

tests/fields/test_enum_field.py (new file, 145 lines)
@@ -0,0 +1,145 @@
from enum import Enum

import pytest
from bson import InvalidDocument

from mongoengine import Document, EnumField, ValidationError
from tests.utils import MongoDBTestCase, get_as_pymongo


class Status(Enum):
NEW = "new"
DONE = "done"


class Color(Enum):
RED = 1
BLUE = 2


class ModelWithEnum(Document):
status = EnumField(Status)


class TestStringEnumField(MongoDBTestCase):
def test_storage(self):
model = ModelWithEnum(status=Status.NEW).save()
assert get_as_pymongo(model) == {"_id": model.id, "status": "new"}

def test_set_enum(self):
ModelWithEnum.drop_collection()
ModelWithEnum(status=Status.NEW).save()
assert ModelWithEnum.objects(status=Status.NEW).count() == 1
assert ModelWithEnum.objects.first().status == Status.NEW

def test_set_by_value(self):
ModelWithEnum.drop_collection()
ModelWithEnum(status="new").save()
assert ModelWithEnum.objects.first().status == Status.NEW

def test_filter(self):
ModelWithEnum.drop_collection()
ModelWithEnum(status="new").save()
assert ModelWithEnum.objects(status="new").count() == 1
assert ModelWithEnum.objects(status=Status.NEW).count() == 1
assert ModelWithEnum.objects(status=Status.DONE).count() == 0

def test_change_value(self):
m = ModelWithEnum(status="new")
m.status = Status.DONE
m.save()
assert m.status == Status.DONE

m.status = "wrong"
assert m.status == "wrong"
with pytest.raises(ValidationError):
m.validate()

def test_set_default(self):
class ModelWithDefault(Document):
status = EnumField(Status, default=Status.DONE)

m = ModelWithDefault().save()
assert m.status == Status.DONE

def test_enum_field_can_be_empty(self):
ModelWithEnum.drop_collection()
m = ModelWithEnum().save()
assert m.status is None
assert ModelWithEnum.objects()[0].status is None
assert ModelWithEnum.objects(status=None).count() == 1

def test_set_none_explicitly(self):
ModelWithEnum.drop_collection()
ModelWithEnum(status=None).save()
assert ModelWithEnum.objects.first().status is None

def test_cannot_create_model_with_wrong_enum_value(self):
m = ModelWithEnum(status="wrong_one")
with pytest.raises(ValidationError):
m.validate()

def test_partial_choices(self):
partial = [Status.DONE]
enum_field = EnumField(Status, choices=partial)
assert enum_field.choices == partial

class FancyDoc(Document):
z = enum_field

FancyDoc(z=Status.DONE).validate()
with pytest.raises(
ValidationError, match=r"Value must be one of .*Status.DONE"
):
FancyDoc(z=Status.NEW).validate()

def test_wrong_choices(self):
with pytest.raises(ValueError, match="Invalid choices"):
EnumField(Status, choices=["my", "custom", "options"])
with pytest.raises(ValueError, match="Invalid choices"):
EnumField(Status, choices=[Color.RED])
with pytest.raises(ValueError, match="Invalid choices"):
EnumField(Status, choices=[Status.DONE, Color.RED])


class ModelWithColor(Document):
color = EnumField(Color, default=Color.RED)


class TestIntEnumField(MongoDBTestCase):
def test_enum_with_int(self):
ModelWithColor.drop_collection()
m = ModelWithColor().save()
assert m.color == Color.RED
assert ModelWithColor.objects(color=Color.RED).count() == 1
assert ModelWithColor.objects(color=1).count() == 1
assert ModelWithColor.objects(color=2).count() == 0

def test_create_int_enum_by_value(self):
model = ModelWithColor(color=2).save()
assert model.color == Color.BLUE

def test_storage_enum_with_int(self):
model = ModelWithColor(color=Color.BLUE).save()
assert get_as_pymongo(model) == {"_id": model.id, "color": 2}

def test_validate_model(self):
with pytest.raises(ValidationError, match="Value must be one of"):
ModelWithColor(color=3).validate()

with pytest.raises(ValidationError, match="Value must be one of"):
ModelWithColor(color="wrong_type").validate()


class TestFunkyEnumField(MongoDBTestCase):
def test_enum_incompatible_bson_type_fails_during_save(self):
class FunkyColor(Enum):
YELLOW = object()

class ModelWithFunkyColor(Document):
color = EnumField(FunkyColor)

m = ModelWithFunkyColor(color=FunkyColor.YELLOW)

with pytest.raises(InvalidDocument, match="[cC]annot encode object"):
m.save()

File diff suppressed because it is too large
@@ -1,19 +1,17 @@
# -*- coding: utf-8 -*-
import copy
import os
import unittest
import tempfile
import unittest
from io import BytesIO

import gridfs
import six
import pytest

from nose.plugins.skip import SkipTest
from mongoengine import *
from mongoengine.connection import get_db
from mongoengine.python_support import StringIO

try:
    from PIL import Image
    from PIL import Image  # noqa: F401

    HAS_PIL = True
except ImportError:
@@ -21,6 +19,8 @@ except ImportError:

from tests.utils import MongoDBTestCase

require_pil = pytest.mark.skipif(not HAS_PIL, reason="PIL not installed")

TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), "mongoengine.png")
TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), "mongodb_leaf.png")

@@ -28,14 +28,14 @@ TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), "mongodb_leaf.png")
def get_file(path):
    """Use a BytesIO instead of a file to allow
    to have a one-liner and avoid that the file remains opened"""
    bytes_io = StringIO()
    bytes_io = BytesIO()
    with open(path, "rb") as f:
        bytes_io.write(f.read())
    bytes_io.seek(0)
    return bytes_io


class FileTest(MongoDBTestCase):
class TestFileField(MongoDBTestCase):
    def tearDown(self):
        self.db.drop_collection("fs.files")
        self.db.drop_collection("fs.chunks")
@@ -48,15 +48,14 @@ class FileTest(MongoDBTestCase):
        DemoFile.objects.create()

    def test_file_fields(self):
        """Ensure that file fields can be written to and their data retrieved
        """
        """Ensure that file fields can be written to and their data retrieved"""

        class PutFile(Document):
            the_file = FileField()

        PutFile.drop_collection()

        text = six.b("Hello, World!")
        text = b"Hello, World!"
        content_type = "text/plain"

        putfile = PutFile()
@@ -64,13 +63,13 @@ class FileTest(MongoDBTestCase):
        putfile.save()

        result = PutFile.objects.first()
        self.assertEqual(putfile, result)
        self.assertEqual(
            "%s" % result.the_file,
            "<GridFSProxy: hello (%s)>" % result.the_file.grid_id,
        assert putfile == result
        assert (
            "%s" % result.the_file
            == "<GridFSProxy: hello (%s)>" % result.the_file.grid_id
        )
        self.assertEqual(result.the_file.read(), text)
        self.assertEqual(result.the_file.content_type, content_type)
        assert result.the_file.read() == text
        assert result.the_file.content_type == content_type
        result.the_file.delete()  # Remove file from GridFS
        PutFile.objects.delete()

@@ -78,29 +77,28 @@ class FileTest(MongoDBTestCase):
        PutFile.drop_collection()

        putfile = PutFile()
        putstring = StringIO()
        putstring = BytesIO()
        putstring.write(text)
        putstring.seek(0)
        putfile.the_file.put(putstring, content_type=content_type)
        putfile.save()

        result = PutFile.objects.first()
        self.assertEqual(putfile, result)
        self.assertEqual(result.the_file.read(), text)
        self.assertEqual(result.the_file.content_type, content_type)
        assert putfile == result
        assert result.the_file.read() == text
        assert result.the_file.content_type == content_type
        result.the_file.delete()

    def test_file_fields_stream(self):
        """Ensure that file fields can be written to and their data retrieved
        """
        """Ensure that file fields can be written to and their data retrieved"""

        class StreamFile(Document):
            the_file = FileField()

        StreamFile.drop_collection()

        text = six.b("Hello, World!")
        more_text = six.b("Foo Bar")
        text = b"Hello, World!"
        more_text = b"Foo Bar"
        content_type = "text/plain"

        streamfile = StreamFile()
@@ -111,19 +109,19 @@ class FileTest(MongoDBTestCase):
        streamfile.save()

        result = StreamFile.objects.first()
        self.assertEqual(streamfile, result)
        self.assertEqual(result.the_file.read(), text + more_text)
        self.assertEqual(result.the_file.content_type, content_type)
        assert streamfile == result
        assert result.the_file.read() == text + more_text
        assert result.the_file.content_type == content_type
        result.the_file.seek(0)
        self.assertEqual(result.the_file.tell(), 0)
        self.assertEqual(result.the_file.read(len(text)), text)
        self.assertEqual(result.the_file.tell(), len(text))
        self.assertEqual(result.the_file.read(len(more_text)), more_text)
        self.assertEqual(result.the_file.tell(), len(text + more_text))
        assert result.the_file.tell() == 0
        assert result.the_file.read(len(text)) == text
        assert result.the_file.tell() == len(text)
        assert result.the_file.read(len(more_text)) == more_text
        assert result.the_file.tell() == len(text + more_text)
        result.the_file.delete()

        # Ensure deleted file returns None
        self.assertTrue(result.the_file.read() is None)
        assert result.the_file.read() is None

    def test_file_fields_stream_after_none(self):
        """Ensure that a file field can be written to after it has been saved as
@@ -135,9 +133,8 @@ class FileTest(MongoDBTestCase):

        StreamFile.drop_collection()

        text = six.b("Hello, World!")
        more_text = six.b("Foo Bar")
        content_type = "text/plain"
        text = b"Hello, World!"
        more_text = b"Foo Bar"

        streamfile = StreamFile()
        streamfile.save()
@@ -148,26 +145,26 @@ class FileTest(MongoDBTestCase):
        streamfile.save()

        result = StreamFile.objects.first()
        self.assertEqual(streamfile, result)
        self.assertEqual(result.the_file.read(), text + more_text)
        # self.assertEqual(result.the_file.content_type, content_type)
        assert streamfile == result
        assert result.the_file.read() == text + more_text
        # assert result.the_file.content_type == content_type
        result.the_file.seek(0)
        self.assertEqual(result.the_file.tell(), 0)
        self.assertEqual(result.the_file.read(len(text)), text)
        self.assertEqual(result.the_file.tell(), len(text))
        self.assertEqual(result.the_file.read(len(more_text)), more_text)
        self.assertEqual(result.the_file.tell(), len(text + more_text))
        assert result.the_file.tell() == 0
        assert result.the_file.read(len(text)) == text
        assert result.the_file.tell() == len(text)
        assert result.the_file.read(len(more_text)) == more_text
        assert result.the_file.tell() == len(text + more_text)
        result.the_file.delete()

        # Ensure deleted file returns None
        self.assertTrue(result.the_file.read() is None)
        assert result.the_file.read() is None

    def test_file_fields_set(self):
        class SetFile(Document):
            the_file = FileField()

        text = six.b("Hello, World!")
        more_text = six.b("Foo Bar")
        text = b"Hello, World!"
        more_text = b"Foo Bar"

        SetFile.drop_collection()

@@ -176,16 +173,16 @@ class FileTest(MongoDBTestCase):
        setfile.save()

        result = SetFile.objects.first()
        self.assertEqual(setfile, result)
        self.assertEqual(result.the_file.read(), text)
        assert setfile == result
        assert result.the_file.read() == text

        # Try replacing file with new one
        result.the_file.replace(more_text)
        result.save()

        result = SetFile.objects.first()
        self.assertEqual(setfile, result)
        self.assertEqual(result.the_file.read(), more_text)
        assert setfile == result
        assert result.the_file.read() == more_text
        result.the_file.delete()

    def test_file_field_no_default(self):
@@ -195,7 +192,7 @@ class FileTest(MongoDBTestCase):
        GridDocument.drop_collection()

        with tempfile.TemporaryFile() as f:
            f.write(six.b("Hello World!"))
            f.write(b"Hello World!")
            f.flush()

            # Test without default
@@ -205,32 +202,31 @@ class FileTest(MongoDBTestCase):
            doc_b = GridDocument.objects.with_id(doc_a.id)
            doc_b.the_file.replace(f, filename="doc_b")
            doc_b.save()
            self.assertNotEqual(doc_b.the_file.grid_id, None)
            assert doc_b.the_file.grid_id is not None

            # Test it matches
            doc_c = GridDocument.objects.with_id(doc_b.id)
            self.assertEqual(doc_b.the_file.grid_id, doc_c.the_file.grid_id)
            assert doc_b.the_file.grid_id == doc_c.the_file.grid_id

            # Test with default
            doc_d = GridDocument(the_file=six.b(""))
            doc_d = GridDocument(the_file=b"")
            doc_d.save()

            doc_e = GridDocument.objects.with_id(doc_d.id)
            self.assertEqual(doc_d.the_file.grid_id, doc_e.the_file.grid_id)
            assert doc_d.the_file.grid_id == doc_e.the_file.grid_id

            doc_e.the_file.replace(f, filename="doc_e")
            doc_e.save()

            doc_f = GridDocument.objects.with_id(doc_e.id)
            self.assertEqual(doc_e.the_file.grid_id, doc_f.the_file.grid_id)
            assert doc_e.the_file.grid_id == doc_f.the_file.grid_id

        db = GridDocument._get_db()
        grid_fs = gridfs.GridFS(db)
        self.assertEqual(["doc_b", "doc_e"], grid_fs.list())
        assert ["doc_b", "doc_e"] == grid_fs.list()

    def test_file_uniqueness(self):
        """Ensure that each instance of a FileField is unique
        """
        """Ensure that each instance of a FileField is unique"""

        class TestFile(Document):
            name = StringField()
@@ -239,15 +235,15 @@ class FileTest(MongoDBTestCase):
        # First instance
        test_file = TestFile()
        test_file.name = "Hello, World!"
        test_file.the_file.put(six.b("Hello, World!"))
        test_file.the_file.put(b"Hello, World!")
        test_file.save()

        # Second instance
        test_file_dupe = TestFile()
        data = test_file_dupe.the_file.read()  # Should be None

        self.assertNotEqual(test_file.name, test_file_dupe.name)
        self.assertNotEqual(test_file.the_file.read(), data)
        assert test_file.name != test_file_dupe.name
        assert test_file.the_file.read() != data

        TestFile.drop_collection()

@@ -268,8 +264,8 @@ class FileTest(MongoDBTestCase):
        marmot.save()

        marmot = Animal.objects.get()
        self.assertEqual(marmot.photo.content_type, "image/jpeg")
        self.assertEqual(marmot.photo.foo, "bar")
        assert marmot.photo.content_type == "image/jpeg"
        assert marmot.photo.foo == "bar"

    def test_file_reassigning(self):
        class TestFile(Document):
@@ -278,16 +274,15 @@ class FileTest(MongoDBTestCase):
        TestFile.drop_collection()

        test_file = TestFile(the_file=get_file(TEST_IMAGE_PATH)).save()
        self.assertEqual(test_file.the_file.get().length, 8313)
        assert test_file.the_file.get().length == 8313

        test_file = TestFile.objects.first()
        test_file.the_file = get_file(TEST_IMAGE2_PATH)
        test_file.save()
        self.assertEqual(test_file.the_file.get().length, 4971)
        assert test_file.the_file.get().length == 4971

    def test_file_boolean(self):
        """Ensure that a boolean test of a FileField indicates its presence
        """
        """Ensure that a boolean test of a FileField indicates its presence"""

        class TestFile(Document):
            the_file = FileField()
@@ -295,13 +290,13 @@ class FileTest(MongoDBTestCase):
        TestFile.drop_collection()

        test_file = TestFile()
        self.assertFalse(bool(test_file.the_file))
        test_file.the_file.put(six.b("Hello, World!"), content_type="text/plain")
        assert not bool(test_file.the_file)
        test_file.the_file.put(b"Hello, World!", content_type="text/plain")
        test_file.save()
        self.assertTrue(bool(test_file.the_file))
        assert bool(test_file.the_file)

        test_file = TestFile.objects.first()
        self.assertEqual(test_file.the_file.content_type, "text/plain")
        assert test_file.the_file.content_type == "text/plain"

    def test_file_cmp(self):
        """Test comparing against other types"""
@@ -310,15 +305,15 @@ class FileTest(MongoDBTestCase):
            the_file = FileField()

        test_file = TestFile()
        self.assertNotIn(test_file.the_file, [{"test": 1}])
        assert test_file.the_file not in [{"test": 1}]

    def test_file_disk_space(self):
        """ Test disk space usage when we delete/replace a file """
        """Test disk space usage when we delete/replace a file"""

        class TestFile(Document):
            the_file = FileField()

        text = six.b("Hello, World!")
        text = b"Hello, World!"
        content_type = "text/plain"

        testfile = TestFile()
@@ -330,16 +325,16 @@ class FileTest(MongoDBTestCase):

        files = db.fs.files.find()
        chunks = db.fs.chunks.find()
        self.assertEqual(len(list(files)), 1)
        self.assertEqual(len(list(chunks)), 1)
        assert len(list(files)) == 1
        assert len(list(chunks)) == 1

        # Deleting the docoument should delete the files
        testfile.delete()

        files = db.fs.files.find()
        chunks = db.fs.chunks.find()
        self.assertEqual(len(list(files)), 0)
        self.assertEqual(len(list(chunks)), 0)
        assert len(list(files)) == 0
        assert len(list(chunks)) == 0

        # Test case where we don't store a file in the first place
        testfile = TestFile()
@@ -347,48 +342,46 @@ class FileTest(MongoDBTestCase):

        files = db.fs.files.find()
        chunks = db.fs.chunks.find()
        self.assertEqual(len(list(files)), 0)
        self.assertEqual(len(list(chunks)), 0)
        assert len(list(files)) == 0
        assert len(list(chunks)) == 0

        testfile.delete()

        files = db.fs.files.find()
        chunks = db.fs.chunks.find()
        self.assertEqual(len(list(files)), 0)
        self.assertEqual(len(list(chunks)), 0)
        assert len(list(files)) == 0
        assert len(list(chunks)) == 0

        # Test case where we overwrite the file
        testfile = TestFile()
        testfile.the_file.put(text, content_type=content_type, filename="hello")
        testfile.save()

        text = six.b("Bonjour, World!")
        text = b"Bonjour, World!"
        testfile.the_file.replace(text, content_type=content_type, filename="hello")
        testfile.save()

        files = db.fs.files.find()
        chunks = db.fs.chunks.find()
        self.assertEqual(len(list(files)), 1)
        self.assertEqual(len(list(chunks)), 1)
        assert len(list(files)) == 1
        assert len(list(chunks)) == 1

        testfile.delete()

        files = db.fs.files.find()
        chunks = db.fs.chunks.find()
        self.assertEqual(len(list(files)), 0)
        self.assertEqual(len(list(chunks)), 0)
        assert len(list(files)) == 0
        assert len(list(chunks)) == 0

    @require_pil
    def test_image_field(self):
        if not HAS_PIL:
            raise SkipTest("PIL not installed")

        class TestImage(Document):
            image = ImageField()

        TestImage.drop_collection()

        with tempfile.TemporaryFile() as f:
            f.write(six.b("Hello World!"))
            f.write(b"Hello World!")
            f.flush()

            t = TestImage()
@@ -396,9 +389,7 @@ class FileTest(MongoDBTestCase):
                t.image.put(f)
                self.fail("Should have raised an invalidation error")
            except ValidationError as e:
                self.assertEqual(
                    "%s" % e, "Invalid image: cannot identify image file %s" % f
                )
                assert "%s" % e == "Invalid image: cannot identify image file %s" % f

        t = TestImage()
        t.image.put(get_file(TEST_IMAGE_PATH))
@@ -406,58 +397,31 @@ class FileTest(MongoDBTestCase):

        t = TestImage.objects.first()

        self.assertEqual(t.image.format, "PNG")
        assert t.image.format == "PNG"

        w, h = t.image.size
        self.assertEqual(w, 371)
        self.assertEqual(h, 76)
        assert w == 371
        assert h == 76

        t.image.delete()

    @require_pil
    def test_image_field_reassigning(self):
        if not HAS_PIL:
            raise SkipTest("PIL not installed")

        class TestFile(Document):
            the_file = ImageField()

        TestFile.drop_collection()

        test_file = TestFile(the_file=get_file(TEST_IMAGE_PATH)).save()
        self.assertEqual(test_file.the_file.size, (371, 76))
        assert test_file.the_file.size == (371, 76)

        test_file = TestFile.objects.first()
        test_file.the_file = get_file(TEST_IMAGE2_PATH)
        test_file.save()
        self.assertEqual(test_file.the_file.size, (45, 101))
        assert test_file.the_file.size == (45, 101)

    @require_pil
    def test_image_field_resize(self):
        if not HAS_PIL:
            raise SkipTest("PIL not installed")

        class TestImage(Document):
            image = ImageField(size=(185, 37))

        TestImage.drop_collection()

        t = TestImage()
        t.image.put(get_file(TEST_IMAGE_PATH))
        t.save()

        t = TestImage.objects.first()

        self.assertEqual(t.image.format, "PNG")
        w, h = t.image.size

        self.assertEqual(w, 185)
        self.assertEqual(h, 37)

        t.image.delete()

    def test_image_field_resize_force(self):
        if not HAS_PIL:
            raise SkipTest("PIL not installed")

        class TestImage(Document):
            image = ImageField(size=(185, 37, True))

@@ -469,20 +433,18 @@ class FileTest(MongoDBTestCase):

        t = TestImage.objects.first()

        self.assertEqual(t.image.format, "PNG")
        assert t.image.format == "PNG"
        w, h = t.image.size

        self.assertEqual(w, 185)
        self.assertEqual(h, 37)
        assert w == 185
        assert h == 37

        t.image.delete()

    def test_image_field_thumbnail(self):
        if not HAS_PIL:
            raise SkipTest("PIL not installed")

    @require_pil
    def test_image_field_resize_force(self):
        class TestImage(Document):
            image = ImageField(thumbnail_size=(92, 18))
            image = ImageField(size=(185, 37, True))

        TestImage.drop_collection()

@@ -492,9 +454,30 @@ class FileTest(MongoDBTestCase):

        t = TestImage.objects.first()

        self.assertEqual(t.image.thumbnail.format, "PNG")
        self.assertEqual(t.image.thumbnail.width, 92)
        self.assertEqual(t.image.thumbnail.height, 18)
        assert t.image.format == "PNG"
        w, h = t.image.size

        assert w == 185
        assert h == 37

        t.image.delete()

    @require_pil
    def test_image_field_thumbnail(self):
        class TestImage(Document):
            image = ImageField(thumbnail_size=(92, 18, True))

        TestImage.drop_collection()

        t = TestImage()
        t.image.put(get_file(TEST_IMAGE_PATH))
        t.save()

        t = TestImage.objects.first()

        assert t.image.thumbnail.format == "PNG"
        assert t.image.thumbnail.width == 92
        assert t.image.thumbnail.height == 18

        t.image.delete()

@@ -514,21 +497,21 @@ class FileTest(MongoDBTestCase):
        # First instance
        test_file = TestFile()
        test_file.name = "Hello, World!"
        test_file.the_file.put(six.b("Hello, World!"), name="hello.txt")
        test_file.the_file.put(b"Hello, World!", name="hello.txt")
        test_file.save()

        data = get_db("test_files").macumba.files.find_one()
        self.assertEqual(data.get("name"), "hello.txt")
        assert data.get("name") == "hello.txt"

        test_file = TestFile.objects.first()
        self.assertEqual(test_file.the_file.read(), six.b("Hello, World!"))
        assert test_file.the_file.read() == b"Hello, World!"

        test_file = TestFile.objects.first()
        test_file.the_file = six.b("HELLO, WORLD!")
        test_file.the_file = b"Hello, World!"
        test_file.save()

        test_file = TestFile.objects.first()
        self.assertEqual(test_file.the_file.read(), six.b("HELLO, WORLD!"))
        assert test_file.the_file.read() == b"Hello, World!"

    def test_copyable(self):
        class PutFile(Document):
@@ -536,7 +519,7 @@ class FileTest(MongoDBTestCase):

        PutFile.drop_collection()

        text = six.b("Hello, World!")
        text = b"Hello, World!"
        content_type = "text/plain"

        putfile = PutFile()
@@ -546,14 +529,11 @@ class FileTest(MongoDBTestCase):
        class TestFile(Document):
            name = StringField()

        self.assertEqual(putfile, copy.copy(putfile))
        self.assertEqual(putfile, copy.deepcopy(putfile))
        assert putfile == copy.copy(putfile)
        assert putfile == copy.deepcopy(putfile)

    @require_pil
    def test_get_image_by_grid_id(self):

        if not HAS_PIL:
            raise SkipTest("PIL not installed")

        class TestImage(Document):

            image1 = ImageField()
@@ -569,9 +549,7 @@ class FileTest(MongoDBTestCase):
        test = TestImage.objects.first()
        grid_id = test.image1.grid_id

        self.assertEqual(
            1, TestImage.objects(Q(image1=grid_id) or Q(image2=grid_id)).count()
        )
        assert 1 == TestImage.objects(Q(image1=grid_id) or Q(image2=grid_id)).count()

    def test_complex_field_filefield(self):
        """Ensure you can add meta data to file"""
@@ -593,9 +571,9 @@ class FileTest(MongoDBTestCase):
        marmot.save()

        marmot = Animal.objects.get()
        self.assertEqual(marmot.photos[0].content_type, "image/jpeg")
        self.assertEqual(marmot.photos[0].foo, "bar")
        self.assertEqual(marmot.photos[0].get().length, 8313)
        assert marmot.photos[0].content_type == "image/jpeg"
        assert marmot.photos[0].foo == "bar"
        assert marmot.photos[0].get().length == 8313


if __name__ == "__main__":
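A standalone sketch (not part of the diff) of the FileField/GridFS round trip these tests cover, using the py3 bytes literals the diff migrates to, and assuming a local MongoDB:

from io import BytesIO

from mongoengine import Document, FileField, connect

class PutFile(Document):
    the_file = FileField()

connect(db="mongoenginetest")
PutFile.drop_collection()

putfile = PutFile()
putfile.the_file.put(BytesIO(b"Hello, World!"), content_type="text/plain")
putfile.save()  # file data lives in fs.files / fs.chunks, not in the document

result = PutFile.objects.first()
assert result.the_file.read() == b"Hello, World!"
assert result.the_file.content_type == "text/plain"
result.the_file.delete()  # remove the underlying GridFS file as well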
@@ -1,8 +1,6 @@
# -*- coding: utf-8 -*-
import six
import pytest

from mongoengine import *

from tests.utils import MongoDBTestCase


@@ -16,12 +14,11 @@ class TestFloatField(MongoDBTestCase):
        TestDocument(float_fld=None).save()
        TestDocument(float_fld=1).save()

        self.assertEqual(1, TestDocument.objects(float_fld__ne=None).count())
        self.assertEqual(1, TestDocument.objects(float_fld__ne=1).count())
        assert 1 == TestDocument.objects(float_fld__ne=None).count()
        assert 1 == TestDocument.objects(float_fld__ne=1).count()

    def test_validation(self):
        """Ensure that invalid values cannot be assigned to float fields.
        """
        """Ensure that invalid values cannot be assigned to float fields."""

        class Person(Document):
            height = FloatField(min_value=0.1, max_value=3.5)
@@ -34,25 +31,29 @@ class TestFloatField(MongoDBTestCase):
        person.validate()

        person.height = "2.0"
        self.assertRaises(ValidationError, person.validate)
        with pytest.raises(ValidationError):
            person.validate()

        person.height = 0.01
        self.assertRaises(ValidationError, person.validate)
        with pytest.raises(ValidationError):
            person.validate()

        person.height = 4.0
        self.assertRaises(ValidationError, person.validate)
        with pytest.raises(ValidationError):
            person.validate()

        person_2 = Person(height="something invalid")
        self.assertRaises(ValidationError, person_2.validate)
        with pytest.raises(ValidationError):
            person_2.validate()

        big_person = BigPerson()

        for value, value_type in enumerate(six.integer_types):
            big_person.height = value_type(value)
        big_person.height = int(0)
        big_person.validate()

        big_person.height = 2 ** 500
        big_person.validate()

        big_person.height = 2 ** 100000  # Too big for a float value
        self.assertRaises(ValidationError, big_person.validate)
        with pytest.raises(ValidationError):
            big_person.validate()
@@ -1,23 +1,16 @@
# -*- coding: utf-8 -*-
import unittest

from mongoengine import *
from mongoengine.connection import get_db

__all__ = ("GeoFieldTest",)
from tests.utils import MongoDBTestCase


class GeoFieldTest(unittest.TestCase):
    def setUp(self):
        connect(db="mongoenginetest")
        self.db = get_db()

class TestGeoField(MongoDBTestCase):
    def _test_for_expected_error(self, Cls, loc, expected):
        try:
            Cls(loc=loc).validate()
            self.fail("Should not validate the location {0}".format(loc))
            self.fail(f"Should not validate the location {loc}")
        except ValidationError as e:
            self.assertEqual(expected, e.to_dict()["loc"])
            assert expected == e.to_dict()["loc"]

    def test_geopoint_validation(self):
        class Location(Document):
@@ -297,15 +290,14 @@ class GeoFieldTest(unittest.TestCase):
        Location(loc=[[[[1, 2], [3, 4], [5, 6], [1, 2]]]]).validate()

    def test_indexes_geopoint(self):
        """Ensure that indexes are created automatically for GeoPointFields.
        """
        """Ensure that indexes are created automatically for GeoPointFields."""

        class Event(Document):
            title = StringField()
            location = GeoPointField()

        geo_indicies = Event._geo_indices()
        self.assertEqual(geo_indicies, [{"fields": [("location", "2d")]}])
        assert geo_indicies == [{"fields": [("location", "2d")]}]

    def test_geopoint_embedded_indexes(self):
        """Ensure that indexes are created automatically for GeoPointFields on
@@ -321,11 +313,10 @@ class GeoFieldTest(unittest.TestCase):
            venue = EmbeddedDocumentField(Venue)

        geo_indicies = Event._geo_indices()
        self.assertEqual(geo_indicies, [{"fields": [("venue.location", "2d")]}])
        assert geo_indicies == [{"fields": [("venue.location", "2d")]}]

    def test_indexes_2dsphere(self):
        """Ensure that indexes are created automatically for GeoPointFields.
        """
        """Ensure that indexes are created automatically for GeoPointFields."""

        class Event(Document):
            title = StringField()
@@ -334,13 +325,12 @@ class GeoFieldTest(unittest.TestCase):
            polygon = PolygonField()

        geo_indicies = Event._geo_indices()
        self.assertIn({"fields": [("line", "2dsphere")]}, geo_indicies)
        self.assertIn({"fields": [("polygon", "2dsphere")]}, geo_indicies)
        self.assertIn({"fields": [("point", "2dsphere")]}, geo_indicies)
        assert {"fields": [("line", "2dsphere")]} in geo_indicies
        assert {"fields": [("polygon", "2dsphere")]} in geo_indicies
        assert {"fields": [("point", "2dsphere")]} in geo_indicies

    def test_indexes_2dsphere_embedded(self):
        """Ensure that indexes are created automatically for GeoPointFields.
        """
        """Ensure that indexes are created automatically for GeoPointFields."""

        class Venue(EmbeddedDocument):
            name = StringField()
@@ -353,9 +343,9 @@ class GeoFieldTest(unittest.TestCase):
            venue = EmbeddedDocumentField(Venue)

        geo_indicies = Event._geo_indices()
        self.assertIn({"fields": [("venue.line", "2dsphere")]}, geo_indicies)
        self.assertIn({"fields": [("venue.polygon", "2dsphere")]}, geo_indicies)
        self.assertIn({"fields": [("venue.point", "2dsphere")]}, geo_indicies)
        assert {"fields": [("venue.line", "2dsphere")]} in geo_indicies
        assert {"fields": [("venue.polygon", "2dsphere")]} in geo_indicies
        assert {"fields": [("venue.point", "2dsphere")]} in geo_indicies

    def test_geo_indexes_recursion(self):
        class Location(Document):
@@ -371,12 +361,12 @@ class GeoFieldTest(unittest.TestCase):

        Parent(name="Berlin").save()
        info = Parent._get_collection().index_information()
        self.assertNotIn("location_2d", info)
        assert "location_2d" not in info
        info = Location._get_collection().index_information()
        self.assertIn("location_2d", info)
        assert "location_2d" in info

        self.assertEqual(len(Parent._geo_indices()), 0)
        self.assertEqual(len(Location._geo_indices()), 1)
        assert len(Parent._geo_indices()) == 0
        assert len(Location._geo_indices()) == 1

    def test_geo_indexes_auto_index(self):

@@ -387,16 +377,16 @@ class GeoFieldTest(unittest.TestCase):

        meta = {"indexes": [[("location", "2dsphere"), ("datetime", 1)]]}

        self.assertEqual([], Log._geo_indices())
        assert Log._geo_indices() == []

        Log.drop_collection()
        Log.ensure_indexes()

        info = Log._get_collection().index_information()
        self.assertEqual(
            info["location_2dsphere_datetime_1"]["key"],
            [("location", "2dsphere"), ("datetime", 1)],
        )
        assert info["location_2dsphere_datetime_1"]["key"] == [
            ("location", "2dsphere"),
            ("datetime", 1),
        ]

        # Test listing explicitly
        class Log(Document):
@@ -407,16 +397,16 @@ class GeoFieldTest(unittest.TestCase):
            "indexes": [{"fields": [("location", "2dsphere"), ("datetime", 1)]}]
        }

        self.assertEqual([], Log._geo_indices())
        assert Log._geo_indices() == []

        Log.drop_collection()
        Log.ensure_indexes()

        info = Log._get_collection().index_information()
        self.assertEqual(
            info["location_2dsphere_datetime_1"]["key"],
            [("location", "2dsphere"), ("datetime", 1)],
        )
        assert info["location_2dsphere_datetime_1"]["key"] == [
            ("location", "2dsphere"),
            ("datetime", 1),
        ]


if __name__ == "__main__":
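A standalone sketch (not part of the diff) of the automatic geo-index behavior these tests pin down, assuming a local MongoDB for ensure_indexes():

from mongoengine import Document, PointField, StringField, connect

class Event(Document):
    title = StringField()
    point = PointField()  # GeoJSON point -> implicit "2dsphere" index

# The declared fields alone determine the geo indexes:
assert {"fields": [("point", "2dsphere")]} in Event._geo_indices()

connect(db="mongoenginetest")
Event.ensure_indexes()
info = Event._get_collection().index_information()
assert any(v["key"] == [("point", "2dsphere")] for v in info.values())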
@@ -1,13 +1,12 @@
# -*- coding: utf-8 -*-
from mongoengine import *
import pytest

from mongoengine import *
from tests.utils import MongoDBTestCase


class TestIntField(MongoDBTestCase):
    def test_int_validation(self):
        """Ensure that invalid values cannot be assigned to int fields.
        """
        """Ensure that invalid values cannot be assigned to int fields."""

        class Person(Document):
            age = IntField(min_value=0, max_value=110)
@@ -23,11 +22,14 @@ class TestIntField(MongoDBTestCase):
        person.validate()

        person.age = -1
        self.assertRaises(ValidationError, person.validate)
        with pytest.raises(ValidationError):
            person.validate()
        person.age = 120
        self.assertRaises(ValidationError, person.validate)
        with pytest.raises(ValidationError):
            person.validate()
        person.age = "ten"
        self.assertRaises(ValidationError, person.validate)
        with pytest.raises(ValidationError):
            person.validate()

    def test_ne_operator(self):
        class TestDocument(Document):
@@ -38,5 +40,5 @@ class TestIntField(MongoDBTestCase):
        TestDocument(int_fld=None).save()
        TestDocument(int_fld=1).save()

        self.assertEqual(1, TestDocument.objects(int_fld__ne=None).count())
        self.assertEqual(1, TestDocument.objects(int_fld__ne=1).count())
        assert 1 == TestDocument.objects(int_fld__ne=None).count()
        assert 1 == TestDocument.objects(int_fld__ne=1).count()
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
import pytest
from bson import DBRef, ObjectId

from mongoengine import *
from mongoengine.base import LazyReference

from mongoengine.context_managers import query_counter
from tests.utils import MongoDBTestCase


@@ -11,7 +11,8 @@ class TestLazyReferenceField(MongoDBTestCase):
    def test_lazy_reference_config(self):
        # Make sure ReferenceField only accepts a document class or a string
        # with a document class name.
        self.assertRaises(ValidationError, LazyReferenceField, EmbeddedDocument)
        with pytest.raises(ValidationError):
            LazyReferenceField(EmbeddedDocument)

    def test___repr__(self):
        class Animal(Document):
@@ -25,7 +26,7 @@ class TestLazyReferenceField(MongoDBTestCase):

        animal = Animal()
        oc = Ocurrence(animal=animal)
        self.assertIn("LazyReference", repr(oc.animal))
        assert "LazyReference" in repr(oc.animal)

    def test___getattr___unknown_attr_raises_attribute_error(self):
        class Animal(Document):
@@ -39,7 +40,7 @@ class TestLazyReferenceField(MongoDBTestCase):

        animal = Animal().save()
        oc = Ocurrence(animal=animal)
        with self.assertRaises(AttributeError):
        with pytest.raises(AttributeError):
            oc.animal.not_exist

    def test_lazy_reference_simple(self):
@@ -57,19 +58,19 @@ class TestLazyReferenceField(MongoDBTestCase):
        animal = Animal(name="Leopard", tag="heavy").save()
        Ocurrence(person="test", animal=animal).save()
        p = Ocurrence.objects.get()
        self.assertIsInstance(p.animal, LazyReference)
        assert isinstance(p.animal, LazyReference)
        fetched_animal = p.animal.fetch()
        self.assertEqual(fetched_animal, animal)
        assert fetched_animal == animal
        # `fetch` keep cache on referenced document by default...
        animal.tag = "not so heavy"
        animal.save()
        double_fetch = p.animal.fetch()
        self.assertIs(fetched_animal, double_fetch)
        self.assertEqual(double_fetch.tag, "heavy")
        assert fetched_animal is double_fetch
        assert double_fetch.tag == "heavy"
        # ...unless specified otherwise
        fetch_force = p.animal.fetch(force=True)
        self.assertIsNot(fetch_force, fetched_animal)
        self.assertEqual(fetch_force.tag, "not so heavy")
        assert fetch_force is not fetched_animal
        assert fetch_force.tag == "not so heavy"

    def test_lazy_reference_fetch_invalid_ref(self):
        class Animal(Document):
@@ -87,8 +88,8 @@ class TestLazyReferenceField(MongoDBTestCase):
        Ocurrence(person="test", animal=animal).save()
        animal.delete()
        p = Ocurrence.objects.get()
        self.assertIsInstance(p.animal, LazyReference)
        with self.assertRaises(DoesNotExist):
        assert isinstance(p.animal, LazyReference)
        with pytest.raises(DoesNotExist):
            p.animal.fetch()

    def test_lazy_reference_set(self):
@@ -122,7 +123,7 @@ class TestLazyReferenceField(MongoDBTestCase):
        ):
            p = Ocurrence(person="test", animal=ref).save()
            p.reload()
            self.assertIsInstance(p.animal, LazyReference)
            assert isinstance(p.animal, LazyReference)
            p.animal.fetch()

    def test_lazy_reference_bad_set(self):
@@ -149,8 +150,8 @@ class TestLazyReferenceField(MongoDBTestCase):
            DBRef(baddoc._get_collection_name(), animal.pk),
            LazyReference(BadDoc, animal.pk),
        ):
            with self.assertRaises(ValidationError):
                p = Ocurrence(person="test", animal=bad).save()
            with pytest.raises(ValidationError):
                Ocurrence(person="test", animal=bad).save()

    def test_lazy_reference_query_conversion(self):
        """Ensure that LazyReferenceFields can be queried using objects and values
@@ -179,14 +180,14 @@ class TestLazyReferenceField(MongoDBTestCase):
        post2.save()

        post = BlogPost.objects(author=m1).first()
        self.assertEqual(post.id, post1.id)
        assert post.id == post1.id

        post = BlogPost.objects(author=m2).first()
        self.assertEqual(post.id, post2.id)
        assert post.id == post2.id

        # Same thing by passing a LazyReference instance
        post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first()
        self.assertEqual(post.id, post2.id)
        assert post.id == post2.id

    def test_lazy_reference_query_conversion_dbref(self):
        """Ensure that LazyReferenceFields can be queried using objects and values
@@ -215,14 +216,14 @@ class TestLazyReferenceField(MongoDBTestCase):
        post2.save()

        post = BlogPost.objects(author=m1).first()
        self.assertEqual(post.id, post1.id)
        assert post.id == post1.id

        post = BlogPost.objects(author=m2).first()
        self.assertEqual(post.id, post2.id)
        assert post.id == post2.id

        # Same thing by passing a LazyReference instance
        post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first()
        self.assertEqual(post.id, post2.id)
        assert post.id == post2.id

    def test_lazy_reference_passthrough(self):
        class Animal(Document):
@@ -239,20 +240,20 @@ class TestLazyReferenceField(MongoDBTestCase):
        animal = Animal(name="Leopard", tag="heavy").save()
        Ocurrence(animal=animal, animal_passthrough=animal).save()
        p = Ocurrence.objects.get()
        self.assertIsInstance(p.animal, LazyReference)
        with self.assertRaises(KeyError):
        assert isinstance(p.animal, LazyReference)
        with pytest.raises(KeyError):
            p.animal["name"]
        with self.assertRaises(AttributeError):
        with pytest.raises(AttributeError):
            p.animal.name
        self.assertEqual(p.animal.pk, animal.pk)
        assert p.animal.pk == animal.pk

        self.assertEqual(p.animal_passthrough.name, "Leopard")
        self.assertEqual(p.animal_passthrough["name"], "Leopard")
        assert p.animal_passthrough.name == "Leopard"
        assert p.animal_passthrough["name"] == "Leopard"

        # Should not be able to access referenced document's methods
        with self.assertRaises(AttributeError):
        with pytest.raises(AttributeError):
            p.animal.save
        with self.assertRaises(KeyError):
        with pytest.raises(KeyError):
            p.animal["save"]

    def test_lazy_reference_not_set(self):
@@ -269,7 +270,7 @@ class TestLazyReferenceField(MongoDBTestCase):

        Ocurrence(person="foo").save()
        p = Ocurrence.objects.get()
        self.assertIs(p.animal, None)
        assert p.animal is None

    def test_lazy_reference_equality(self):
        class Animal(Document):
@@ -280,12 +281,12 @@ class TestLazyReferenceField(MongoDBTestCase):

        animal = Animal(name="Leopard", tag="heavy").save()
        animalref = LazyReference(Animal, animal.pk)
        self.assertEqual(animal, animalref)
        self.assertEqual(animalref, animal)
        assert animal == animalref
        assert animalref == animal

        other_animalref = LazyReference(Animal, ObjectId("54495ad94c934721ede76f90"))
        self.assertNotEqual(animal, other_animalref)
        self.assertNotEqual(other_animalref, animal)
        assert animal != other_animalref
        assert other_animalref != animal

    def test_lazy_reference_embedded(self):
        class Animal(Document):
@@ -308,12 +309,12 @@ class TestLazyReferenceField(MongoDBTestCase):
        animal2 = Animal(name="cheeta").save()

        def check_fields_type(occ):
            self.assertIsInstance(occ.direct, LazyReference)
            assert isinstance(occ.direct, LazyReference)
            for elem in occ.in_list:
                self.assertIsInstance(elem, LazyReference)
            self.assertIsInstance(occ.in_embedded.direct, LazyReference)
                assert isinstance(elem, LazyReference)
            assert isinstance(occ.in_embedded.direct, LazyReference)
            for elem in occ.in_embedded.in_list:
                self.assertIsInstance(elem, LazyReference)
                assert isinstance(elem, LazyReference)

        occ = Ocurrence(
            in_list=[animal1, animal2],
@@ -329,6 +330,70 @@ class TestLazyReferenceField(MongoDBTestCase):
        occ.in_embedded.in_list = [animal1.id, animal2.id]
        check_fields_type(occ)

    def test_lazy_reference_embedded_dereferencing(self):
        # Test case for #2375

        # -- Test documents

        class Author(Document):
            name = StringField()

        class AuthorReference(EmbeddedDocument):
            author = LazyReferenceField(Author)

        class Book(Document):
            authors = EmbeddedDocumentListField(AuthorReference)

        # -- Cleanup

        Author.drop_collection()
        Book.drop_collection()

        # -- Create test data

        author_1 = Author(name="A1").save()
        author_2 = Author(name="A2").save()
        author_3 = Author(name="A3").save()
        book = Book(
            authors=[
                AuthorReference(author=author_1),
                AuthorReference(author=author_2),
                AuthorReference(author=author_3),
            ]
        ).save()

        with query_counter() as qc:
            book = Book.objects.first()
            # Accessing the list must not trigger dereferencing.
            book.authors
            assert qc == 1

        for ref in book.authors:
            with pytest.raises(AttributeError):
                ref["author"].name
            assert isinstance(ref.author, LazyReference)
            assert isinstance(ref.author.id, ObjectId)

    def test_lazy_reference_in_list_with_changed_element(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            in_list = ListField(LazyReferenceField(Animal))

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal1 = Animal(name="doggo").save()

        animal1.tag = "blue"

        occ = Ocurrence(in_list=[animal1]).save()
        animal1.save()
        assert isinstance(occ.in_list[0], LazyReference)
        assert occ.in_list[0].pk == animal1.pk


class TestGenericLazyReferenceField(MongoDBTestCase):
    def test_generic_lazy_reference_simple(self):
@@ -346,19 +411,19 @@ class TestGenericLazyReferenceField(MongoDBTestCase):
        animal = Animal(name="Leopard", tag="heavy").save()
        Ocurrence(person="test", animal=animal).save()
        p = Ocurrence.objects.get()
        self.assertIsInstance(p.animal, LazyReference)
        assert isinstance(p.animal, LazyReference)
        fetched_animal = p.animal.fetch()
        self.assertEqual(fetched_animal, animal)
        assert fetched_animal == animal
        # `fetch` keep cache on referenced document by default...
        animal.tag = "not so heavy"
        animal.save()
        double_fetch = p.animal.fetch()
        self.assertIs(fetched_animal, double_fetch)
        self.assertEqual(double_fetch.tag, "heavy")
        assert fetched_animal is double_fetch
        assert double_fetch.tag == "heavy"
        # ...unless specified otherwise
        fetch_force = p.animal.fetch(force=True)
        self.assertIsNot(fetch_force, fetched_animal)
        self.assertEqual(fetch_force.tag, "not so heavy")
        assert fetch_force is not fetched_animal
        assert fetch_force.tag == "not so heavy"

    def test_generic_lazy_reference_choices(self):
        class Animal(Document):
@@ -384,14 +449,14 @@ class TestGenericLazyReferenceField(MongoDBTestCase):
        mineral = Mineral(name="Granite").save()

        occ_animal = Ocurrence(living_thing=animal, thing=animal).save()
        occ_vegetal = Ocurrence(living_thing=vegetal, thing=vegetal).save()
        with self.assertRaises(ValidationError):
        _ = Ocurrence(living_thing=vegetal, thing=vegetal).save()
        with pytest.raises(ValidationError):
            Ocurrence(living_thing=mineral).save()

        occ = Ocurrence.objects.get(living_thing=animal)
        self.assertEqual(occ, occ_animal)
        self.assertIsInstance(occ.thing, LazyReference)
        self.assertIsInstance(occ.living_thing, LazyReference)
        assert occ == occ_animal
        assert isinstance(occ.thing, LazyReference)
        assert isinstance(occ.living_thing, LazyReference)

        occ.thing = vegetal
        occ.living_thing = vegetal
@@ -399,7 +464,7 @@ class TestGenericLazyReferenceField(MongoDBTestCase):

        occ.thing = mineral
        occ.living_thing = mineral
        with self.assertRaises(ValidationError):
        with pytest.raises(ValidationError):
            occ.save()

    def test_generic_lazy_reference_set(self):
@@ -434,7 +499,7 @@ class TestGenericLazyReferenceField(MongoDBTestCase):
        ):
            p = Ocurrence(person="test", animal=ref).save()
            p.reload()
            self.assertIsInstance(p.animal, (LazyReference, Document))
            assert isinstance(p.animal, (LazyReference, Document))
            p.animal.fetch()

    def test_generic_lazy_reference_bad_set(self):
@@ -455,8 +520,8 @@ class TestGenericLazyReferenceField(MongoDBTestCase):
        animal = Animal(name="Leopard", tag="heavy").save()
        baddoc = BadDoc().save()
        for bad in (42, "foo", baddoc, LazyReference(BadDoc, animal.pk)):
            with self.assertRaises(ValidationError):
                p = Ocurrence(person="test", animal=bad).save()
            with pytest.raises(ValidationError):
                Ocurrence(person="test", animal=bad).save()

    def test_generic_lazy_reference_query_conversion(self):
        class Member(Document):
@@ -481,14 +546,14 @@ class TestGenericLazyReferenceField(MongoDBTestCase):
        post2.save()

        post = BlogPost.objects(author=m1).first()
        self.assertEqual(post.id, post1.id)
        assert post.id == post1.id

        post = BlogPost.objects(author=m2).first()
        self.assertEqual(post.id, post2.id)
        assert post.id == post2.id

        # Same thing by passing a LazyReference instance
        post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first()
        self.assertEqual(post.id, post2.id)
        assert post.id == post2.id

    def test_generic_lazy_reference_not_set(self):
        class Animal(Document):
@@ -504,7 +569,7 @@ class TestGenericLazyReferenceField(MongoDBTestCase):

        Ocurrence(person="foo").save()
        p = Ocurrence.objects.get()
        self.assertIs(p.animal, None)
        assert p.animal is None

    def test_generic_lazy_reference_accepts_string_instead_of_class(self):
        class Animal(Document):
@@ -521,7 +586,7 @@ class TestGenericLazyReferenceField(MongoDBTestCase):
        animal = Animal().save()
        Ocurrence(animal=animal).save()
        p = Ocurrence.objects.get()
        self.assertEqual(p.animal, animal)
        assert p.animal == animal

    def test_generic_lazy_reference_embedded(self):
        class Animal(Document):
@@ -544,12 +609,12 @@ class TestGenericLazyReferenceField(MongoDBTestCase):
        animal2 = Animal(name="cheeta").save()

        def check_fields_type(occ):
            self.assertIsInstance(occ.direct, LazyReference)
            assert isinstance(occ.direct, LazyReference)
            for elem in occ.in_list:
                self.assertIsInstance(elem, LazyReference)
            self.assertIsInstance(occ.in_embedded.direct, LazyReference)
                assert isinstance(elem, LazyReference)
            assert isinstance(occ.in_embedded.direct, LazyReference)
            for elem in occ.in_embedded.in_list:
                self.assertIsInstance(elem, LazyReference)
                assert isinstance(elem, LazyReference)

        occ = Ocurrence(
            in_list=[animal1, animal2],
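A standalone sketch (not part of the diff) of the fetch() caching rule the LazyReferenceField tests above assert, assuming a local MongoDB:

from mongoengine import Document, LazyReferenceField, StringField, connect

class Animal(Document):
    tag = StringField()

class Ocurrence(Document):
    animal = LazyReferenceField(Animal)

connect(db="mongoenginetest")
Animal.drop_collection()
Ocurrence.drop_collection()

animal = Animal(tag="heavy").save()
Ocurrence(animal=animal).save()

occ = Ocurrence.objects.get()
first = occ.animal.fetch()  # hits the DB and caches the referenced document
animal.tag = "light"
animal.save()
assert occ.animal.fetch() is first                  # cached, tag still "heavy"
assert occ.animal.fetch(force=True).tag == "light"  # force=True re-fetches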
@@ -1,18 +1,28 @@
# -*- coding: utf-8 -*-
import six

try:
    from bson.int64 import Int64
except ImportError:
    Int64 = long
import pytest
from bson.int64 import Int64

from mongoengine import *
from mongoengine.connection import get_db

from tests.utils import MongoDBTestCase
from tests.utils import MongoDBTestCase, get_as_pymongo


class TestLongField(MongoDBTestCase):
    def test_storage(self):
        class Person(Document):
            value = LongField()

        Person.drop_collection()
        person = Person(value=5000)
        person.save()
        assert get_as_pymongo(person) == {"_id": person.id, "value": 5000}

    def test_construction_does_not_fail_with_invalid_value(self):
        class Person(Document):
            value = LongField()

        person = Person(value="not_an_int")
        assert person.value == "not_an_int"

    def test_long_field_is_considered_as_int64(self):
        """
        Tests that long fields are stored as long in mongo, even if long
@@ -24,28 +34,27 @@ class TestLongField(MongoDBTestCase):

        doc = TestLongFieldConsideredAsInt64(some_long=42).save()
        db = get_db()
        self.assertIsInstance(
        assert isinstance(
            db.test_long_field_considered_as_int64.find()[0]["some_long"], Int64
        )
        self.assertIsInstance(doc.some_long, six.integer_types)
        assert isinstance(doc.some_long, int)

    def test_long_validation(self):
        """Ensure that invalid values cannot be assigned to long fields.
        """
        """Ensure that invalid values cannot be assigned to long fields."""

        class TestDocument(Document):
            value = LongField(min_value=0, max_value=110)

        doc = TestDocument()
        doc.value = 50
        doc.validate()
        TestDocument(value=50).validate()

        doc.value = -1
        self.assertRaises(ValidationError, doc.validate)
        doc.value = 120
        self.assertRaises(ValidationError, doc.validate)
        doc.value = "ten"
        self.assertRaises(ValidationError, doc.validate)
        with pytest.raises(ValidationError):
            TestDocument(value=-1).validate()

        with pytest.raises(ValidationError):
            TestDocument(value=120).validate()

        with pytest.raises(ValidationError):
            TestDocument(value="ten").validate()

    def test_long_ne_operator(self):
        class TestDocument(Document):
@@ -56,4 +65,5 @@ class TestLongField(MongoDBTestCase):
        TestDocument(long_fld=None).save()
        TestDocument(long_fld=1).save()

        self.assertEqual(1, TestDocument.objects(long_fld__ne=None).count())
        assert TestDocument.objects(long_fld__ne=None).count() == 1
        assert TestDocument.objects(long_fld__ne=1).count() == 1
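For the new test_storage behavior above, a minimal standalone sketch (not part of the diff) of inspecting the raw BSON type a LongField produces; it assumes a local MongoDB and uses pymongo directly instead of the test suite's get_as_pymongo helper:

from bson.int64 import Int64

from mongoengine import Document, LongField, connect

class Person(Document):
    value = LongField()

connect(db="mongoenginetest")
Person.drop_collection()
person = Person(value=5000).save()

raw = Person._get_collection().find_one({"_id": person.id})
assert isinstance(raw["value"], Int64)            # stored as BSON int64 on the wire
assert raw == {"_id": person.id, "value": 5000}   # Int64 compares equal to int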
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
import datetime

from mongoengine import *
import pytest

from mongoengine import *
from tests.utils import MongoDBTestCase


@@ -19,11 +19,11 @@ class TestMapField(MongoDBTestCase):
        e.mapping["someint"] = 1
        e.save()

        with self.assertRaises(ValidationError):
        with pytest.raises(ValidationError):
            e.mapping["somestring"] = "abc"
            e.save()

        with self.assertRaises(ValidationError):
        with pytest.raises(ValidationError):

            class NoDeclaredType(Document):
                mapping = MapField()
@@ -51,10 +51,10 @@ class TestMapField(MongoDBTestCase):
        e.save()

        e2 = Extensible.objects.get(id=e.id)
        self.assertIsInstance(e2.mapping["somestring"], StringSetting)
        self.assertIsInstance(e2.mapping["someint"], IntegerSetting)
        assert isinstance(e2.mapping["somestring"], StringSetting)
        assert isinstance(e2.mapping["someint"], IntegerSetting)

        with self.assertRaises(ValidationError):
        with pytest.raises(ValidationError):
            e.mapping["someint"] = 123
            e.save()

@@ -74,9 +74,9 @@ class TestMapField(MongoDBTestCase):
        Test.objects.update_one(inc__my_map__DICTIONARY_KEY__number=1)

        test = Test.objects.get()
        self.assertEqual(test.my_map["DICTIONARY_KEY"].number, 2)
        assert test.my_map["DICTIONARY_KEY"].number == 2
        doc = self.db.test.find_one()
        self.assertEqual(doc["x"]["DICTIONARY_KEY"]["i"], 2)
        assert doc["x"]["DICTIONARY_KEY"]["i"] == 2

    def test_mapfield_numerical_index(self):
        """Ensure that MapField accept numeric strings as indexes."""
@@ -116,13 +116,13 @@ class TestMapField(MongoDBTestCase):
            actions={"friends": Action(operation="drink", object="beer")},
        ).save()

        self.assertEqual(1, Log.objects(visited__friends__exists=True).count())
        assert 1 == Log.objects(visited__friends__exists=True).count()

        self.assertEqual(
            1,
            Log.objects(
        assert (
            1
            == Log.objects(
                actions__friends__operation="drink", actions__friends__object="beer"
            ).count(),
            ).count()
        )

    def test_map_field_unicode(self):
@@ -135,11 +135,11 @@ class TestMapField(MongoDBTestCase):

        BlogPost.drop_collection()

        tree = BlogPost(info_dict={u"éééé": {"description": u"VALUE: éééé"}})
        tree = BlogPost(info_dict={"éééé": {"description": "VALUE: éééé"}})

        tree.save()

        self.assertEqual(
            BlogPost.objects.get(id=tree.id).info_dict[u"éééé"].description,
            u"VALUE: éééé",
        assert (
            BlogPost.objects.get(id=tree.id).info_dict["éééé"].description
            == "VALUE: éééé"
        )
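A standalone sketch (not part of the diff) of the atomic MapField update the test above exercises, assuming a local MongoDB; the field names and db_field aliases mirror the test:

from mongoengine import (
    Document,
    EmbeddedDocument,
    EmbeddedDocumentField,
    IntField,
    MapField,
    connect,
)

class Embedded(EmbeddedDocument):
    number = IntField(default=0, db_field="i")

class Test(Document):
    my_map = MapField(field=EmbeddedDocumentField(Embedded), db_field="x")

connect(db="mongoenginetest")
Test.drop_collection()
Test(my_map={"DICTIONARY_KEY": Embedded(number=1)}).save()

# Double-underscore syntax reaches through the map key into the embedded
# document, issuing a single $inc on "x.DICTIONARY_KEY.i":
Test.objects.update_one(inc__my_map__DICTIONARY_KEY__number=1)
assert Test.objects.get().my_map["DICTIONARY_KEY"].number == 2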
@ -1,8 +1,7 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import pytest
|
||||
from bson import SON, DBRef
|
||||
|
||||
from mongoengine import *
|
||||
|
||||
from tests.utils import MongoDBTestCase
|
||||
|
||||
|
||||
@ -24,19 +23,22 @@ class TestReferenceField(MongoDBTestCase):
|
||||
|
||||
# Make sure ReferenceField only accepts a document class or a string
|
||||
# with a document class name.
|
||||
self.assertRaises(ValidationError, ReferenceField, EmbeddedDocument)
|
||||
with pytest.raises(ValidationError):
|
||||
ReferenceField(EmbeddedDocument)
|
||||
|
||||
user = User(name="Test User")
|
||||
|
||||
# Ensure that the referenced object must have been saved
|
||||
post1 = BlogPost(content="Chips and gravy taste good.")
|
||||
post1.author = user
|
||||
self.assertRaises(ValidationError, post1.save)
|
||||
with pytest.raises(ValidationError):
|
||||
post1.save()
|
||||
|
||||
# Check that an invalid object type cannot be used
|
||||
post2 = BlogPost(content="Chips and chilli taste good.")
|
||||
post1.author = post2
|
||||
self.assertRaises(ValidationError, post1.validate)
|
||||
with pytest.raises(ValidationError):
|
||||
post1.validate()
|
||||
|
||||
# Ensure ObjectID's are accepted as references
|
||||
user_object_id = user.pk
|
||||
@ -52,22 +54,8 @@ class TestReferenceField(MongoDBTestCase):
|
||||
# Make sure referencing a saved document of the *wrong* type fails
|
||||
post2.save()
|
||||
post1.author = post2
|
||||
self.assertRaises(ValidationError, post1.validate)
|
||||
|
||||
def test_objectid_reference_fields(self):
|
||||
"""Make sure storing Object ID references works."""
|
||||
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
parent = ReferenceField("self")
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
p1 = Person(name="John").save()
|
||||
Person(name="Ross", parent=p1.pk).save()
|
||||
|
||||
p = Person.objects.get(name="Ross")
|
||||
self.assertEqual(p.parent, p1)
|
||||
with pytest.raises(ValidationError):
|
||||
post1.validate()
|
||||
|
||||
def test_dbref_reference_fields(self):
|
||||
"""Make sure storing references as bson.dbref.DBRef works."""
|
||||
@ -81,13 +69,12 @@ class TestReferenceField(MongoDBTestCase):
|
||||
p1 = Person(name="John").save()
|
||||
Person(name="Ross", parent=p1).save()
|
||||
|
||||
self.assertEqual(
|
||||
Person._get_collection().find_one({"name": "Ross"})["parent"],
|
||||
DBRef("person", p1.pk),
|
||||
assert Person._get_collection().find_one({"name": "Ross"})["parent"] == DBRef(
|
||||
"person", p1.pk
|
||||
)
|
||||
|
||||
p = Person.objects.get(name="Ross")
|
||||
self.assertEqual(p.parent, p1)
|
||||
assert p.parent == p1
|
||||
|
||||
def test_dbref_to_mongo(self):
|
||||
"""Make sure that calling to_mongo on a ReferenceField which
|
||||
@ -100,9 +87,7 @@ class TestReferenceField(MongoDBTestCase):
|
||||
parent = ReferenceField("self", dbref=False)
|
||||
|
||||
p = Person(name="Steve", parent=DBRef("person", "abcdefghijklmnop"))
|
||||
self.assertEqual(
|
||||
p.to_mongo(), SON([("name", u"Steve"), ("parent", "abcdefghijklmnop")])
|
||||
)
|
||||
assert p.to_mongo() == SON([("name", "Steve"), ("parent", "abcdefghijklmnop")])
|
||||
|
||||
def test_objectid_reference_fields(self):
|
||||
class Person(Document):
|
||||
@ -116,14 +101,13 @@ class TestReferenceField(MongoDBTestCase):
|
||||
|
||||
col = Person._get_collection()
|
||||
data = col.find_one({"name": "Ross"})
|
||||
self.assertEqual(data["parent"], p1.pk)
|
||||
assert data["parent"] == p1.pk
|
||||
|
||||
p = Person.objects.get(name="Ross")
|
||||
self.assertEqual(p.parent, p1)
|
||||
assert p.parent == p1
|
||||
|
||||
def test_undefined_reference(self):
|
||||
"""Ensure that ReferenceFields may reference undefined Documents.
|
||||
"""
|
||||
"""Ensure that ReferenceFields may reference undefined Documents."""
|
||||
|
||||
class Product(Document):
|
||||
name = StringField()
|
||||
@ -144,14 +128,14 @@ class TestReferenceField(MongoDBTestCase):
|
||||
me.save()
|
||||
|
||||
obj = Product.objects(company=ten_gen).first()
|
||||
self.assertEqual(obj, mongodb)
|
||||
self.assertEqual(obj.company, ten_gen)
|
||||
assert obj == mongodb
|
||||
assert obj.company == ten_gen
|
||||
|
||||
obj = Product.objects(company=None).first()
|
||||
self.assertEqual(obj, me)
|
||||
assert obj == me
|
||||
|
||||
obj = Product.objects.get(company=None)
|
||||
self.assertEqual(obj, me)
|
||||
assert obj == me
|
||||
|
||||
def test_reference_query_conversion(self):
|
||||
"""Ensure that ReferenceFields can be queried using objects and values
|
||||
@ -180,10 +164,10 @@ class TestReferenceField(MongoDBTestCase):
|
||||
post2.save()
|
||||
|
||||
post = BlogPost.objects(author=m1).first()
|
||||
self.assertEqual(post.id, post1.id)
|
||||
assert post.id == post1.id
|
||||
|
||||
post = BlogPost.objects(author=m2).first()
|
||||
self.assertEqual(post.id, post2.id)
|
||||
assert post.id == post2.id
|
||||
|
||||
def test_reference_query_conversion_dbref(self):
|
||||
"""Ensure that ReferenceFields can be queried using objects and values
|
||||
@ -212,7 +196,7 @@ class TestReferenceField(MongoDBTestCase):
|
||||
post2.save()
|
||||
|
||||
post = BlogPost.objects(author=m1).first()
|
||||
self.assertEqual(post.id, post1.id)
|
||||
assert post.id == post1.id
|
||||
|
||||
post = BlogPost.objects(author=m2).first()
|
||||
self.assertEqual(post.id, post2.id)
|
||||
assert post.id == post2.id
|
||||
|
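The conversion repeated throughout this file: unittest's self.assertRaises(Error, fn, *args) becomes a pytest.raises context manager, which keeps the failing call readable as an ordinary statement instead of a callable-plus-arguments tuple. A minimal standalone sketch of the pattern (divide and test_divide_by_zero are illustrative names, not from this repository):

    import pytest

    def divide(a, b):
        return a / b

    def test_divide_by_zero():
        # Equivalent of unittest's self.assertRaises(ZeroDivisionError, divide, 1, 0):
        # the with-block passes only if the expected exception is raised inside it.
        with pytest.raises(ZeroDivisionError):
            divide(1, 0)
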
@@ -1,7 +1,4 @@
-# -*- coding: utf-8 -*-
-
 from mongoengine import *
 
 from tests.utils import MongoDBTestCase
 
-
@@ -18,17 +15,17 @@ class TestSequenceField(MongoDBTestCase):
             Person(name="Person %s" % x).save()
 
         c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
-        self.assertEqual(c["next"], 10)
+        assert c["next"] == 10
 
         ids = [i.id for i in Person.objects]
-        self.assertEqual(ids, range(1, 11))
+        assert ids == list(range(1, 11))
 
         c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
-        self.assertEqual(c["next"], 10)
+        assert c["next"] == 10
 
         Person.id.set_next_value(1000)
         c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
-        self.assertEqual(c["next"], 1000)
+        assert c["next"] == 1000
 
     def test_sequence_field_get_next_value(self):
         class Person(Document):
@@ -41,10 +38,10 @@ class TestSequenceField(MongoDBTestCase):
         for x in range(10):
             Person(name="Person %s" % x).save()
 
-        self.assertEqual(Person.id.get_next_value(), 11)
+        assert Person.id.get_next_value() == 11
         self.db["mongoengine.counters"].drop()
 
-        self.assertEqual(Person.id.get_next_value(), 1)
+        assert Person.id.get_next_value() == 1
 
         class Person(Document):
             id = SequenceField(primary_key=True, value_decorator=str)
@@ -56,10 +53,10 @@ class TestSequenceField(MongoDBTestCase):
         for x in range(10):
             Person(name="Person %s" % x).save()
 
-        self.assertEqual(Person.id.get_next_value(), "11")
+        assert Person.id.get_next_value() == "11"
         self.db["mongoengine.counters"].drop()
 
-        self.assertEqual(Person.id.get_next_value(), "1")
+        assert Person.id.get_next_value() == "1"
 
     def test_sequence_field_sequence_name(self):
         class Person(Document):
@@ -73,17 +70,17 @@ class TestSequenceField(MongoDBTestCase):
             Person(name="Person %s" % x).save()
 
         c = self.db["mongoengine.counters"].find_one({"_id": "jelly.id"})
-        self.assertEqual(c["next"], 10)
+        assert c["next"] == 10
 
         ids = [i.id for i in Person.objects]
-        self.assertEqual(ids, range(1, 11))
+        assert ids == list(range(1, 11))
 
         c = self.db["mongoengine.counters"].find_one({"_id": "jelly.id"})
-        self.assertEqual(c["next"], 10)
+        assert c["next"] == 10
 
         Person.id.set_next_value(1000)
         c = self.db["mongoengine.counters"].find_one({"_id": "jelly.id"})
-        self.assertEqual(c["next"], 1000)
+        assert c["next"] == 1000
 
     def test_multiple_sequence_fields(self):
         class Person(Document):
@@ -98,24 +95,24 @@ class TestSequenceField(MongoDBTestCase):
             Person(name="Person %s" % x).save()
 
         c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
-        self.assertEqual(c["next"], 10)
+        assert c["next"] == 10
 
         ids = [i.id for i in Person.objects]
-        self.assertEqual(ids, range(1, 11))
+        assert ids == list(range(1, 11))
 
         counters = [i.counter for i in Person.objects]
-        self.assertEqual(counters, range(1, 11))
+        assert counters == list(range(1, 11))
 
         c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
-        self.assertEqual(c["next"], 10)
+        assert c["next"] == 10
 
         Person.id.set_next_value(1000)
         c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
-        self.assertEqual(c["next"], 1000)
+        assert c["next"] == 1000
 
         Person.counter.set_next_value(999)
         c = self.db["mongoengine.counters"].find_one({"_id": "person.counter"})
-        self.assertEqual(c["next"], 999)
+        assert c["next"] == 999
 
     def test_sequence_fields_reload(self):
         class Animal(Document):
@@ -127,20 +124,20 @@ class TestSequenceField(MongoDBTestCase):
 
         a = Animal(name="Boi").save()
 
-        self.assertEqual(a.counter, 1)
+        assert a.counter == 1
         a.reload()
-        self.assertEqual(a.counter, 1)
+        assert a.counter == 1
 
         a.counter = None
-        self.assertEqual(a.counter, 2)
+        assert a.counter == 2
         a.save()
 
-        self.assertEqual(a.counter, 2)
+        assert a.counter == 2
 
         a = Animal.objects.first()
-        self.assertEqual(a.counter, 2)
+        assert a.counter == 2
         a.reload()
-        self.assertEqual(a.counter, 2)
+        assert a.counter == 2
 
     def test_multiple_sequence_fields_on_docs(self):
         class Animal(Document):
@@ -160,22 +157,22 @@ class TestSequenceField(MongoDBTestCase):
             Person(name="Person %s" % x).save()
 
         c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
-        self.assertEqual(c["next"], 10)
+        assert c["next"] == 10
 
         c = self.db["mongoengine.counters"].find_one({"_id": "animal.id"})
-        self.assertEqual(c["next"], 10)
+        assert c["next"] == 10
 
         ids = [i.id for i in Person.objects]
-        self.assertEqual(ids, range(1, 11))
+        assert ids == list(range(1, 11))
 
-        id = [i.id for i in Animal.objects]
-        self.assertEqual(id, range(1, 11))
+        _id = [i.id for i in Animal.objects]
+        assert _id == list(range(1, 11))
 
         c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
-        self.assertEqual(c["next"], 10)
+        assert c["next"] == 10
 
         c = self.db["mongoengine.counters"].find_one({"_id": "animal.id"})
-        self.assertEqual(c["next"], 10)
+        assert c["next"] == 10
 
     def test_sequence_field_value_decorator(self):
         class Person(Document):
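The recurring range(1, 11) → list(range(1, 11)) edits above are Python 3 correctness fixes, not style: range() now returns a lazy sequence object that never compares equal to a list, and map() likewise returns an iterator (hence the list-comprehension rewrite of map(str, ...) further down). A standalone sketch:

    ids = [1, 2, 3]
    assert ids != range(1, 4)          # Python 3: range() is a lazy object, not a list
    assert ids == list(range(1, 4))    # materialize it before comparing
    assert ["1", "2", "3"] == [str(i) for i in range(1, 4)]  # replaces map(str, ...)
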
@@ -190,13 +187,13 @@ class TestSequenceField(MongoDBTestCase):
             p.save()
 
         c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
-        self.assertEqual(c["next"], 10)
+        assert c["next"] == 10
 
         ids = [i.id for i in Person.objects]
-        self.assertEqual(ids, map(str, range(1, 11)))
+        assert ids == [str(i) for i in range(1, 11)]
 
         c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
-        self.assertEqual(c["next"], 10)
+        assert c["next"] == 10
 
     def test_embedded_sequence_field(self):
         class Comment(EmbeddedDocument):
@@ -218,10 +215,10 @@ class TestSequenceField(MongoDBTestCase):
             ],
         ).save()
         c = self.db["mongoengine.counters"].find_one({"_id": "comment.id"})
-        self.assertEqual(c["next"], 2)
+        assert c["next"] == 2
         post = Post.objects.first()
-        self.assertEqual(1, post.comments[0].id)
-        self.assertEqual(2, post.comments[1].id)
+        assert 1 == post.comments[0].id
+        assert 2 == post.comments[1].id
 
     def test_inherited_sequencefield(self):
         class Base(Document):
@@ -241,16 +238,14 @@ class TestSequenceField(MongoDBTestCase):
         foo = Foo(name="Foo")
         foo.save()
 
-        self.assertTrue(
-            "base.counter" in self.db["mongoengine.counters"].find().distinct("_id")
-        )
-        self.assertFalse(
+        assert "base.counter" in self.db["mongoengine.counters"].find().distinct("_id")
+        assert not (
             ("foo.counter" or "bar.counter")
             in self.db["mongoengine.counters"].find().distinct("_id")
         )
-        self.assertNotEqual(foo.counter, bar.counter)
-        self.assertEqual(foo._fields["counter"].owner_document, Base)
-        self.assertEqual(bar._fields["counter"].owner_document, Base)
+        assert foo.counter != bar.counter
+        assert foo._fields["counter"].owner_document == Base
+        assert bar._fields["counter"].owner_document == Base
 
     def test_no_inherited_sequencefield(self):
         class Base(Document):
@@ -269,13 +264,34 @@ class TestSequenceField(MongoDBTestCase):
         foo = Foo(name="Foo")
         foo.save()
 
-        self.assertFalse(
-            "base.counter" in self.db["mongoengine.counters"].find().distinct("_id")
-        )
-        self.assertTrue(
-            ("foo.counter" and "bar.counter")
-            in self.db["mongoengine.counters"].find().distinct("_id")
-        )
-        self.assertEqual(foo.counter, bar.counter)
-        self.assertEqual(foo._fields["counter"].owner_document, Foo)
-        self.assertEqual(bar._fields["counter"].owner_document, Bar)
+        assert "base.counter" not in self.db["mongoengine.counters"].find().distinct(
+            "_id"
+        )
+        existing_counters = self.db["mongoengine.counters"].find().distinct("_id")
+        assert "foo.counter" in existing_counters
+        assert "bar.counter" in existing_counters
+        assert foo.counter == bar.counter
+        assert foo._fields["counter"].owner_document == Foo
+        assert bar._fields["counter"].owner_document == Bar
+
+    def test_sequence_setattr_not_incrementing_counter(self):
+        class Person(DynamicDocument):
+            id = SequenceField(primary_key=True)
+            name = StringField()
+
+        self.db["mongoengine.counters"].drop()
+        Person.drop_collection()
+
+        for x in range(10):
+            Person(name="Person %s" % x).save()
+
+        c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
+        assert c["next"] == 10
+
+        # Setting SequenceField field value should not increment counter:
+        new_person = Person()
+        new_person.id = 1100
+
+        # Counter should still be at 10
+        c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
+        assert c["next"] == 10
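One caveat the diff preserves: in the test_inherited_sequencefield hunk, the expression ("foo.counter" or "bar.counter") short-circuits to its first operand, so the membership check never inspects "bar.counter" at all. The test_no_inherited_sequencefield hunk sidesteps the trap by checking each name against existing_counters separately. A standalone sketch of the pitfall:

    # "x or y" on two non-empty strings short-circuits to the first operand:
    assert ("foo.counter" or "bar.counter") == "foo.counter"

    counters = ["base.counter", "bar.counter"]
    # ...so the combined membership test only looks for "foo.counter" and
    # passes even though "bar.counter" is present:
    assert ("foo.counter" or "bar.counter") not in counters
    assert "foo.counter" not in counters
    # assert "bar.counter" not in counters  # the explicit form would fail here
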
tests/fields/test_string_field.py (new file, +43 lines)
@@ -0,0 +1,43 @@
+import pytest
+
+from mongoengine import *
+from tests.utils import MongoDBTestCase, get_as_pymongo
+
+
+class TestStringField(MongoDBTestCase):
+    def test_storage(self):
+        class Person(Document):
+            name = StringField()
+
+        Person.drop_collection()
+        person = Person(name="test123")
+        person.save()
+        assert get_as_pymongo(person) == {"_id": person.id, "name": "test123"}
+
+    def test_validation(self):
+        class Person(Document):
+            name = StringField(max_length=20, min_length=2)
+            userid = StringField(r"[0-9a-z_]+$")
+
+        with pytest.raises(ValidationError, match="only accepts string values"):
+            Person(name=34).validate()
+
+        with pytest.raises(ValidationError, match="value is too short"):
+            Person(name="s").validate()
+
+        # Test regex validation on userid
+        person = Person(userid="test.User")
+        with pytest.raises(ValidationError):
+            person.validate()
+
+        person.userid = "test_user"
+        assert person.userid == "test_user"
+        person.validate()
+
+        # Test max length validation on name
+        person = Person(name="Name that is more than twenty characters")
+        with pytest.raises(ValidationError):
+            person.validate()
+
+        person = Person(name="a friendly name", userid="7a757668sqjdkqlsdkq")
+        person.validate()
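The new file leans on pytest.raises(..., match=...), which applies re.search to the string form of the raised exception, so a distinctive substring (or regex fragment) of the message is enough. A standalone sketch (save_user is an illustrative name, not from this repository):

    import re
    import pytest

    def save_user():
        raise ValueError("userid must match [0-9a-z_]+")

    def test_match_uses_re_search():
        # match= is a regex search over str(exception); re.escape keeps
        # metacharacters like the brackets literal.
        with pytest.raises(ValueError, match=re.escape("[0-9a-z_]+")):
            save_user()
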
@@ -1,6 +1,6 @@
-# -*- coding: utf-8 -*-
-from mongoengine import *
+import pytest
+
+from mongoengine import *
 from tests.utils import MongoDBTestCase
 
 
@@ -13,7 +13,8 @@ class TestURLField(MongoDBTestCase):
 
         link = Link()
         link.url = "google"
-        self.assertRaises(ValidationError, link.validate)
+        with pytest.raises(ValidationError):
+            link.validate()
 
         link.url = "http://www.google.com:8080"
         link.validate()
@@ -25,20 +26,19 @@ class TestURLField(MongoDBTestCase):
             url = URLField()
 
         link = Link()
-        link.url = u"http://привет.com"
+        link.url = "http://привет.com"
 
         # TODO fix URL validation - this *IS* a valid URL
         # For now we just want to make sure that the error message is correct
-        with self.assertRaises(ValidationError) as ctx_err:
+        with pytest.raises(ValidationError) as exc_info:
             link.validate()
-        self.assertEqual(
-            unicode(ctx_err.exception),
-            u"ValidationError (Link:None) (Invalid URL: http://\u043f\u0440\u0438\u0432\u0435\u0442.com: ['url'])",
+        assert (
+            str(exc_info.value)
+            == "ValidationError (Link:None) (Invalid URL: http://\u043f\u0440\u0438\u0432\u0435\u0442.com: ['url'])"
         )
 
     def test_url_scheme_validation(self):
-        """Ensure that URLFields validate urls with specific schemes properly.
-        """
+        """Ensure that URLFields validate urls with specific schemes properly."""
 
         class Link(Document):
             url = URLField()
@@ -48,7 +48,8 @@ class TestURLField(MongoDBTestCase):
 
         link = Link()
         link.url = "ws://google.com"
-        self.assertRaises(ValidationError, link.validate)
+        with pytest.raises(ValidationError):
+            link.validate()
 
         scheme_link = SchemeLink()
         scheme_link.url = "ws://google.com"
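The URL-field hunk also shows the pytest replacement for unittest's captured exception: the context manager yields an ExceptionInfo wrapper whose .value attribute is the exception instance (unittest exposed it as ctx.exception), and Python 3's str() replaces unicode() for reading the message. A standalone sketch (boom is an illustrative name):

    import pytest

    def boom():
        raise RuntimeError("kaboom")

    def test_exception_message():
        with pytest.raises(RuntimeError) as exc_info:
            boom()
        # exc_info is an ExceptionInfo wrapper; the raised exception itself
        # lives on .value
        assert isinstance(exc_info.value, RuntimeError)
        assert str(exc_info.value) == "kaboom"
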
@@ -1,8 +1,8 @@
-# -*- coding: utf-8 -*-
 import uuid
 
-from mongoengine import *
+import pytest
+
+from mongoengine import *
 from tests.utils import MongoDBTestCase, get_as_pymongo
 
 
@@ -14,19 +14,16 @@ class TestUUIDField(MongoDBTestCase):
     def test_storage(self):
         uid = uuid.uuid4()
         person = Person(api_key=uid).save()
-        self.assertEqual(
-            get_as_pymongo(person), {"_id": person.id, "api_key": str(uid)}
-        )
+        assert get_as_pymongo(person) == {"_id": person.id, "api_key": str(uid)}
 
     def test_field_string(self):
-        """Test UUID fields storing as String
-        """
+        """Test UUID fields storing as String"""
         Person.drop_collection()
 
         uu = uuid.uuid4()
         Person(api_key=uu).save()
-        self.assertEqual(1, Person.objects(api_key=uu).count())
-        self.assertEqual(uu, Person.objects.first().api_key)
+        assert 1 == Person.objects(api_key=uu).count()
+        assert uu == Person.objects.first().api_key
 
         person = Person()
         valid = (uuid.uuid4(), uuid.uuid1())
@@ -40,7 +37,8 @@ class TestUUIDField(MongoDBTestCase):
         )
         for api_key in invalid:
             person.api_key = api_key
-            self.assertRaises(ValidationError, person.validate)
+            with pytest.raises(ValidationError):
+                person.validate()
 
     def test_field_binary(self):
         """Test UUID fields storing as Binary object."""
@@ -48,8 +46,8 @@ class TestUUIDField(MongoDBTestCase):
 
         uu = uuid.uuid4()
         Person(api_key=uu).save()
-        self.assertEqual(1, Person.objects(api_key=uu).count())
-        self.assertEqual(uu, Person.objects.first().api_key)
+        assert 1 == Person.objects(api_key=uu).count()
+        assert uu == Person.objects.first().api_key
 
         person = Person()
         valid = (uuid.uuid4(), uuid.uuid1())
@@ -63,4 +61,5 @@ class TestUUIDField(MongoDBTestCase):
         )
         for api_key in invalid:
             person.api_key = api_key
-            self.assertRaises(ValidationError, person.validate)
+            with pytest.raises(ValidationError):
+                person.validate()
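The test_storage assertion above compares the raw pymongo document against str(uid), which suggests the shared Person fixture (defined above the excerpted hunks) declares the field with binary=False; UUIDField's default is to store BSON binary. A sketch of a model consistent with that assertion, offered as an assumption rather than the repository's actual fixture:

    import uuid
    from mongoengine import Document, UUIDField

    class Person(Document):
        # Assumed field definition: binary=False stores the UUID as its
        # string form, which is what the comparison against str(uid) relies on.
        api_key = UUIDField(binary=False)
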
@@ -42,18 +42,18 @@ class PickleSignalsTest(Document):
 
     @classmethod
     def post_save(self, sender, document, created, **kwargs):
-        pickled = pickle.dumps(document)
+        pickle.dumps(document)
 
     @classmethod
     def post_delete(self, sender, document, **kwargs):
-        pickled = pickle.dumps(document)
+        pickle.dumps(document)
 
 
 signals.post_save.connect(PickleSignalsTest.post_save, sender=PickleSignalsTest)
 signals.post_delete.connect(PickleSignalsTest.post_delete, sender=PickleSignalsTest)
 
 
-class Mixin(object):
+class Mixin:
     name = StringField()
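Two Python 3 cleanups close this hunk: the unused pickled = binding is dropped because pickle.dumps(document) is called purely for its side effect of raising if the document cannot be pickled, and class Mixin(object): loses the explicit base because every Python 3 class is already new-style. A standalone sketch of the latter (OldStyle/NewStyle are illustrative names):

    class OldStyle(object):  # the (object) spelling was needed on Python 2
        pass

    class NewStyle:          # Python 3: every class already derives from object
        pass

    assert issubclass(NewStyle, object)
    assert NewStyle.__mro__ == (NewStyle, object)
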
@@ -1,6 +0,0 @@
-from .transform import *
-from .field_list import *
-from .queryset import *
-from .visitor import *
-from .geo import *
-from .modify import *
Some files were not shown because too many files have changed in this diff.